From 6c2480cad98891339146af0ac48739bce1832287 Mon Sep 17 00:00:00 2001 From: npm CLI robot Date: Wed, 6 Sep 2023 12:54:44 -0700 Subject: [PATCH] deps: upgrade npm to 10.0.0 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/49423 Reviewed-By: Matteo Collina Reviewed-By: Debadree Chatterjee Reviewed-By: Richard Lau Reviewed-By: Tobias Nießen Reviewed-By: Ruy Adorno Reviewed-By: Luigi Pinca Reviewed-By: Jiawen Geng --- deps/npm/docs/content/commands/npm-ls.md | 2 +- deps/npm/docs/content/commands/npm-pkg.md | 1 - deps/npm/docs/content/commands/npm.md | 2 +- .../content/configuring-npm/package-json.md | 7 +- deps/npm/docs/content/using-npm/config.md | 28 - deps/npm/docs/output/commands/npm-ls.html | 2 +- deps/npm/docs/output/commands/npm-pkg.html | 1 - deps/npm/docs/output/commands/npm.html | 2 +- .../output/configuring-npm/package-json.html | 7 +- deps/npm/docs/output/using-npm/config.html | 24 +- deps/npm/lib/commands/audit.js | 6 +- deps/npm/lib/commands/run-script.js | 16 +- deps/npm/lib/commands/search.js | 4 + deps/npm/lib/utils/format-search-stream.js | 2 +- deps/npm/man/man1/npm-access.1 | 2 +- deps/npm/man/man1/npm-adduser.1 | 2 +- deps/npm/man/man1/npm-audit.1 | 2 +- deps/npm/man/man1/npm-bugs.1 | 2 +- deps/npm/man/man1/npm-cache.1 | 2 +- deps/npm/man/man1/npm-ci.1 | 2 +- deps/npm/man/man1/npm-completion.1 | 2 +- deps/npm/man/man1/npm-config.1 | 2 +- deps/npm/man/man1/npm-dedupe.1 | 2 +- deps/npm/man/man1/npm-deprecate.1 | 2 +- deps/npm/man/man1/npm-diff.1 | 2 +- deps/npm/man/man1/npm-dist-tag.1 | 2 +- deps/npm/man/man1/npm-docs.1 | 2 +- deps/npm/man/man1/npm-doctor.1 | 2 +- deps/npm/man/man1/npm-edit.1 | 2 +- deps/npm/man/man1/npm-exec.1 | 2 +- deps/npm/man/man1/npm-explain.1 | 2 +- deps/npm/man/man1/npm-explore.1 | 2 +- deps/npm/man/man1/npm-find-dupes.1 | 2 +- deps/npm/man/man1/npm-fund.1 | 2 +- deps/npm/man/man1/npm-help-search.1 | 2 +- deps/npm/man/man1/npm-help.1 | 2 +- deps/npm/man/man1/npm-hook.1 | 2 +- deps/npm/man/man1/npm-init.1 | 2 +- deps/npm/man/man1/npm-install-ci-test.1 | 2 +- deps/npm/man/man1/npm-install-test.1 | 2 +- deps/npm/man/man1/npm-install.1 | 2 +- deps/npm/man/man1/npm-link.1 | 2 +- deps/npm/man/man1/npm-login.1 | 2 +- deps/npm/man/man1/npm-logout.1 | 2 +- deps/npm/man/man1/npm-ls.1 | 4 +- deps/npm/man/man1/npm-org.1 | 2 +- deps/npm/man/man1/npm-outdated.1 | 2 +- deps/npm/man/man1/npm-owner.1 | 2 +- deps/npm/man/man1/npm-pack.1 | 2 +- deps/npm/man/man1/npm-ping.1 | 2 +- deps/npm/man/man1/npm-pkg.1 | 4 +- deps/npm/man/man1/npm-prefix.1 | 2 +- deps/npm/man/man1/npm-profile.1 | 2 +- deps/npm/man/man1/npm-prune.1 | 2 +- deps/npm/man/man1/npm-publish.1 | 2 +- deps/npm/man/man1/npm-query.1 | 2 +- deps/npm/man/man1/npm-rebuild.1 | 2 +- deps/npm/man/man1/npm-repo.1 | 2 +- deps/npm/man/man1/npm-restart.1 | 2 +- deps/npm/man/man1/npm-root.1 | 2 +- deps/npm/man/man1/npm-run-script.1 | 2 +- deps/npm/man/man1/npm-search.1 | 2 +- deps/npm/man/man1/npm-shrinkwrap.1 | 2 +- deps/npm/man/man1/npm-star.1 | 2 +- deps/npm/man/man1/npm-stars.1 | 2 +- deps/npm/man/man1/npm-start.1 | 2 +- deps/npm/man/man1/npm-stop.1 | 2 +- deps/npm/man/man1/npm-team.1 | 2 +- deps/npm/man/man1/npm-test.1 | 2 +- deps/npm/man/man1/npm-token.1 | 2 +- deps/npm/man/man1/npm-uninstall.1 | 2 +- deps/npm/man/man1/npm-unpublish.1 | 2 +- deps/npm/man/man1/npm-unstar.1 | 2 +- deps/npm/man/man1/npm-update.1 | 2 +- deps/npm/man/man1/npm-version.1 | 2 +- deps/npm/man/man1/npm-view.1 | 2 +- deps/npm/man/man1/npm-whoami.1 | 2 +- 
deps/npm/man/man1/npm.1 | 4 +- deps/npm/man/man1/npx.1 | 2 +- deps/npm/man/man5/folders.5 | 2 +- deps/npm/man/man5/install.5 | 2 +- deps/npm/man/man5/npm-global.5 | 2 +- deps/npm/man/man5/npm-json.5 | 10 +- deps/npm/man/man5/npm-shrinkwrap-json.5 | 2 +- deps/npm/man/man5/npmrc.5 | 2 +- deps/npm/man/man5/package-json.5 | 10 +- deps/npm/man/man5/package-lock-json.5 | 2 +- deps/npm/man/man7/config.7 | 26 +- deps/npm/man/man7/dependency-selectors.7 | 2 +- deps/npm/man/man7/developers.7 | 2 +- deps/npm/man/man7/logging.7 | 2 +- deps/npm/man/man7/orgs.7 | 2 +- deps/npm/man/man7/package-spec.7 | 2 +- deps/npm/man/man7/registry.7 | 2 +- deps/npm/man/man7/removal.7 | 2 +- deps/npm/man/man7/scope.7 | 2 +- deps/npm/man/man7/scripts.7 | 2 +- deps/npm/man/man7/workspaces.7 | 2 +- .../node_modules/@npmcli/agent/lib/agents.js | 201 +++ .../npm/node_modules/@npmcli/agent/lib/dns.js | 53 + .../node_modules/@npmcli/agent/lib/errors.js | 65 + .../node_modules/@npmcli/agent/lib/index.js | 46 + .../node_modules/@npmcli/agent/lib/options.js | 74 + .../node_modules/@npmcli/agent/lib/proxy.js | 96 ++ .../node_modules/@npmcli/agent/lib/util.js | 84 ++ .../node_modules/agent-base/dist/helpers.js | 66 + .../node_modules/agent-base/dist/index.js | 112 ++ .../node_modules/agent-base/package.json | 49 + .../node_modules/http-proxy-agent}/LICENSE | 5 +- .../http-proxy-agent/dist/index.js | 147 ++ .../http-proxy-agent/package.json | 47 + .../https-proxy-agent/dist/index.js | 170 +++ .../dist/parse-proxy-response.js | 98 ++ .../https-proxy-agent/package.json | 50 + .../socks-proxy-agent/dist/index.js | 181 +++ .../socks-proxy-agent/package.json | 142 ++ .../node_modules/@npmcli/agent/package.json | 64 + .../@npmcli/arborist/package.json | 39 +- .../config/lib/definitions/definitions.js | 40 +- .../@npmcli/config/lib/definitions/index.js | 3 - .../node_modules/@npmcli/config/lib/index.js | 14 +- .../node_modules/@npmcli/config/package.json | 4 +- deps/npm/node_modules/@npmcli/git/lib/revs.js | 4 +- .../npm/node_modules/@npmcli/git/package.json | 26 +- .../@npmcli/metavuln-calculator/package.json | 20 +- .../@npmcli/package-json/package.json | 22 +- .../@npmcli/promise-spawn/package.json | 18 +- .../@npmcli/run-script/package.json | 18 +- .../npm/node_modules/@sigstore/bundle/LICENSE | 202 +++ .../@sigstore/bundle/dist/build.js | 89 ++ .../@sigstore/bundle/dist/bundle.js | 22 + .../@sigstore/bundle/dist/error.js | 25 + .../@sigstore/bundle/dist/index.js | 40 + .../@sigstore/bundle/dist/serialized.js | 38 + .../bundle/dist/utility.js} | 0 .../bundle/dist}/validate.js | 82 +- .../@sigstore/bundle/package.json | 35 + .../dist/__generated__/envelope.js | 14 +- .../dist/__generated__/events.js | 185 +++ .../google/api/field_behavior.js | 6 +- .../dist/__generated__/google/protobuf/any.js | 65 + .../google/protobuf/descriptor.js | 38 +- .../dist/__generated__/sigstore_common.js | 26 +- .../dist/__generated__/sigstore_rekor.js | 14 +- .../__generated__/sigstore_verification.js | 14 +- .../@sigstore/protobuf-specs/package.json | 2 +- deps/npm/node_modules/@sigstore/sign/LICENSE | 202 +++ .../@sigstore/sign/dist/bundler/base.js | 50 + .../@sigstore/sign/dist/bundler/bundle.js | 70 + .../@sigstore/sign/dist/bundler/dsse.js | 45 + .../@sigstore/sign/dist/bundler/index.js | 7 + .../@sigstore/sign/dist/bundler/message.js | 30 + .../node_modules/@sigstore/sign/dist/error.js | 39 + .../@sigstore/sign/dist/external/error.js | 38 + .../sign}/dist/external/fulcio.js | 4 +- .../sign}/dist/external/rekor.js | 10 +- 
.../sign}/dist/external/tsa.js | 2 +- .../sign}/dist/identity/ci.js | 10 +- .../@sigstore/sign/dist/identity/index.js | 20 + .../sign/dist/identity/provider.js} | 0 .../node_modules/@sigstore/sign/dist/index.js | 17 + .../@sigstore/sign/dist/signer/fulcio/ca.js | 60 + .../sign/dist/signer/fulcio/ephemeral.js | 45 + .../sign/dist/signer/fulcio/index.js | 87 ++ .../@sigstore/sign/dist/signer/index.js | 21 + .../sign/dist/signer/signer.js} | 6 +- .../@sigstore/sign/dist/types/fetch.js | 2 + .../@sigstore/sign/dist/util/crypto.js | 27 + .../@sigstore/sign/dist/util/dsse.js | 25 + .../@sigstore/sign/dist/util/encoding.js | 28 + .../@sigstore/sign/dist/util/index.js | 48 + .../@sigstore/sign/dist/util/json.js | 61 + .../sign}/dist/util/oidc.js | 2 +- .../@sigstore/sign/dist/util/pem.js | 27 + .../sign}/dist/util/ua.js | 2 +- .../@sigstore/sign/dist/witness/index.js | 23 + .../sign/dist/witness/tlog/client.js} | 38 +- .../sign/dist/witness/tlog/entry.js} | 112 +- .../@sigstore/sign/dist/witness/tlog/index.js | 81 ++ .../sign/dist/witness/tsa/client.js} | 16 +- .../@sigstore/sign/dist/witness/tsa/index.js | 44 + .../@sigstore/sign/dist/witness/witness.js | 2 + .../node_modules/@sigstore/sign/package.json | 42 + .../node_modules/@sigstore/tuf/dist/client.js | 15 +- .../node_modules/@sigstore/tuf/package.json | 10 +- .../@tufjs/canonical-json/package.json | 10 +- .../node_modules/@tufjs/models/dist/base.js | 2 +- .../node_modules/@tufjs/models/package.json | 12 +- .../node_modules/agentkeepalive/lib/agent.js | 8 +- .../agentkeepalive/lib/https_agent.js | 4 +- .../node_modules/agentkeepalive/package.json | 6 +- .../node_modules/cacache/lib/memoization.js | 4 +- deps/npm/node_modules/cacache/package.json | 20 +- deps/npm/node_modules/depd/History.md | 103 -- deps/npm/node_modules/depd/index.js | 538 -------- .../node_modules/depd/lib/browser/index.js | 77 -- deps/npm/node_modules/depd/package.json | 45 - .../npm/node_modules/fs-minipass/package.json | 8 +- deps/npm/node_modules/glob/README.md | 4 +- .../node_modules/glob/dist/cjs/package.json | 2 +- .../npm/node_modules/glob/dist/cjs/src/bin.js | 16 +- .../node_modules/glob/dist/cjs/src/bin.js.map | 2 +- .../glob/dist/cjs/src/glob.d.ts.map | 2 +- .../node_modules/glob/dist/cjs/src/glob.js | 4 + .../glob/dist/cjs/src/glob.js.map | 2 +- .../node_modules/glob/dist/mjs/glob.d.ts.map | 2 +- deps/npm/node_modules/glob/dist/mjs/glob.js | 4 + .../node_modules/glob/dist/mjs/glob.js.map | 2 +- .../node_modules/glob/dist/mjs/package.json | 2 +- deps/npm/node_modules/glob/package.json | 8 +- .../node_modules/hosted-git-info/lib/index.js | 4 +- .../node_modules/hosted-git-info/package.json | 17 +- .../init-package-json/package.json | 21 +- .../node_modules/libnpmaccess/package.json | 20 +- deps/npm/node_modules/libnpmdiff/package.json | 20 +- deps/npm/node_modules/libnpmexec/package.json | 22 +- deps/npm/node_modules/libnpmfund/package.json | 6 +- deps/npm/node_modules/libnpmhook/package.json | 18 +- deps/npm/node_modules/libnpmorg/package.json | 20 +- deps/npm/node_modules/libnpmpack/package.json | 24 +- .../libnpmpublish/lib/provenance.js | 2 +- .../node_modules/libnpmpublish/lib/publish.js | 89 +- .../node_modules/libnpmpublish/package.json | 27 +- .../node_modules/libnpmsearch/package.json | 18 +- deps/npm/node_modules/libnpmteam/package.json | 18 +- .../node_modules/libnpmversion/package.json | 18 +- .../lru-cache/dist/cjs/index.js | 13 +- .../lru-cache/dist/cjs/index.min.js | 2 + .../lru-cache/dist/cjs/package.json | 0 .../lru-cache/dist/mjs/index.js | 13 +- 
.../lru-cache/dist/mjs/index.min.js | 2 + .../lru-cache/dist/mjs/package.json | 0 deps/npm/node_modules/lru-cache/package.json | 70 +- .../make-fetch-happen/lib/remote.js | 10 +- .../make-fetch-happen/package.json | 24 +- .../node_modules/minipass-fetch/package.json | 8 +- .../node_modules/minipass/dist/cjs/index.js | 1028 ++++++++++++++ .../minipass/dist/cjs/package.json | 3 + .../node_modules/minipass/dist/mjs/index.js | 1018 ++++++++++++++ .../minipass/dist/mjs/package.json | 3 + deps/npm/node_modules/minipass/package.json | 100 +- .../node-gyp/node_modules/cacache/LICENSE.md | 16 + .../node_modules/cacache/lib/content/path.js | 29 + .../node_modules/cacache/lib/content/read.js | 166 +++ .../node_modules/cacache/lib/content/rm.js | 18 + .../node_modules/cacache/lib/content/write.js | 205 +++ .../node_modules/cacache/lib/entry-index.js | 330 +++++ .../node-gyp/node_modules/cacache/lib/get.js | 170 +++ .../node_modules/cacache/lib/index.js | 42 + .../node_modules/cacache/lib/memoization.js | 72 + .../node-gyp/node_modules/cacache/lib/put.js | 80 ++ .../node-gyp/node_modules/cacache/lib/rm.js | 31 + .../node_modules/cacache/lib/util/glob.js | 7 + .../cacache/lib/util/hash-to-segments.js | 7 + .../node_modules/cacache/lib/util/tmp.js | 26 + .../node_modules/cacache/lib/verify.js | 257 ++++ .../node_modules/brace-expansion/LICENSE | 21 + .../node_modules/brace-expansion/index.js | 202 +++ .../node_modules/brace-expansion/package.json | 46 + .../cacache/node_modules/glob/LICENSE | 15 + .../cacache/node_modules/glob/README.md | 1214 +++++++++++++++++ .../node_modules/glob/dist/cjs/package.json | 4 + .../node_modules/glob/dist/cjs/src/bin.d.ts | 3 + .../glob/dist/cjs/src/bin.d.ts.map | 1 + .../node_modules/glob/dist/cjs/src/bin.js | 270 ++++ .../node_modules/glob/dist/cjs/src/bin.js.map | 1 + .../node_modules/glob/dist/cjs/src/glob.d.ts | 344 +++++ .../glob/dist/cjs/src/glob.d.ts.map | 1 + .../node_modules/glob/dist/cjs/src/glob.js | 238 ++++ .../glob/dist/cjs/src/glob.js.map | 1 + .../glob/dist/cjs/src/has-magic.d.ts | 14 + .../glob/dist/cjs/src/has-magic.d.ts.map | 1 + .../glob/dist/cjs/src/has-magic.js | 27 + .../glob/dist/cjs/src/has-magic.js.map | 1 + .../glob/dist/cjs/src/ignore.d.ts | 20 + .../glob/dist/cjs/src/ignore.d.ts.map | 1 + .../node_modules/glob/dist/cjs/src/ignore.js | 103 ++ .../glob/dist/cjs/src/ignore.js.map | 1 + .../node_modules/glob/dist/cjs/src/index.d.ts | 95 ++ .../glob/dist/cjs/src/index.d.ts.map | 1 + .../node_modules/glob/dist/cjs/src/index.js | 68 + .../glob/dist/cjs/src/index.js.map | 1 + .../glob/dist/cjs/src/pattern.d.ts | 77 ++ .../glob/dist/cjs/src/pattern.d.ts.map | 1 + .../node_modules/glob/dist/cjs/src/pattern.js | 219 +++ .../glob/dist/cjs/src/pattern.js.map | 1 + .../glob/dist/cjs/src/processor.d.ts | 59 + .../glob/dist/cjs/src/processor.d.ts.map | 1 + .../glob/dist/cjs/src/processor.js | 309 +++++ .../glob/dist/cjs/src/processor.js.map | 1 + .../glob/dist/cjs/src/walker.d.ts | 96 ++ .../glob/dist/cjs/src/walker.d.ts.map | 1 + .../node_modules/glob/dist/cjs/src/walker.js | 358 +++++ .../glob/dist/cjs/src/walker.js.map | 1 + .../node_modules/glob/dist/mjs/glob.d.ts | 344 +++++ .../node_modules/glob/dist/mjs/glob.d.ts.map | 1 + .../node_modules/glob/dist/mjs/glob.js | 234 ++++ .../node_modules/glob/dist/mjs/glob.js.map | 1 + .../node_modules/glob/dist/mjs/has-magic.d.ts | 14 + .../glob/dist/mjs/has-magic.d.ts.map | 1 + .../node_modules/glob/dist/mjs/has-magic.js | 23 + .../glob/dist/mjs/has-magic.js.map | 1 + .../node_modules/glob/dist/mjs/ignore.d.ts | 20 
+ .../glob/dist/mjs/ignore.d.ts.map | 1 + .../node_modules/glob/dist/mjs/ignore.js | 99 ++ .../node_modules/glob/dist/mjs/ignore.js.map | 1 + .../node_modules/glob/dist/mjs/index.d.ts | 95 ++ .../node_modules/glob/dist/mjs/index.d.ts.map | 1 + .../node_modules/glob/dist/mjs/index.js | 56 + .../node_modules/glob/dist/mjs/index.js.map | 1 + .../node_modules/glob/dist/mjs/package.json | 4 + .../node_modules/glob/dist/mjs/pattern.d.ts | 77 ++ .../glob/dist/mjs/pattern.d.ts.map | 1 + .../node_modules/glob/dist/mjs/pattern.js | 215 +++ .../node_modules/glob/dist/mjs/pattern.js.map | 1 + .../node_modules/glob/dist/mjs/processor.d.ts | 59 + .../glob/dist/mjs/processor.d.ts.map | 1 + .../node_modules/glob/dist/mjs/processor.js | 302 ++++ .../glob/dist/mjs/processor.js.map | 1 + .../node_modules/glob/dist/mjs/walker.d.ts | 96 ++ .../glob/dist/mjs/walker.d.ts.map | 1 + .../node_modules/glob/dist/mjs/walker.js | 352 +++++ .../node_modules/glob/dist/mjs/walker.js.map | 1 + .../cacache/node_modules/glob/package.json | 98 ++ .../cacache/node_modules/minimatch/LICENSE | 15 + .../dist/cjs/assert-valid-pattern.js | 14 + .../node_modules/minimatch/dist/cjs/ast.js | 589 ++++++++ .../minimatch/dist/cjs/brace-expressions.js | 152 +++ .../node_modules/minimatch/dist/cjs/escape.js | 22 + .../node_modules/minimatch/dist/cjs/index.js | 1011 ++++++++++++++ .../minimatch/dist/cjs/package.json | 3 + .../minimatch/dist/cjs/unescape.js | 24 + .../dist/mjs/assert-valid-pattern.js | 10 + .../node_modules/minimatch/dist/mjs/ast.js | 585 ++++++++ .../minimatch/dist/mjs/brace-expressions.js | 148 ++ .../node_modules/minimatch/dist/mjs/escape.js | 18 + .../node_modules/minimatch/dist/mjs/index.js | 995 ++++++++++++++ .../minimatch/dist/mjs/package.json | 3 + .../minimatch/dist/mjs/unescape.js | 20 + .../node_modules/minimatch/package.json | 86 ++ .../cacache/node_modules/minipass/LICENSE | 15 + .../node_modules/minipass/dist/cjs/index.js | 1028 ++++++++++++++ .../minipass/dist/cjs/package.json | 3 + .../node_modules/minipass/dist/mjs/index.js | 1018 ++++++++++++++ .../minipass/dist/mjs/package.json | 3 + .../node_modules/minipass/package.json | 82 ++ .../node_modules/cacache/package.json | 82 ++ .../node_modules/lru-cache/LICENSE | 0 .../node_modules}/lru-cache/index.js | 0 .../node_modules}/lru-cache/index.mjs | 0 .../node_modules/lru-cache/package.json | 65 +- .../node_modules/make-fetch-happen/LICENSE | 16 + .../make-fetch-happen/lib/agent.js | 0 .../make-fetch-happen/lib/cache/entry.js | 469 +++++++ .../make-fetch-happen/lib/cache/errors.js | 11 + .../make-fetch-happen/lib/cache/index.js | 49 + .../make-fetch-happen/lib/cache/key.js | 17 + .../make-fetch-happen/lib/cache/policy.js | 161 +++ .../make-fetch-happen/lib/dns.js | 0 .../make-fetch-happen/lib/fetch.js | 118 ++ .../make-fetch-happen/lib/index.js | 41 + .../make-fetch-happen/lib/options.js | 54 + .../make-fetch-happen/lib/pipeline.js | 41 + .../make-fetch-happen/lib/remote.js | 121 ++ .../make-fetch-happen/package.json | 78 ++ .../node-gyp/node_modules/minipass/LICENSE | 15 + .../node_modules}/minipass/index.js | 0 .../node_modules}/minipass/index.mjs | 1 - .../node_modules/minipass/package.json | 76 ++ .../normalize-package-data/package.json | 19 +- .../npm-install-checks/lib/index.js | 6 +- .../npm-install-checks/package.json | 6 +- .../node_modules/npm-package-arg/lib/npa.js | 47 +- .../node_modules/npm-package-arg/package.json | 17 +- .../node_modules/npm-packlist/lib/index.js | 34 +- .../node_modules/npm-packlist/package.json | 7 +- 
.../npm-pick-manifest/lib/index.js | 2 +- .../npm-pick-manifest/package.json | 17 +- .../npm/node_modules/npm-profile/package.json | 17 +- .../npm-registry-fetch/package.json | 24 +- deps/npm/node_modules/pacote/lib/registry.js | 4 +- deps/npm/node_modules/pacote/package.json | 38 +- .../path-scurry/dist/cjs/index.js | 23 + .../path-scurry/dist/mjs/index.js | 23 + .../lru-cache/dist/cjs/index.min.js | 2 - .../lru-cache/dist/mjs/index.min.js | 2 - .../npm/node_modules/path-scurry/package.json | 10 +- .../read-package-json/package.json | 18 +- deps/npm/node_modules/sigstore/README.md | 165 --- .../node_modules/sigstore/dist/ca/format.d.ts | 5 - .../node_modules/sigstore/dist/ca/format.js | 20 - .../node_modules/sigstore/dist/ca/index.d.ts | 15 - .../node_modules/sigstore/dist/ca/index.js | 39 - .../sigstore/dist/ca/verify/chain.d.ts | 3 - .../sigstore/dist/ca/verify/index.d.ts | 2 - .../sigstore/dist/ca/verify/sct.d.ts | 3 - .../sigstore/dist/ca/verify/signer.d.ts | 3 - .../sigstore/dist/ca/verify/signer.js | 5 +- .../node_modules/sigstore/dist/cli/index.d.ts | 1 - .../node_modules/sigstore/dist/cli/index.js | 125 -- .../node_modules/sigstore/dist/config.d.ts | 54 - deps/npm/node_modules/sigstore/dist/config.js | 117 +- .../npm/node_modules/sigstore/dist/error.d.ts | 20 - deps/npm/node_modules/sigstore/dist/error.js | 21 +- .../sigstore/dist/external/error.d.ts | 10 - .../sigstore/dist/external/error.js | 21 - .../sigstore/dist/external/fulcio.d.ts | 38 - .../sigstore/dist/external/index.d.ts | 4 - .../sigstore/dist/external/index.js | 26 - .../sigstore/dist/external/rekor.d.ts | 41 - .../sigstore/dist/external/tsa.d.ts | 18 - .../sigstore/dist/identity/ci.d.ts | 11 - .../sigstore/dist/identity/index.d.ts | 30 - .../sigstore/dist/identity/index.js | 51 - .../sigstore/dist/identity/issuer.d.ts | 15 - .../sigstore/dist/identity/issuer.js | 53 - .../sigstore/dist/identity/oauth.d.ts | 26 - .../sigstore/dist/identity/oauth.js | 197 --- .../sigstore/dist/identity/provider.d.ts | 3 - .../npm/node_modules/sigstore/dist/index.d.ts | 2 - deps/npm/node_modules/sigstore/dist/index.js | 57 +- deps/npm/node_modules/sigstore/dist/sign.d.ts | 28 - deps/npm/node_modules/sigstore/dist/sign.js | 120 -- .../sigstore/dist/sigstore-utils.d.ts | 8 - .../sigstore/dist/sigstore-utils.js | 80 -- .../node_modules/sigstore/dist/sigstore.d.ts | 23 - .../node_modules/sigstore/dist/sigstore.js | 112 +- .../sigstore/dist/tlog/format.d.ts | 7 - .../sigstore/dist/tlog/index.d.ts | 23 - .../sigstore/dist/tlog/verify/body.d.ts | 2 - .../sigstore/dist/tlog/verify/checkpoint.js | 148 ++ .../sigstore/dist/tlog/verify/index.d.ts | 2 - .../sigstore/dist/tlog/verify/index.js | 71 +- .../sigstore/dist/tlog/verify/merkle.d.ts | 2 - .../sigstore/dist/tlog/verify/merkle.js | 12 +- .../sigstore/dist/tlog/verify/set.d.ts | 2 - .../sigstore/dist/tlog/verify/set.js | 5 +- .../node_modules/sigstore/dist/tsa/index.d.ts | 13 - .../sigstore/dist/types/fetch.d.ts | 6 - .../sigstore/dist/types/signature.d.ts | 16 - .../sigstore/dist/types/signature.js | 15 - .../sigstore/dist/types/sigstore.js | 27 + .../sigstore/dist/types/sigstore/index.d.ts | 42 - .../sigstore/dist/types/sigstore/index.js | 162 --- .../dist/types/sigstore/serialized.d.ts | 65 - .../dist/types/sigstore/validate.d.ts | 15 - .../sigstore/dist/types/utility.d.ts | 14 - .../sigstore/dist/types/utility.js | 1 - .../sigstore/dist/util/asn1/dump.d.ts | 2 - .../sigstore/dist/util/asn1/error.d.ts | 4 - .../sigstore/dist/util/asn1/index.d.ts | 1 - .../sigstore/dist/util/asn1/length.d.ts 
| 4 - .../sigstore/dist/util/asn1/obj.d.ts | 15 - .../sigstore/dist/util/asn1/parse.d.ts | 7 - .../sigstore/dist/util/asn1/tag.d.ts | 28 - .../sigstore/dist/util/crypto.d.ts | 10 - .../node_modules/sigstore/dist/util/crypto.js | 25 +- .../node_modules/sigstore/dist/util/dsse.d.ts | 2 - .../sigstore/dist/util/encoding.d.ts | 6 - .../sigstore/dist/util/index.d.ts | 9 - .../node_modules/sigstore/dist/util/index.js | 5 +- .../node_modules/sigstore/dist/util/json.d.ts | 1 - .../node_modules/sigstore/dist/util/oidc.d.ts | 1 - .../node_modules/sigstore/dist/util/pem.d.ts | 3 - .../sigstore/dist/util/promise.d.ts | 1 - .../sigstore/dist/util/promise.js | 27 - .../sigstore/dist/util/stream.d.ts | 24 - .../node_modules/sigstore/dist/util/ua.d.ts | 1 - .../node_modules/sigstore/dist/verify.d.ts | 13 - deps/npm/node_modules/sigstore/dist/verify.js | 18 +- .../node_modules/sigstore/dist/x509/cert.d.ts | 48 - .../node_modules/sigstore/dist/x509/ext.d.ts | 42 - .../node_modules/sigstore/dist/x509/sct.d.ts | 26 - .../sigstore/dist/x509/verify.d.ts | 8 - deps/npm/node_modules/sigstore/package.json | 19 +- deps/npm/node_modules/ssri/package.json | 8 +- .../tar/node_modules/minipass/LICENSE | 15 + .../tar/node_modules/minipass/index.js | 702 ++++++++++ .../tar/node_modules/minipass/index.mjs | 700 ++++++++++ .../tar/node_modules/minipass/package.json | 76 ++ deps/npm/node_modules/tuf-js/dist/config.js | 3 +- deps/npm/node_modules/tuf-js/dist/fetcher.js | 4 +- deps/npm/node_modules/tuf-js/dist/updater.js | 4 +- deps/npm/node_modules/tuf-js/package.json | 15 +- deps/npm/node_modules/which/lib/index.js | 10 +- .../which/node_modules/isexe/LICENSE | 15 + .../node_modules/isexe/dist/cjs/index.js | 46 + .../node_modules/isexe/dist/cjs/options.js | 3 + .../node_modules/isexe/dist/cjs/package.json | 3 + .../node_modules/isexe/dist/cjs/posix.js | 67 + .../node_modules/isexe/dist/cjs/win32.js | 62 + .../node_modules/isexe/dist/mjs/index.js | 16 + .../node_modules/isexe/dist/mjs/options.js | 2 + .../node_modules/isexe/dist/mjs/package.json | 3 + .../node_modules/isexe/dist/mjs/posix.js | 62 + .../node_modules/isexe/dist/mjs/win32.js | 57 + .../which/node_modules/isexe/package.json | 96 ++ deps/npm/node_modules/which/package.json | 16 +- deps/npm/package.json | 80 +- .../test/lib/commands/config.js.test.cjs | 8 +- .../test/lib/commands/doctor.js.test.cjs | 6 +- .../test/lib/commands/search.js.test.cjs | 8 + .../tap-snapshots/test/lib/docs.js.test.cjs | 34 - .../test/lib/utils/exit-handler.js.test.cjs | 1 + .../fixtures/libnpmsearch-stream-result.js | 7 + deps/npm/test/fixtures/sandbox.js | 11 - deps/npm/test/lib/commands/audit.js | 40 +- deps/npm/test/lib/commands/run-script.js | 7 +- deps/npm/test/lib/utils/exit-handler.js | 13 +- 509 files changed, 23728 insertions(+), 4061 deletions(-) create mode 100644 deps/npm/node_modules/@npmcli/agent/lib/agents.js create mode 100644 deps/npm/node_modules/@npmcli/agent/lib/dns.js create mode 100644 deps/npm/node_modules/@npmcli/agent/lib/errors.js create mode 100644 deps/npm/node_modules/@npmcli/agent/lib/index.js create mode 100644 deps/npm/node_modules/@npmcli/agent/lib/options.js create mode 100644 deps/npm/node_modules/@npmcli/agent/lib/proxy.js create mode 100644 deps/npm/node_modules/@npmcli/agent/lib/util.js create mode 100644 deps/npm/node_modules/@npmcli/agent/node_modules/agent-base/dist/helpers.js create mode 100644 deps/npm/node_modules/@npmcli/agent/node_modules/agent-base/dist/index.js create mode 100644 
deps/npm/node_modules/@npmcli/agent/node_modules/agent-base/package.json rename deps/npm/node_modules/{depd => @npmcli/agent/node_modules/http-proxy-agent}/LICENSE (92%) create mode 100644 deps/npm/node_modules/@npmcli/agent/node_modules/http-proxy-agent/dist/index.js create mode 100644 deps/npm/node_modules/@npmcli/agent/node_modules/http-proxy-agent/package.json create mode 100644 deps/npm/node_modules/@npmcli/agent/node_modules/https-proxy-agent/dist/index.js create mode 100644 deps/npm/node_modules/@npmcli/agent/node_modules/https-proxy-agent/dist/parse-proxy-response.js create mode 100644 deps/npm/node_modules/@npmcli/agent/node_modules/https-proxy-agent/package.json create mode 100644 deps/npm/node_modules/@npmcli/agent/node_modules/socks-proxy-agent/dist/index.js create mode 100644 deps/npm/node_modules/@npmcli/agent/node_modules/socks-proxy-agent/package.json create mode 100644 deps/npm/node_modules/@npmcli/agent/package.json create mode 100644 deps/npm/node_modules/@sigstore/bundle/LICENSE create mode 100644 deps/npm/node_modules/@sigstore/bundle/dist/build.js create mode 100644 deps/npm/node_modules/@sigstore/bundle/dist/bundle.js create mode 100644 deps/npm/node_modules/@sigstore/bundle/dist/error.js create mode 100644 deps/npm/node_modules/@sigstore/bundle/dist/index.js create mode 100644 deps/npm/node_modules/@sigstore/bundle/dist/serialized.js rename deps/npm/node_modules/{sigstore/dist/identity/provider.js => @sigstore/bundle/dist/utility.js} (100%) rename deps/npm/node_modules/{sigstore/dist/types/sigstore => @sigstore/bundle/dist}/validate.js (53%) create mode 100644 deps/npm/node_modules/@sigstore/bundle/package.json create mode 100644 deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js create mode 100644 deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js create mode 100644 deps/npm/node_modules/@sigstore/sign/LICENSE create mode 100644 deps/npm/node_modules/@sigstore/sign/dist/bundler/base.js create mode 100644 deps/npm/node_modules/@sigstore/sign/dist/bundler/bundle.js create mode 100644 deps/npm/node_modules/@sigstore/sign/dist/bundler/dsse.js create mode 100644 deps/npm/node_modules/@sigstore/sign/dist/bundler/index.js create mode 100644 deps/npm/node_modules/@sigstore/sign/dist/bundler/message.js create mode 100644 deps/npm/node_modules/@sigstore/sign/dist/error.js create mode 100644 deps/npm/node_modules/@sigstore/sign/dist/external/error.js rename deps/npm/node_modules/{sigstore => @sigstore/sign}/dist/external/fulcio.js (95%) rename deps/npm/node_modules/{sigstore => @sigstore/sign}/dist/external/rekor.js (94%) rename deps/npm/node_modules/{sigstore => @sigstore/sign}/dist/external/tsa.js (97%) rename deps/npm/node_modules/{sigstore => @sigstore/sign}/dist/identity/ci.js (90%) create mode 100644 deps/npm/node_modules/@sigstore/sign/dist/identity/index.js rename deps/npm/node_modules/{sigstore/dist/types/sigstore/serialized.js => @sigstore/sign/dist/identity/provider.js} (100%) create mode 100644 deps/npm/node_modules/@sigstore/sign/dist/index.js create mode 100644 deps/npm/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js create mode 100644 deps/npm/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js create mode 100644 deps/npm/node_modules/@sigstore/sign/dist/signer/fulcio/index.js create mode 100644 deps/npm/node_modules/@sigstore/sign/dist/signer/index.js rename deps/npm/node_modules/{sigstore/bin/sigstore.js => @sigstore/sign/dist/signer/signer.js} (82%) mode change 100755 => 100644 
create mode 100644 deps/npm/node_modules/@sigstore/sign/dist/types/fetch.js create mode 100644 deps/npm/node_modules/@sigstore/sign/dist/util/crypto.js create mode 100644 deps/npm/node_modules/@sigstore/sign/dist/util/dsse.js create mode 100644 deps/npm/node_modules/@sigstore/sign/dist/util/encoding.js create mode 100644 deps/npm/node_modules/@sigstore/sign/dist/util/index.js create mode 100644 deps/npm/node_modules/@sigstore/sign/dist/util/json.js rename deps/npm/node_modules/{sigstore => @sigstore/sign}/dist/util/oidc.js (98%) create mode 100644 deps/npm/node_modules/@sigstore/sign/dist/util/pem.js rename deps/npm/node_modules/{sigstore => @sigstore/sign}/dist/util/ua.js (97%) create mode 100644 deps/npm/node_modules/@sigstore/sign/dist/witness/index.js rename deps/npm/node_modules/{sigstore/dist/tlog/index.js => @sigstore/sign/dist/witness/tlog/client.js} (54%) rename deps/npm/node_modules/{sigstore/dist/tlog/format.js => @sigstore/sign/dist/witness/tlog/entry.js} (52%) create mode 100644 deps/npm/node_modules/@sigstore/sign/dist/witness/tlog/index.js rename deps/npm/node_modules/{sigstore/dist/tsa/index.js => @sigstore/sign/dist/witness/tsa/client.js} (74%) create mode 100644 deps/npm/node_modules/@sigstore/sign/dist/witness/tsa/index.js create mode 100644 deps/npm/node_modules/@sigstore/sign/dist/witness/witness.js create mode 100644 deps/npm/node_modules/@sigstore/sign/package.json delete mode 100644 deps/npm/node_modules/depd/History.md delete mode 100644 deps/npm/node_modules/depd/index.js delete mode 100644 deps/npm/node_modules/depd/lib/browser/index.js delete mode 100644 deps/npm/node_modules/depd/package.json rename deps/npm/node_modules/{path-scurry/node_modules => }/lru-cache/dist/cjs/index.js (98%) create mode 100644 deps/npm/node_modules/lru-cache/dist/cjs/index.min.js rename deps/npm/node_modules/{path-scurry/node_modules => }/lru-cache/dist/cjs/package.json (100%) rename deps/npm/node_modules/{path-scurry/node_modules => }/lru-cache/dist/mjs/index.js (98%) create mode 100644 deps/npm/node_modules/lru-cache/dist/mjs/index.min.js rename deps/npm/node_modules/{path-scurry/node_modules => }/lru-cache/dist/mjs/package.json (100%) create mode 100644 deps/npm/node_modules/minipass/dist/cjs/index.js create mode 100644 deps/npm/node_modules/minipass/dist/cjs/package.json create mode 100644 deps/npm/node_modules/minipass/dist/mjs/index.js create mode 100644 deps/npm/node_modules/minipass/dist/mjs/package.json create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/LICENSE.md create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/path.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/read.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/rm.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/write.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/lib/entry-index.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/lib/get.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/lib/index.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/lib/memoization.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/lib/put.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/lib/rm.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/glob.js create mode 100644 
deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/hash-to-segments.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/tmp.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/lib/verify.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/LICENSE create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/index.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/package.json create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/LICENSE create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/README.md create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/package.json create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/bin.d.ts create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/bin.d.ts.map create mode 100755 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/bin.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/bin.js.map create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/glob.d.ts create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/glob.d.ts.map create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/glob.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/glob.js.map create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/has-magic.d.ts create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/has-magic.d.ts.map create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/has-magic.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/has-magic.js.map create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/ignore.d.ts create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/ignore.d.ts.map create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/ignore.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/ignore.js.map create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/index.d.ts create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/index.d.ts.map create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/index.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/index.js.map create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/pattern.d.ts create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/pattern.d.ts.map create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/pattern.js create mode 100644 
deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/pattern.js.map create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/processor.d.ts create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/processor.d.ts.map create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/processor.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/processor.js.map create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/walker.d.ts create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/walker.d.ts.map create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/walker.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/walker.js.map create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/glob.d.ts create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/glob.d.ts.map create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/glob.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/glob.js.map create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/has-magic.d.ts create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/has-magic.d.ts.map create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/has-magic.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/has-magic.js.map create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/ignore.d.ts create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/ignore.d.ts.map create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/ignore.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/ignore.js.map create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/index.d.ts create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/index.d.ts.map create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/index.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/index.js.map create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/package.json create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/pattern.d.ts create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/pattern.d.ts.map create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/pattern.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/pattern.js.map create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/processor.d.ts create mode 100644 
deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/processor.d.ts.map create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/processor.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/processor.js.map create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/walker.d.ts create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/walker.d.ts.map create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/walker.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/walker.js.map create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/package.json create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/LICENSE create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/assert-valid-pattern.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/ast.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/brace-expressions.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/escape.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/index.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/package.json create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/unescape.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/assert-valid-pattern.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/ast.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/brace-expressions.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/escape.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/index.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/package.json create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/unescape.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/package.json create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/LICENSE create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/cjs/index.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/cjs/package.json create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/mjs/index.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/mjs/package.json create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/package.json create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/package.json rename deps/npm/node_modules/{path-scurry => node-gyp}/node_modules/lru-cache/LICENSE (100%) 
rename deps/npm/node_modules/{ => node-gyp/node_modules}/lru-cache/index.js (100%) rename deps/npm/node_modules/{ => node-gyp/node_modules}/lru-cache/index.mjs (100%) rename deps/npm/node_modules/{path-scurry => node-gyp}/node_modules/lru-cache/package.json (55%) create mode 100644 deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE rename deps/npm/node_modules/{ => node-gyp/node_modules}/make-fetch-happen/lib/agent.js (100%) create mode 100644 deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/entry.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/errors.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/index.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/key.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/policy.js rename deps/npm/node_modules/{ => node-gyp/node_modules}/make-fetch-happen/lib/dns.js (100%) create mode 100644 deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/fetch.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/index.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/options.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/pipeline.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/remote.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/package.json create mode 100644 deps/npm/node_modules/node-gyp/node_modules/minipass/LICENSE rename deps/npm/node_modules/{ => node-gyp/node_modules}/minipass/index.js (100%) rename deps/npm/node_modules/{ => node-gyp/node_modules}/minipass/index.mjs (99%) create mode 100644 deps/npm/node_modules/node-gyp/node_modules/minipass/package.json delete mode 100644 deps/npm/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/index.min.js delete mode 100644 deps/npm/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/index.min.js delete mode 100644 deps/npm/node_modules/sigstore/README.md delete mode 100644 deps/npm/node_modules/sigstore/dist/ca/format.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/ca/format.js delete mode 100644 deps/npm/node_modules/sigstore/dist/ca/index.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/ca/index.js delete mode 100644 deps/npm/node_modules/sigstore/dist/ca/verify/chain.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/ca/verify/index.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/ca/verify/sct.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/ca/verify/signer.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/cli/index.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/cli/index.js delete mode 100644 deps/npm/node_modules/sigstore/dist/config.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/error.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/external/error.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/external/error.js delete mode 100644 deps/npm/node_modules/sigstore/dist/external/fulcio.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/external/index.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/external/index.js delete mode 100644 deps/npm/node_modules/sigstore/dist/external/rekor.d.ts delete mode 100644 
deps/npm/node_modules/sigstore/dist/external/tsa.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/identity/ci.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/identity/index.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/identity/index.js delete mode 100644 deps/npm/node_modules/sigstore/dist/identity/issuer.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/identity/issuer.js delete mode 100644 deps/npm/node_modules/sigstore/dist/identity/oauth.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/identity/oauth.js delete mode 100644 deps/npm/node_modules/sigstore/dist/identity/provider.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/index.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/sign.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/sign.js delete mode 100644 deps/npm/node_modules/sigstore/dist/sigstore-utils.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/sigstore-utils.js delete mode 100644 deps/npm/node_modules/sigstore/dist/sigstore.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/tlog/format.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/tlog/index.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/tlog/verify/body.d.ts create mode 100644 deps/npm/node_modules/sigstore/dist/tlog/verify/checkpoint.js delete mode 100644 deps/npm/node_modules/sigstore/dist/tlog/verify/index.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/tlog/verify/merkle.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/tlog/verify/set.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/tsa/index.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/types/fetch.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/types/signature.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/types/signature.js create mode 100644 deps/npm/node_modules/sigstore/dist/types/sigstore.js delete mode 100644 deps/npm/node_modules/sigstore/dist/types/sigstore/index.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/types/sigstore/index.js delete mode 100644 deps/npm/node_modules/sigstore/dist/types/sigstore/serialized.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/types/sigstore/validate.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/types/utility.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/util/asn1/dump.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/util/asn1/error.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/util/asn1/index.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/util/asn1/length.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/util/asn1/obj.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/util/asn1/parse.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/util/asn1/tag.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/util/crypto.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/util/dsse.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/util/encoding.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/util/index.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/util/json.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/util/oidc.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/util/pem.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/util/promise.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/util/promise.js delete mode 100644 
deps/npm/node_modules/sigstore/dist/util/stream.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/util/ua.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/verify.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/x509/cert.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/x509/ext.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/x509/sct.d.ts delete mode 100644 deps/npm/node_modules/sigstore/dist/x509/verify.d.ts create mode 100644 deps/npm/node_modules/tar/node_modules/minipass/LICENSE create mode 100644 deps/npm/node_modules/tar/node_modules/minipass/index.js create mode 100644 deps/npm/node_modules/tar/node_modules/minipass/index.mjs create mode 100644 deps/npm/node_modules/tar/node_modules/minipass/package.json create mode 100644 deps/npm/node_modules/which/node_modules/isexe/LICENSE create mode 100644 deps/npm/node_modules/which/node_modules/isexe/dist/cjs/index.js create mode 100644 deps/npm/node_modules/which/node_modules/isexe/dist/cjs/options.js create mode 100644 deps/npm/node_modules/which/node_modules/isexe/dist/cjs/package.json create mode 100644 deps/npm/node_modules/which/node_modules/isexe/dist/cjs/posix.js create mode 100644 deps/npm/node_modules/which/node_modules/isexe/dist/cjs/win32.js create mode 100644 deps/npm/node_modules/which/node_modules/isexe/dist/mjs/index.js create mode 100644 deps/npm/node_modules/which/node_modules/isexe/dist/mjs/options.js create mode 100644 deps/npm/node_modules/which/node_modules/isexe/dist/mjs/package.json create mode 100644 deps/npm/node_modules/which/node_modules/isexe/dist/mjs/posix.js create mode 100644 deps/npm/node_modules/which/node_modules/isexe/dist/mjs/win32.js create mode 100644 deps/npm/node_modules/which/node_modules/isexe/package.json diff --git a/deps/npm/docs/content/commands/npm-ls.md b/deps/npm/docs/content/commands/npm-ls.md index 5a8056f18a8589..3fd67ec372fd89 100644 --- a/deps/npm/docs/content/commands/npm-ls.md +++ b/deps/npm/docs/content/commands/npm-ls.md @@ -27,7 +27,7 @@ packages will *also* show the paths to the specified packages. For example, running `npm ls promzard` in npm's source tree will show: ```bash -npm@9.8.1 /path/to/npm +npm@10.0.0 /path/to/npm └─┬ init-package-json@0.0.4 └── promzard@0.1.5 ``` diff --git a/deps/npm/docs/content/commands/npm-pkg.md b/deps/npm/docs/content/commands/npm-pkg.md index 79f2e9647eecd2..f668c562affd31 100644 --- a/deps/npm/docs/content/commands/npm-pkg.md +++ b/deps/npm/docs/content/commands/npm-pkg.md @@ -269,5 +269,4 @@ This value is not exported to the environment for child processes. * [npm install](/commands/npm-install) * [npm init](/commands/npm-init) * [npm config](/commands/npm-config) -* [npm set-script](/commands/npm-set-script) * [workspaces](/using-npm/workspaces) diff --git a/deps/npm/docs/content/commands/npm.md b/deps/npm/docs/content/commands/npm.md index 05d229bd3f6dcb..5923d67d48103d 100644 --- a/deps/npm/docs/content/commands/npm.md +++ b/deps/npm/docs/content/commands/npm.md @@ -14,7 +14,7 @@ Note: This command is unaware of workspaces. 
### Version -9.8.1 +10.0.0 ### Description diff --git a/deps/npm/docs/content/configuring-npm/package-json.md b/deps/npm/docs/content/configuring-npm/package-json.md index 28f0ad26bf8e6c..630ad453196a0a 100644 --- a/deps/npm/docs/content/configuring-npm/package-json.md +++ b/deps/npm/docs/content/configuring-npm/package-json.md @@ -93,8 +93,10 @@ It should look like this: ```json { - "url" : "https://github.com/owner/project/issues", - "email" : "project@hostname.com" + "bugs": { + "url": "https://github.com/owner/project/issues", + "email": "project@hostname.com" + } } ``` @@ -285,6 +287,7 @@ Certain files are always included, regardless of settings: * `README` * `LICENSE` / `LICENCE` * The file in the "main" field +* The file(s) in the "bin" field `README` & `LICENSE` can have any case and extension. diff --git a/deps/npm/docs/content/using-npm/config.md b/deps/npm/docs/content/using-npm/config.md index 9d1f02c42639e1..96b35edced7b3f 100644 --- a/deps/npm/docs/content/using-npm/config.md +++ b/deps/npm/docs/content/using-npm/config.md @@ -1775,20 +1775,6 @@ registry-scoped "certfile" path like -#### `ci-name` - -* Default: The name of the current CI system, or `null` when not on a known CI - platform. -* Type: null or String -* DEPRECATED: This config is deprecated and will not be changeable in future - version of npm. - -The name of a continuous integration system. If not set explicitly, npm will -detect the current CI environment using the -[`ci-info`](http://npm.im/ci-info) module. - - - #### `dev` * Default: false @@ -1949,20 +1935,6 @@ Alias for --package-lock -#### `tmp` - -* Default: The value returned by the Node.js `os.tmpdir()` method - -* Type: Path -* DEPRECATED: This setting is no longer used. npm stores temporary files in a - special location in the cache, and they are managed by - [`cacache`](http://npm.im/cacache). - -Historically, the location where temporary files were stored. No longer -relevant. - - - ### See also * [npm config](/commands/npm-config) diff --git a/deps/npm/docs/output/commands/npm-ls.html b/deps/npm/docs/output/commands/npm-ls.html index 9be585ffa291ee..ba3ecd4d17706a 100644 --- a/deps/npm/docs/output/commands/npm-ls.html +++ b/deps/npm/docs/output/commands/npm-ls.html @@ -160,7 +160,7 @@

Description

the results to only the paths to the packages named. Note that nested packages will also show the paths to the specified packages. For example, running npm ls promzard in npm's source tree will show:

-npm@9.8.1 /path/to/npm
+npm@10.0.0 /path/to/npm
 └─┬ init-package-json@0.0.4
   └── promzard@0.1.5
 
diff --git a/deps/npm/docs/output/commands/npm-pkg.html b/deps/npm/docs/output/commands/npm-pkg.html index 0a0b84107b7587..d60ca0b0f4aac7 100644 --- a/deps/npm/docs/output/commands/npm-pkg.html +++ b/deps/npm/docs/output/commands/npm-pkg.html @@ -346,7 +346,6 @@

See Also

  • npm install
  • npm init
  • npm config
-  • npm set-script
  • workspaces
diff --git a/deps/npm/docs/output/commands/npm.html index 41932731f72d39..ab1af49fb59e95 100644 --- a/deps/npm/docs/output/commands/npm.html +++ b/deps/npm/docs/output/commands/npm.html @@ -150,7 +150,7 @@

    Table of contents

    Note: This command is unaware of workspaces.

    Version

-9.8.1
+10.0.0

    Description

npm is the package manager for the Node JavaScript platform. It puts modules in place so that node can find them, and manages dependency
diff --git a/deps/npm/docs/output/configuring-npm/package-json.html index 64e062223626a4..712708ef406391 100644 --- a/deps/npm/docs/output/configuring-npm/package-json.html +++ b/deps/npm/docs/output/configuring-npm/package-json.html @@ -211,8 +211,10 @@

    bugs

    issues with your package.

    It should look like this:

    {
    -  "url" : "https://github.com/owner/project/issues",
    -  "email" : "project@hostname.com"
    +  "bugs": {
    +    "url": "https://github.com/owner/project/issues",
    +    "email": "project@hostname.com"
    +  }
     }
     

You can specify either one or both values. If you want to provide only a
@@ -359,6 +361,7 @@

    files

  • README
  • LICENSE / LICENCE
  • The file in the "main" field
  • +
  • The file(s) in the "bin" field
    README & LICENSE can have any case and extension.
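
    For illustration, a minimal package.json sketch (the name and paths are hypothetical, not taken from this patch): even though only `lib/` is listed in "files", the "main" file and the "bin" targets are still packed, per the list above.

    ```json
    {
      "name": "example-pkg",
      "version": "1.0.0",
      "files": ["lib/"],
      "main": "lib/index.js",
      "bin": {
        "example-pkg": "bin/cli.js"
      }
    }
    ```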

    Conversely, some files are always ignored:

    diff --git a/deps/npm/docs/output/using-npm/config.html b/deps/npm/docs/output/using-npm/config.html index 440c4f29e97f10..0bb1634d1e5418 100644 --- a/deps/npm/docs/output/using-npm/config.html +++ b/deps/npm/docs/output/using-npm/config.html @@ -142,7 +142,7 @@

    config

    Table of contents

    -
    +

    Description

    @@ -1418,17 +1418,6 @@

    cert

    It is not the path to a certificate file, though you can set a registry-scoped "certfile" path like "//other-registry.tld/:certfile=/path/to/cert.pem".
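
    For illustration, an .npmrc sketch contrasting the two (placeholder values, drawn from the examples in this document):

    ```ini
    ; `cert` holds the certificate *contents*, not a file path
    cert="-----BEGIN CERTIFICATE-----\nXXXX\n-----END CERTIFICATE-----"
    ; a registry-scoped "certfile" entry, by contrast, points at a file on disk
    //other-registry.tld/:certfile=/path/to/cert.pem
    ```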

    -ci-name

    -  • Default: The name of the current CI system, or null when not on a known CI platform.
    -  • Type: null or String
    -  • DEPRECATED: This config is deprecated and will not be changeable in future version of npm.

    -The name of a continuous integration system. If not set explicitly, npm will detect the current CI environment using the ci-info module.

    dev

    • Default: false
    @@ -1543,17 +1532,6 @@

      shrinkwrap

    • DEPRECATED: Use the --package-lock setting instead.

    Alias for --package-lock

    -tmp

    -  • Default: The value returned by the Node.js os.tmpdir() method https://nodejs.org/api/os.html#os_os_tmpdir
    -  • Type: Path
    -  • DEPRECATED: This setting is no longer used. npm stores temporary files in a special location in the cache, and they are managed by cacache.

    -Historically, the location where temporary files were stored. No longer relevant.

    See also

    • npm config
    • diff --git a/deps/npm/lib/commands/audit.js b/deps/npm/lib/commands/audit.js index 500620f2cd01bd..de5483109d598e 100644 --- a/deps/npm/lib/commands/audit.js +++ b/deps/npm/lib/commands/audit.js @@ -4,7 +4,7 @@ const localeCompare = require('@isaacs/string-locale-compare')('en') const npa = require('npm-package-arg') const pacote = require('pacote') const pMap = require('p-map') -const { sigstore } = require('sigstore') +const tufClient = require('@sigstore/tuf') const ArboristWorkspaceCmd = require('../arborist-cmd.js') const auditError = require('../utils/audit-error.js') @@ -38,8 +38,8 @@ class VerifySignatures { throw new Error('found no installed dependencies to audit') } - const tuf = await sigstore.tuf.client({ - tufCachePath: this.opts.tufCache, + const tuf = await tufClient.initTUF({ + cachePath: this.opts.tufCache, retry: this.opts.retry, timeout: this.opts.timeout, }) diff --git a/deps/npm/lib/commands/run-script.js b/deps/npm/lib/commands/run-script.js index 13efdde750a825..75f00a46b84e9f 100644 --- a/deps/npm/lib/commands/run-script.js +++ b/deps/npm/lib/commands/run-script.js @@ -207,24 +207,10 @@ class RunScript extends BaseCommand { log.error(err) log.error(` in workspace: ${pkg._id || pkg.name}`) log.error(` at location: ${workspacePath}`) - - const scriptMissing = err.message.startsWith('Missing script') - - // avoids exiting with error code in case there's scripts missing - // in some workspaces since other scripts might have succeeded - if (!scriptMissing) { - process.exitCode = 1 - } - - return scriptMissing + process.exitCode = 1 }) res.push(runResult) } - - // in case **all** tests are missing, then it should exit with error code - if (res.every(Boolean)) { - throw new Error(`Missing script: ${args[0]}`) - } } async listWorkspaces (args, filters) { diff --git a/deps/npm/lib/commands/search.js b/deps/npm/lib/commands/search.js index 5fb0a12bce1386..85ff7db2b78840 100644 --- a/deps/npm/lib/commands/search.js +++ b/deps/npm/lib/commands/search.js @@ -68,6 +68,10 @@ class Search extends BaseCommand { let anyOutput = false class FilterStream extends Minipass { + constructor () { + super({ objectMode: true }) + } + write (pkg) { if (filter(pkg, opts.include, opts.exclude)) { super.write(pkg) diff --git a/deps/npm/lib/utils/format-search-stream.js b/deps/npm/lib/utils/format-search-stream.js index 762dea90859d19..ed753c27aabc83 100644 --- a/deps/npm/lib/utils/format-search-stream.js +++ b/deps/npm/lib/utils/format-search-stream.js @@ -143,7 +143,7 @@ function highlightSearchTerms (str, terms) { function normalizePackage (data, opts) { return { name: ansiTrim(data.name), - description: ansiTrim(data.description), + description: ansiTrim(data.description ?? ''), author: data.maintainers.map((m) => `=${ansiTrim(m.username)}`).join(' '), keywords: Array.isArray(data.keywords) ? 
data.keywords.map(ansiTrim).join(' ') diff --git a/deps/npm/man/man1/npm-access.1 b/deps/npm/man/man1/npm-access.1 index b6266e1c49ba22..2593dd51e38f26 100644 --- a/deps/npm/man/man1/npm-access.1 +++ b/deps/npm/man/man1/npm-access.1 @@ -1,4 +1,4 @@ -.TH "NPM-ACCESS" "1" "July 2023" "" "" +.TH "NPM-ACCESS" "1" "August 2023" "" "" .SH "NAME" \fBnpm-access\fR - Set access level on published packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-adduser.1 b/deps/npm/man/man1/npm-adduser.1 index c38b6251f94abe..7de144d726a746 100644 --- a/deps/npm/man/man1/npm-adduser.1 +++ b/deps/npm/man/man1/npm-adduser.1 @@ -1,4 +1,4 @@ -.TH "NPM-ADDUSER" "1" "July 2023" "" "" +.TH "NPM-ADDUSER" "1" "August 2023" "" "" .SH "NAME" \fBnpm-adduser\fR - Add a registry user account .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-audit.1 b/deps/npm/man/man1/npm-audit.1 index 35fb73e57860c5..d17652e280d1e2 100644 --- a/deps/npm/man/man1/npm-audit.1 +++ b/deps/npm/man/man1/npm-audit.1 @@ -1,4 +1,4 @@ -.TH "NPM-AUDIT" "1" "July 2023" "" "" +.TH "NPM-AUDIT" "1" "August 2023" "" "" .SH "NAME" \fBnpm-audit\fR - Run a security audit .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-bugs.1 b/deps/npm/man/man1/npm-bugs.1 index 8fac5248b8a812..b09652e73bbab8 100644 --- a/deps/npm/man/man1/npm-bugs.1 +++ b/deps/npm/man/man1/npm-bugs.1 @@ -1,4 +1,4 @@ -.TH "NPM-BUGS" "1" "July 2023" "" "" +.TH "NPM-BUGS" "1" "August 2023" "" "" .SH "NAME" \fBnpm-bugs\fR - Report bugs for a package in a web browser .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-cache.1 b/deps/npm/man/man1/npm-cache.1 index f16723a031d50b..3aa23197273f33 100644 --- a/deps/npm/man/man1/npm-cache.1 +++ b/deps/npm/man/man1/npm-cache.1 @@ -1,4 +1,4 @@ -.TH "NPM-CACHE" "1" "July 2023" "" "" +.TH "NPM-CACHE" "1" "August 2023" "" "" .SH "NAME" \fBnpm-cache\fR - Manipulates packages cache .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-ci.1 b/deps/npm/man/man1/npm-ci.1 index a8c641634a7a76..2764435bbeec7e 100644 --- a/deps/npm/man/man1/npm-ci.1 +++ b/deps/npm/man/man1/npm-ci.1 @@ -1,4 +1,4 @@ -.TH "NPM-CI" "1" "July 2023" "" "" +.TH "NPM-CI" "1" "August 2023" "" "" .SH "NAME" \fBnpm-ci\fR - Clean install a project .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-completion.1 b/deps/npm/man/man1/npm-completion.1 index fbf5a68626d391..044de420ba8ad4 100644 --- a/deps/npm/man/man1/npm-completion.1 +++ b/deps/npm/man/man1/npm-completion.1 @@ -1,4 +1,4 @@ -.TH "NPM-COMPLETION" "1" "July 2023" "" "" +.TH "NPM-COMPLETION" "1" "August 2023" "" "" .SH "NAME" \fBnpm-completion\fR - Tab Completion for npm .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-config.1 b/deps/npm/man/man1/npm-config.1 index 77fd28de8f05f9..cd1138c20d7f7c 100644 --- a/deps/npm/man/man1/npm-config.1 +++ b/deps/npm/man/man1/npm-config.1 @@ -1,4 +1,4 @@ -.TH "NPM-CONFIG" "1" "July 2023" "" "" +.TH "NPM-CONFIG" "1" "August 2023" "" "" .SH "NAME" \fBnpm-config\fR - Manage the npm configuration files .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-dedupe.1 b/deps/npm/man/man1/npm-dedupe.1 index bb97e329c50bce..4705eb5140a0a2 100644 --- a/deps/npm/man/man1/npm-dedupe.1 +++ b/deps/npm/man/man1/npm-dedupe.1 @@ -1,4 +1,4 @@ -.TH "NPM-DEDUPE" "1" "July 2023" "" "" +.TH "NPM-DEDUPE" "1" "August 2023" "" "" .SH "NAME" \fBnpm-dedupe\fR - Reduce duplication in the package tree .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-deprecate.1 b/deps/npm/man/man1/npm-deprecate.1 index 36b97e30f0aa02..c6831d93ecd84b 100644 --- a/deps/npm/man/man1/npm-deprecate.1 +++ b/deps/npm/man/man1/npm-deprecate.1 @@ -1,4 
+1,4 @@ -.TH "NPM-DEPRECATE" "1" "July 2023" "" "" +.TH "NPM-DEPRECATE" "1" "August 2023" "" "" .SH "NAME" \fBnpm-deprecate\fR - Deprecate a version of a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-diff.1 b/deps/npm/man/man1/npm-diff.1 index 99bbc2fe01d0ca..7ded467b1198c2 100644 --- a/deps/npm/man/man1/npm-diff.1 +++ b/deps/npm/man/man1/npm-diff.1 @@ -1,4 +1,4 @@ -.TH "NPM-DIFF" "1" "July 2023" "" "" +.TH "NPM-DIFF" "1" "August 2023" "" "" .SH "NAME" \fBnpm-diff\fR - The registry diff command .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-dist-tag.1 b/deps/npm/man/man1/npm-dist-tag.1 index 16ebcdbf0311d3..d592e6508f58a2 100644 --- a/deps/npm/man/man1/npm-dist-tag.1 +++ b/deps/npm/man/man1/npm-dist-tag.1 @@ -1,4 +1,4 @@ -.TH "NPM-DIST-TAG" "1" "July 2023" "" "" +.TH "NPM-DIST-TAG" "1" "August 2023" "" "" .SH "NAME" \fBnpm-dist-tag\fR - Modify package distribution tags .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-docs.1 b/deps/npm/man/man1/npm-docs.1 index 779b89157c4e81..23524867cee64b 100644 --- a/deps/npm/man/man1/npm-docs.1 +++ b/deps/npm/man/man1/npm-docs.1 @@ -1,4 +1,4 @@ -.TH "NPM-DOCS" "1" "July 2023" "" "" +.TH "NPM-DOCS" "1" "August 2023" "" "" .SH "NAME" \fBnpm-docs\fR - Open documentation for a package in a web browser .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-doctor.1 b/deps/npm/man/man1/npm-doctor.1 index a1eef652698146..11aa1eb02da26e 100644 --- a/deps/npm/man/man1/npm-doctor.1 +++ b/deps/npm/man/man1/npm-doctor.1 @@ -1,4 +1,4 @@ -.TH "NPM-DOCTOR" "1" "July 2023" "" "" +.TH "NPM-DOCTOR" "1" "August 2023" "" "" .SH "NAME" \fBnpm-doctor\fR - Check your npm environment .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-edit.1 b/deps/npm/man/man1/npm-edit.1 index ee7b76bd9d7dec..98c660d624fc9a 100644 --- a/deps/npm/man/man1/npm-edit.1 +++ b/deps/npm/man/man1/npm-edit.1 @@ -1,4 +1,4 @@ -.TH "NPM-EDIT" "1" "July 2023" "" "" +.TH "NPM-EDIT" "1" "August 2023" "" "" .SH "NAME" \fBnpm-edit\fR - Edit an installed package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-exec.1 b/deps/npm/man/man1/npm-exec.1 index 9e6af781c52267..fd6bc243de5c8c 100644 --- a/deps/npm/man/man1/npm-exec.1 +++ b/deps/npm/man/man1/npm-exec.1 @@ -1,4 +1,4 @@ -.TH "NPM-EXEC" "1" "July 2023" "" "" +.TH "NPM-EXEC" "1" "August 2023" "" "" .SH "NAME" \fBnpm-exec\fR - Run a command from a local or remote npm package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-explain.1 b/deps/npm/man/man1/npm-explain.1 index 9985e3cb482836..e986043503710b 100644 --- a/deps/npm/man/man1/npm-explain.1 +++ b/deps/npm/man/man1/npm-explain.1 @@ -1,4 +1,4 @@ -.TH "NPM-EXPLAIN" "1" "July 2023" "" "" +.TH "NPM-EXPLAIN" "1" "August 2023" "" "" .SH "NAME" \fBnpm-explain\fR - Explain installed packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-explore.1 b/deps/npm/man/man1/npm-explore.1 index ee3ee6b7d64dc8..ea45239d095afd 100644 --- a/deps/npm/man/man1/npm-explore.1 +++ b/deps/npm/man/man1/npm-explore.1 @@ -1,4 +1,4 @@ -.TH "NPM-EXPLORE" "1" "July 2023" "" "" +.TH "NPM-EXPLORE" "1" "August 2023" "" "" .SH "NAME" \fBnpm-explore\fR - Browse an installed package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-find-dupes.1 b/deps/npm/man/man1/npm-find-dupes.1 index 9d373635de18be..4f0e08327d6578 100644 --- a/deps/npm/man/man1/npm-find-dupes.1 +++ b/deps/npm/man/man1/npm-find-dupes.1 @@ -1,4 +1,4 @@ -.TH "NPM-FIND-DUPES" "1" "July 2023" "" "" +.TH "NPM-FIND-DUPES" "1" "August 2023" "" "" .SH "NAME" \fBnpm-find-dupes\fR - Find duplication in the package tree .SS "Synopsis" diff --git 
a/deps/npm/man/man1/npm-fund.1 b/deps/npm/man/man1/npm-fund.1 index 794d0431b4ed79..5d574502589a58 100644 --- a/deps/npm/man/man1/npm-fund.1 +++ b/deps/npm/man/man1/npm-fund.1 @@ -1,4 +1,4 @@ -.TH "NPM-FUND" "1" "July 2023" "" "" +.TH "NPM-FUND" "1" "August 2023" "" "" .SH "NAME" \fBnpm-fund\fR - Retrieve funding information .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-help-search.1 b/deps/npm/man/man1/npm-help-search.1 index 0f85ec27c96f65..f848af56068fa8 100644 --- a/deps/npm/man/man1/npm-help-search.1 +++ b/deps/npm/man/man1/npm-help-search.1 @@ -1,4 +1,4 @@ -.TH "NPM-HELP-SEARCH" "1" "July 2023" "" "" +.TH "NPM-HELP-SEARCH" "1" "August 2023" "" "" .SH "NAME" \fBnpm-help-search\fR - Search npm help documentation .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-help.1 b/deps/npm/man/man1/npm-help.1 index 9226fac417504f..4e8c5490d8716f 100644 --- a/deps/npm/man/man1/npm-help.1 +++ b/deps/npm/man/man1/npm-help.1 @@ -1,4 +1,4 @@ -.TH "NPM-HELP" "1" "July 2023" "" "" +.TH "NPM-HELP" "1" "August 2023" "" "" .SH "NAME" \fBnpm-help\fR - Get help on npm .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-hook.1 b/deps/npm/man/man1/npm-hook.1 index df6ff9f56f0d66..5209a9949ea4ab 100644 --- a/deps/npm/man/man1/npm-hook.1 +++ b/deps/npm/man/man1/npm-hook.1 @@ -1,4 +1,4 @@ -.TH "NPM-HOOK" "1" "July 2023" "" "" +.TH "NPM-HOOK" "1" "August 2023" "" "" .SH "NAME" \fBnpm-hook\fR - Manage registry hooks .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-init.1 b/deps/npm/man/man1/npm-init.1 index 7a6722bea212f7..33f66faa9b0d6a 100644 --- a/deps/npm/man/man1/npm-init.1 +++ b/deps/npm/man/man1/npm-init.1 @@ -1,4 +1,4 @@ -.TH "NPM-INIT" "1" "July 2023" "" "" +.TH "NPM-INIT" "1" "August 2023" "" "" .SH "NAME" \fBnpm-init\fR - Create a package.json file .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-install-ci-test.1 b/deps/npm/man/man1/npm-install-ci-test.1 index 306c5e3e9b6895..c94907639d07a4 100644 --- a/deps/npm/man/man1/npm-install-ci-test.1 +++ b/deps/npm/man/man1/npm-install-ci-test.1 @@ -1,4 +1,4 @@ -.TH "NPM-INSTALL-CI-TEST" "1" "July 2023" "" "" +.TH "NPM-INSTALL-CI-TEST" "1" "August 2023" "" "" .SH "NAME" \fBnpm-install-ci-test\fR - Install a project with a clean slate and run tests .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-install-test.1 b/deps/npm/man/man1/npm-install-test.1 index 47dfcea404dcd3..40ba63ef57f44e 100644 --- a/deps/npm/man/man1/npm-install-test.1 +++ b/deps/npm/man/man1/npm-install-test.1 @@ -1,4 +1,4 @@ -.TH "NPM-INSTALL-TEST" "1" "July 2023" "" "" +.TH "NPM-INSTALL-TEST" "1" "August 2023" "" "" .SH "NAME" \fBnpm-install-test\fR - Install package(s) and run tests .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-install.1 b/deps/npm/man/man1/npm-install.1 index 1e93c6dba476e3..84dc7ba204e761 100644 --- a/deps/npm/man/man1/npm-install.1 +++ b/deps/npm/man/man1/npm-install.1 @@ -1,4 +1,4 @@ -.TH "NPM-INSTALL" "1" "July 2023" "" "" +.TH "NPM-INSTALL" "1" "August 2023" "" "" .SH "NAME" \fBnpm-install\fR - Install a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-link.1 b/deps/npm/man/man1/npm-link.1 index 9494902cf2f18a..6a4214c1cc2cab 100644 --- a/deps/npm/man/man1/npm-link.1 +++ b/deps/npm/man/man1/npm-link.1 @@ -1,4 +1,4 @@ -.TH "NPM-LINK" "1" "July 2023" "" "" +.TH "NPM-LINK" "1" "August 2023" "" "" .SH "NAME" \fBnpm-link\fR - Symlink a package folder .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-login.1 b/deps/npm/man/man1/npm-login.1 index fb07b4981e49a9..11e6707e2d3115 100644 --- a/deps/npm/man/man1/npm-login.1 +++ b/deps/npm/man/man1/npm-login.1 @@ 
-1,4 +1,4 @@ -.TH "NPM-LOGIN" "1" "July 2023" "" "" +.TH "NPM-LOGIN" "1" "August 2023" "" "" .SH "NAME" \fBnpm-login\fR - Login to a registry user account .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-logout.1 b/deps/npm/man/man1/npm-logout.1 index 9ee817a430f1fd..803dedd52a96bc 100644 --- a/deps/npm/man/man1/npm-logout.1 +++ b/deps/npm/man/man1/npm-logout.1 @@ -1,4 +1,4 @@ -.TH "NPM-LOGOUT" "1" "July 2023" "" "" +.TH "NPM-LOGOUT" "1" "August 2023" "" "" .SH "NAME" \fBnpm-logout\fR - Log out of the registry .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-ls.1 b/deps/npm/man/man1/npm-ls.1 index af399edb102b6f..2561137d9091b1 100644 --- a/deps/npm/man/man1/npm-ls.1 +++ b/deps/npm/man/man1/npm-ls.1 @@ -1,4 +1,4 @@ -.TH "NPM-LS" "1" "July 2023" "" "" +.TH "NPM-LS" "1" "August 2023" "" "" .SH "NAME" \fBnpm-ls\fR - List installed packages .SS "Synopsis" @@ -20,7 +20,7 @@ Positional arguments are \fBname@version-range\fR identifiers, which will limit .P .RS 2 .nf -npm@9.8.1 /path/to/npm +npm@10.0.0 /path/to/npm └─┬ init-package-json@0.0.4 └── promzard@0.1.5 .fi diff --git a/deps/npm/man/man1/npm-org.1 b/deps/npm/man/man1/npm-org.1 index f4584893ab84da..56b7c5b6f830dd 100644 --- a/deps/npm/man/man1/npm-org.1 +++ b/deps/npm/man/man1/npm-org.1 @@ -1,4 +1,4 @@ -.TH "NPM-ORG" "1" "July 2023" "" "" +.TH "NPM-ORG" "1" "August 2023" "" "" .SH "NAME" \fBnpm-org\fR - Manage orgs .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-outdated.1 b/deps/npm/man/man1/npm-outdated.1 index 0c5d218eaa3526..e95ebea296a0f1 100644 --- a/deps/npm/man/man1/npm-outdated.1 +++ b/deps/npm/man/man1/npm-outdated.1 @@ -1,4 +1,4 @@ -.TH "NPM-OUTDATED" "1" "July 2023" "" "" +.TH "NPM-OUTDATED" "1" "August 2023" "" "" .SH "NAME" \fBnpm-outdated\fR - Check for outdated packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-owner.1 b/deps/npm/man/man1/npm-owner.1 index b2d19405e83ca5..4e5c11e4667b64 100644 --- a/deps/npm/man/man1/npm-owner.1 +++ b/deps/npm/man/man1/npm-owner.1 @@ -1,4 +1,4 @@ -.TH "NPM-OWNER" "1" "July 2023" "" "" +.TH "NPM-OWNER" "1" "August 2023" "" "" .SH "NAME" \fBnpm-owner\fR - Manage package owners .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-pack.1 b/deps/npm/man/man1/npm-pack.1 index 38869efc2e3f26..a4c9c43c0bc796 100644 --- a/deps/npm/man/man1/npm-pack.1 +++ b/deps/npm/man/man1/npm-pack.1 @@ -1,4 +1,4 @@ -.TH "NPM-PACK" "1" "July 2023" "" "" +.TH "NPM-PACK" "1" "August 2023" "" "" .SH "NAME" \fBnpm-pack\fR - Create a tarball from a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-ping.1 b/deps/npm/man/man1/npm-ping.1 index fdbc131fba1438..5508a054767855 100644 --- a/deps/npm/man/man1/npm-ping.1 +++ b/deps/npm/man/man1/npm-ping.1 @@ -1,4 +1,4 @@ -.TH "NPM-PING" "1" "July 2023" "" "" +.TH "NPM-PING" "1" "August 2023" "" "" .SH "NAME" \fBnpm-ping\fR - Ping npm registry .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-pkg.1 b/deps/npm/man/man1/npm-pkg.1 index 806a5ae62bac31..0ab2b402a0cedf 100644 --- a/deps/npm/man/man1/npm-pkg.1 +++ b/deps/npm/man/man1/npm-pkg.1 @@ -1,4 +1,4 @@ -.TH "NPM-PKG" "1" "July 2023" "" "" +.TH "NPM-PKG" "1" "August 2023" "" "" .SH "NAME" \fBnpm-pkg\fR - Manages your package.json .SS "Synopsis" @@ -282,7 +282,5 @@ npm help init .IP \(bu 4 npm help config .IP \(bu 4 -npm help set-script -.IP \(bu 4 npm help workspaces .RE 0 diff --git a/deps/npm/man/man1/npm-prefix.1 b/deps/npm/man/man1/npm-prefix.1 index 764e9b9dc31ff4..e7b4a7ebe16570 100644 --- a/deps/npm/man/man1/npm-prefix.1 +++ b/deps/npm/man/man1/npm-prefix.1 @@ -1,4 +1,4 @@ -.TH "NPM-PREFIX" "1" "July 
2023" "" "" +.TH "NPM-PREFIX" "1" "August 2023" "" "" .SH "NAME" \fBnpm-prefix\fR - Display prefix .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-profile.1 b/deps/npm/man/man1/npm-profile.1 index 47c7d2eca05841..cb061712872765 100644 --- a/deps/npm/man/man1/npm-profile.1 +++ b/deps/npm/man/man1/npm-profile.1 @@ -1,4 +1,4 @@ -.TH "NPM-PROFILE" "1" "July 2023" "" "" +.TH "NPM-PROFILE" "1" "August 2023" "" "" .SH "NAME" \fBnpm-profile\fR - Change settings on your registry profile .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-prune.1 b/deps/npm/man/man1/npm-prune.1 index fd4492f40845ed..8936f1801cb34c 100644 --- a/deps/npm/man/man1/npm-prune.1 +++ b/deps/npm/man/man1/npm-prune.1 @@ -1,4 +1,4 @@ -.TH "NPM-PRUNE" "1" "July 2023" "" "" +.TH "NPM-PRUNE" "1" "August 2023" "" "" .SH "NAME" \fBnpm-prune\fR - Remove extraneous packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-publish.1 b/deps/npm/man/man1/npm-publish.1 index 888977f67626f3..1437b97676d163 100644 --- a/deps/npm/man/man1/npm-publish.1 +++ b/deps/npm/man/man1/npm-publish.1 @@ -1,4 +1,4 @@ -.TH "NPM-PUBLISH" "1" "July 2023" "" "" +.TH "NPM-PUBLISH" "1" "August 2023" "" "" .SH "NAME" \fBnpm-publish\fR - Publish a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-query.1 b/deps/npm/man/man1/npm-query.1 index e8bda254f3e19c..6bd12878bcf78f 100644 --- a/deps/npm/man/man1/npm-query.1 +++ b/deps/npm/man/man1/npm-query.1 @@ -1,4 +1,4 @@ -.TH "NPM-QUERY" "1" "July 2023" "" "" +.TH "NPM-QUERY" "1" "August 2023" "" "" .SH "NAME" \fBnpm-query\fR - Dependency selector query .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-rebuild.1 b/deps/npm/man/man1/npm-rebuild.1 index 4d7644fa5dabaa..5ce8aa959e5d2d 100644 --- a/deps/npm/man/man1/npm-rebuild.1 +++ b/deps/npm/man/man1/npm-rebuild.1 @@ -1,4 +1,4 @@ -.TH "NPM-REBUILD" "1" "July 2023" "" "" +.TH "NPM-REBUILD" "1" "August 2023" "" "" .SH "NAME" \fBnpm-rebuild\fR - Rebuild a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-repo.1 b/deps/npm/man/man1/npm-repo.1 index 233ae1c8def62b..3a838540039e2d 100644 --- a/deps/npm/man/man1/npm-repo.1 +++ b/deps/npm/man/man1/npm-repo.1 @@ -1,4 +1,4 @@ -.TH "NPM-REPO" "1" "July 2023" "" "" +.TH "NPM-REPO" "1" "August 2023" "" "" .SH "NAME" \fBnpm-repo\fR - Open package repository page in the browser .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-restart.1 b/deps/npm/man/man1/npm-restart.1 index 5df4da83185575..2500c91e698adf 100644 --- a/deps/npm/man/man1/npm-restart.1 +++ b/deps/npm/man/man1/npm-restart.1 @@ -1,4 +1,4 @@ -.TH "NPM-RESTART" "1" "July 2023" "" "" +.TH "NPM-RESTART" "1" "August 2023" "" "" .SH "NAME" \fBnpm-restart\fR - Restart a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-root.1 b/deps/npm/man/man1/npm-root.1 index 9d7f65b1eaf15e..520835c18200de 100644 --- a/deps/npm/man/man1/npm-root.1 +++ b/deps/npm/man/man1/npm-root.1 @@ -1,4 +1,4 @@ -.TH "NPM-ROOT" "1" "July 2023" "" "" +.TH "NPM-ROOT" "1" "August 2023" "" "" .SH "NAME" \fBnpm-root\fR - Display npm root .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-run-script.1 b/deps/npm/man/man1/npm-run-script.1 index 2b458fc495568e..93031cfe89db60 100644 --- a/deps/npm/man/man1/npm-run-script.1 +++ b/deps/npm/man/man1/npm-run-script.1 @@ -1,4 +1,4 @@ -.TH "NPM-RUN-SCRIPT" "1" "July 2023" "" "" +.TH "NPM-RUN-SCRIPT" "1" "August 2023" "" "" .SH "NAME" \fBnpm-run-script\fR - Run arbitrary package scripts .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-search.1 b/deps/npm/man/man1/npm-search.1 index 30a9e7c0f9371b..7c43d3bd115de3 100644 --- 
a/deps/npm/man/man1/npm-search.1 +++ b/deps/npm/man/man1/npm-search.1 @@ -1,4 +1,4 @@ -.TH "NPM-SEARCH" "1" "July 2023" "" "" +.TH "NPM-SEARCH" "1" "August 2023" "" "" .SH "NAME" \fBnpm-search\fR - Search for packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-shrinkwrap.1 b/deps/npm/man/man1/npm-shrinkwrap.1 index 932c73fa284ff4..99a04c82b0b148 100644 --- a/deps/npm/man/man1/npm-shrinkwrap.1 +++ b/deps/npm/man/man1/npm-shrinkwrap.1 @@ -1,4 +1,4 @@ -.TH "NPM-SHRINKWRAP" "1" "July 2023" "" "" +.TH "NPM-SHRINKWRAP" "1" "August 2023" "" "" .SH "NAME" \fBnpm-shrinkwrap\fR - Lock down dependency versions for publication .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-star.1 b/deps/npm/man/man1/npm-star.1 index 83bec1215ce124..f6a3784a70c0db 100644 --- a/deps/npm/man/man1/npm-star.1 +++ b/deps/npm/man/man1/npm-star.1 @@ -1,4 +1,4 @@ -.TH "NPM-STAR" "1" "July 2023" "" "" +.TH "NPM-STAR" "1" "August 2023" "" "" .SH "NAME" \fBnpm-star\fR - Mark your favorite packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-stars.1 b/deps/npm/man/man1/npm-stars.1 index 7651506c9135ee..1d22e099bc362a 100644 --- a/deps/npm/man/man1/npm-stars.1 +++ b/deps/npm/man/man1/npm-stars.1 @@ -1,4 +1,4 @@ -.TH "NPM-STARS" "1" "July 2023" "" "" +.TH "NPM-STARS" "1" "August 2023" "" "" .SH "NAME" \fBnpm-stars\fR - View packages marked as favorites .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-start.1 b/deps/npm/man/man1/npm-start.1 index 6d3fa76cd86681..0bee36d08e2a9d 100644 --- a/deps/npm/man/man1/npm-start.1 +++ b/deps/npm/man/man1/npm-start.1 @@ -1,4 +1,4 @@ -.TH "NPM-START" "1" "July 2023" "" "" +.TH "NPM-START" "1" "August 2023" "" "" .SH "NAME" \fBnpm-start\fR - Start a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-stop.1 b/deps/npm/man/man1/npm-stop.1 index 54611e36b08633..ffee802a0ad888 100644 --- a/deps/npm/man/man1/npm-stop.1 +++ b/deps/npm/man/man1/npm-stop.1 @@ -1,4 +1,4 @@ -.TH "NPM-STOP" "1" "July 2023" "" "" +.TH "NPM-STOP" "1" "August 2023" "" "" .SH "NAME" \fBnpm-stop\fR - Stop a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-team.1 b/deps/npm/man/man1/npm-team.1 index 7b806f4412061b..cd46af5e7cc054 100644 --- a/deps/npm/man/man1/npm-team.1 +++ b/deps/npm/man/man1/npm-team.1 @@ -1,4 +1,4 @@ -.TH "NPM-TEAM" "1" "July 2023" "" "" +.TH "NPM-TEAM" "1" "August 2023" "" "" .SH "NAME" \fBnpm-team\fR - Manage organization teams and team memberships .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-test.1 b/deps/npm/man/man1/npm-test.1 index 5e02ed40cdb14e..df03feb04c8c88 100644 --- a/deps/npm/man/man1/npm-test.1 +++ b/deps/npm/man/man1/npm-test.1 @@ -1,4 +1,4 @@ -.TH "NPM-TEST" "1" "July 2023" "" "" +.TH "NPM-TEST" "1" "August 2023" "" "" .SH "NAME" \fBnpm-test\fR - Test a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-token.1 b/deps/npm/man/man1/npm-token.1 index 242c82f2feb528..5a845534a1a61a 100644 --- a/deps/npm/man/man1/npm-token.1 +++ b/deps/npm/man/man1/npm-token.1 @@ -1,4 +1,4 @@ -.TH "NPM-TOKEN" "1" "July 2023" "" "" +.TH "NPM-TOKEN" "1" "August 2023" "" "" .SH "NAME" \fBnpm-token\fR - Manage your authentication tokens .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-uninstall.1 b/deps/npm/man/man1/npm-uninstall.1 index d89488ffc91a22..c4de8421fec7ec 100644 --- a/deps/npm/man/man1/npm-uninstall.1 +++ b/deps/npm/man/man1/npm-uninstall.1 @@ -1,4 +1,4 @@ -.TH "NPM-UNINSTALL" "1" "July 2023" "" "" +.TH "NPM-UNINSTALL" "1" "August 2023" "" "" .SH "NAME" \fBnpm-uninstall\fR - Remove a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-unpublish.1 
b/deps/npm/man/man1/npm-unpublish.1 index faa9bd23baf2c9..68d841c8f55c14 100644 --- a/deps/npm/man/man1/npm-unpublish.1 +++ b/deps/npm/man/man1/npm-unpublish.1 @@ -1,4 +1,4 @@ -.TH "NPM-UNPUBLISH" "1" "July 2023" "" "" +.TH "NPM-UNPUBLISH" "1" "August 2023" "" "" .SH "NAME" \fBnpm-unpublish\fR - Remove a package from the registry .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-unstar.1 b/deps/npm/man/man1/npm-unstar.1 index 157e7f53fcbede..a129bc500c9248 100644 --- a/deps/npm/man/man1/npm-unstar.1 +++ b/deps/npm/man/man1/npm-unstar.1 @@ -1,4 +1,4 @@ -.TH "NPM-UNSTAR" "1" "July 2023" "" "" +.TH "NPM-UNSTAR" "1" "August 2023" "" "" .SH "NAME" \fBnpm-unstar\fR - Remove an item from your favorite packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-update.1 b/deps/npm/man/man1/npm-update.1 index c72b717593f356..e300b680de6f47 100644 --- a/deps/npm/man/man1/npm-update.1 +++ b/deps/npm/man/man1/npm-update.1 @@ -1,4 +1,4 @@ -.TH "NPM-UPDATE" "1" "July 2023" "" "" +.TH "NPM-UPDATE" "1" "August 2023" "" "" .SH "NAME" \fBnpm-update\fR - Update packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-version.1 b/deps/npm/man/man1/npm-version.1 index 482727246288a4..c2f002aa203b49 100644 --- a/deps/npm/man/man1/npm-version.1 +++ b/deps/npm/man/man1/npm-version.1 @@ -1,4 +1,4 @@ -.TH "NPM-VERSION" "1" "July 2023" "" "" +.TH "NPM-VERSION" "1" "August 2023" "" "" .SH "NAME" \fBnpm-version\fR - Bump a package version .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-view.1 b/deps/npm/man/man1/npm-view.1 index ea6fdfeb518b2d..3293e6a288d225 100644 --- a/deps/npm/man/man1/npm-view.1 +++ b/deps/npm/man/man1/npm-view.1 @@ -1,4 +1,4 @@ -.TH "NPM-VIEW" "1" "July 2023" "" "" +.TH "NPM-VIEW" "1" "August 2023" "" "" .SH "NAME" \fBnpm-view\fR - View registry info .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-whoami.1 b/deps/npm/man/man1/npm-whoami.1 index 799d85fc0275e3..0c105ee38f9afd 100644 --- a/deps/npm/man/man1/npm-whoami.1 +++ b/deps/npm/man/man1/npm-whoami.1 @@ -1,4 +1,4 @@ -.TH "NPM-WHOAMI" "1" "July 2023" "" "" +.TH "NPM-WHOAMI" "1" "August 2023" "" "" .SH "NAME" \fBnpm-whoami\fR - Display npm username .SS "Synopsis" diff --git a/deps/npm/man/man1/npm.1 b/deps/npm/man/man1/npm.1 index cbb25b2aa1a32d..af425bdf4b9901 100644 --- a/deps/npm/man/man1/npm.1 +++ b/deps/npm/man/man1/npm.1 @@ -1,4 +1,4 @@ -.TH "NPM" "1" "July 2023" "" "" +.TH "NPM" "1" "August 2023" "" "" .SH "NAME" \fBnpm\fR - javascript package manager .SS "Synopsis" @@ -12,7 +12,7 @@ npm Note: This command is unaware of workspaces. .SS "Version" .P -9.8.1 +10.0.0 .SS "Description" .P npm is the package manager for the Node JavaScript platform. It puts modules in place so that node can find them, and manages dependency conflicts intelligently. 
diff --git a/deps/npm/man/man1/npx.1 b/deps/npm/man/man1/npx.1 index f1c9b4cbf676ca..61b9737c330117 100644 --- a/deps/npm/man/man1/npx.1 +++ b/deps/npm/man/man1/npx.1 @@ -1,4 +1,4 @@ -.TH "NPX" "1" "July 2023" "" "" +.TH "NPX" "1" "August 2023" "" "" .SH "NAME" \fBnpx\fR - Run a command from a local or remote npm package .SS "Synopsis" diff --git a/deps/npm/man/man5/folders.5 b/deps/npm/man/man5/folders.5 index 3661e0bbbab59d..2acf702d6081d7 100644 --- a/deps/npm/man/man5/folders.5 +++ b/deps/npm/man/man5/folders.5 @@ -1,4 +1,4 @@ -.TH "FOLDERS" "5" "July 2023" "" "" +.TH "FOLDERS" "5" "August 2023" "" "" .SH "NAME" \fBfolders\fR - Folder Structures Used by npm .SS "Description" diff --git a/deps/npm/man/man5/install.5 b/deps/npm/man/man5/install.5 index efbbdccbba07d8..100b4755a12026 100644 --- a/deps/npm/man/man5/install.5 +++ b/deps/npm/man/man5/install.5 @@ -1,4 +1,4 @@ -.TH "INSTALL" "5" "July 2023" "" "" +.TH "INSTALL" "5" "August 2023" "" "" .SH "NAME" \fBinstall\fR - Download and install node and npm .SS "Description" diff --git a/deps/npm/man/man5/npm-global.5 b/deps/npm/man/man5/npm-global.5 index 3661e0bbbab59d..2acf702d6081d7 100644 --- a/deps/npm/man/man5/npm-global.5 +++ b/deps/npm/man/man5/npm-global.5 @@ -1,4 +1,4 @@ -.TH "FOLDERS" "5" "July 2023" "" "" +.TH "FOLDERS" "5" "August 2023" "" "" .SH "NAME" \fBfolders\fR - Folder Structures Used by npm .SS "Description" diff --git a/deps/npm/man/man5/npm-json.5 b/deps/npm/man/man5/npm-json.5 index f1e5784ada7682..033a8810e7c85a 100644 --- a/deps/npm/man/man5/npm-json.5 +++ b/deps/npm/man/man5/npm-json.5 @@ -1,4 +1,4 @@ -.TH "PACKAGE.JSON" "5" "July 2023" "" "" +.TH "PACKAGE.JSON" "5" "August 2023" "" "" .SH "NAME" \fBpackage.json\fR - Specifics of npm's package.json handling .SS "Description" @@ -70,8 +70,10 @@ It should look like this: .RS 2 .nf { - "url" : "https://github.com/owner/project/issues", - "email" : "project@hostname.com" + "bugs": { + "url": "https://github.com/owner/project/issues", + "email": "project@hostname.com" + } } .fi .RE @@ -254,6 +256,8 @@ Certain files are always included, regardless of settings: \fBLICENSE\fR / \fBLICENCE\fR .IP \(bu 4 The file in the "main" field +.IP \(bu 4 +The file(s) in the "bin" field .RE 0 .P diff --git a/deps/npm/man/man5/npm-shrinkwrap-json.5 b/deps/npm/man/man5/npm-shrinkwrap-json.5 index cf3e37e92253d2..edea5989cb78b7 100644 --- a/deps/npm/man/man5/npm-shrinkwrap-json.5 +++ b/deps/npm/man/man5/npm-shrinkwrap-json.5 @@ -1,4 +1,4 @@ -.TH "NPM-SHRINKWRAP.JSON" "5" "July 2023" "" "" +.TH "NPM-SHRINKWRAP.JSON" "5" "August 2023" "" "" .SH "NAME" \fBnpm-shrinkwrap.json\fR - A publishable lockfile .SS "Description" diff --git a/deps/npm/man/man5/npmrc.5 b/deps/npm/man/man5/npmrc.5 index 7b222d3736b02d..5f16942e165bf8 100644 --- a/deps/npm/man/man5/npmrc.5 +++ b/deps/npm/man/man5/npmrc.5 @@ -1,4 +1,4 @@ -.TH "NPMRC" "5" "July 2023" "" "" +.TH "NPMRC" "5" "August 2023" "" "" .SH "NAME" \fBnpmrc\fR - The npm config files .SS "Description" diff --git a/deps/npm/man/man5/package-json.5 b/deps/npm/man/man5/package-json.5 index f1e5784ada7682..033a8810e7c85a 100644 --- a/deps/npm/man/man5/package-json.5 +++ b/deps/npm/man/man5/package-json.5 @@ -1,4 +1,4 @@ -.TH "PACKAGE.JSON" "5" "July 2023" "" "" +.TH "PACKAGE.JSON" "5" "August 2023" "" "" .SH "NAME" \fBpackage.json\fR - Specifics of npm's package.json handling .SS "Description" @@ -70,8 +70,10 @@ It should look like this: .RS 2 .nf { - "url" : "https://github.com/owner/project/issues", - "email" : "project@hostname.com" + "bugs": { + 
"url": "https://github.com/owner/project/issues", + "email": "project@hostname.com" + } } .fi .RE @@ -254,6 +256,8 @@ Certain files are always included, regardless of settings: \fBLICENSE\fR / \fBLICENCE\fR .IP \(bu 4 The file in the "main" field +.IP \(bu 4 +The file(s) in the "bin" field .RE 0 .P diff --git a/deps/npm/man/man5/package-lock-json.5 b/deps/npm/man/man5/package-lock-json.5 index 82435a461b88a6..85f42563387145 100644 --- a/deps/npm/man/man5/package-lock-json.5 +++ b/deps/npm/man/man5/package-lock-json.5 @@ -1,4 +1,4 @@ -.TH "PACKAGE-LOCK.JSON" "5" "July 2023" "" "" +.TH "PACKAGE-LOCK.JSON" "5" "August 2023" "" "" .SH "NAME" \fBpackage-lock.json\fR - A manifestation of the manifest .SS "Description" diff --git a/deps/npm/man/man7/config.7 b/deps/npm/man/man7/config.7 index 3bcd6075cc2235..43d874f9710cb2 100644 --- a/deps/npm/man/man7/config.7 +++ b/deps/npm/man/man7/config.7 @@ -1,4 +1,4 @@ -.TH "CONFIG" "7" "July 2023" "" "" +.TH "CONFIG" "7" "August 2023" "" "" .SH "NAME" \fBconfig\fR - More than you probably want to know about npm configuration .SS "Description" @@ -1769,18 +1769,6 @@ cert="-----BEGIN CERTIFICATE-----\[rs]nXXXX\[rs]nXXXX\[rs]n-----END CERTIFICATE- .RE .P It is \fInot\fR the path to a certificate file, though you can set a registry-scoped "certfile" path like "//other-registry.tld/:certfile=/path/to/cert.pem". -.SS "\fBci-name\fR" -.RS 0 -.IP \(bu 4 -Default: The name of the current CI system, or \fBnull\fR when not on a known CI platform. -.IP \(bu 4 -Type: null or String -.IP \(bu 4 -DEPRECATED: This config is deprecated and will not be changeable in future version of npm. -.RE 0 - -.P -The name of a continuous integration system. If not set explicitly, npm will detect the current CI environment using the \fB\fBci-info\fR\fR \fI\(lahttp://npm.im/ci-info\(ra\fR module. .SS "\fBdev\fR" .RS 0 .IP \(bu 4 @@ -1959,18 +1947,6 @@ DEPRECATED: Use the --package-lock setting instead. .P Alias for --package-lock -.SS "\fBtmp\fR" -.RS 0 -.IP \(bu 4 -Default: The value returned by the Node.js \fBos.tmpdir()\fR method \fI\(lahttps://nodejs.org/api/os.html#os_os_tmpdir\(ra\fR -.IP \(bu 4 -Type: Path -.IP \(bu 4 -DEPRECATED: This setting is no longer used. npm stores temporary files in a special location in the cache, and they are managed by \fB\fBcacache\fR\fR \fI\(lahttp://npm.im/cacache\(ra\fR. -.RE 0 - -.P -Historically, the location where temporary files were stored. No longer relevant. 
.SS "See also" .RS 0 .IP \(bu 4 diff --git a/deps/npm/man/man7/dependency-selectors.7 b/deps/npm/man/man7/dependency-selectors.7 index 8e557efe93853a..c64631e02b22fd 100644 --- a/deps/npm/man/man7/dependency-selectors.7 +++ b/deps/npm/man/man7/dependency-selectors.7 @@ -1,4 +1,4 @@ -.TH "QUERYING" "7" "July 2023" "" "" +.TH "QUERYING" "7" "August 2023" "" "" .SH "NAME" \fBQuerying\fR - Dependency Selector Syntax & Querying .SS "Description" diff --git a/deps/npm/man/man7/developers.7 b/deps/npm/man/man7/developers.7 index 788ed4b9d8d457..c447a49da07e48 100644 --- a/deps/npm/man/man7/developers.7 +++ b/deps/npm/man/man7/developers.7 @@ -1,4 +1,4 @@ -.TH "DEVELOPERS" "7" "July 2023" "" "" +.TH "DEVELOPERS" "7" "August 2023" "" "" .SH "NAME" \fBdevelopers\fR - Developer Guide .SS "Description" diff --git a/deps/npm/man/man7/logging.7 b/deps/npm/man/man7/logging.7 index 0c96f75c479453..e06928a041162e 100644 --- a/deps/npm/man/man7/logging.7 +++ b/deps/npm/man/man7/logging.7 @@ -1,4 +1,4 @@ -.TH "LOGGING" "7" "July 2023" "" "" +.TH "LOGGING" "7" "August 2023" "" "" .SH "NAME" \fBLogging\fR - Why, What & How We Log .SS "Description" diff --git a/deps/npm/man/man7/orgs.7 b/deps/npm/man/man7/orgs.7 index 2d0ec91b96774d..46fa4efe9b51e1 100644 --- a/deps/npm/man/man7/orgs.7 +++ b/deps/npm/man/man7/orgs.7 @@ -1,4 +1,4 @@ -.TH "ORGS" "7" "July 2023" "" "" +.TH "ORGS" "7" "August 2023" "" "" .SH "NAME" \fBorgs\fR - Working with Teams & Orgs .SS "Description" diff --git a/deps/npm/man/man7/package-spec.7 b/deps/npm/man/man7/package-spec.7 index 2d02001f93791b..8651c7aec26b5c 100644 --- a/deps/npm/man/man7/package-spec.7 +++ b/deps/npm/man/man7/package-spec.7 @@ -1,4 +1,4 @@ -.TH "PACKAGE-SPEC" "7" "July 2023" "" "" +.TH "PACKAGE-SPEC" "7" "August 2023" "" "" .SH "NAME" \fBpackage-spec\fR - Package name specifier .SS "Description" diff --git a/deps/npm/man/man7/registry.7 b/deps/npm/man/man7/registry.7 index 9b68a2a761543b..6811ccee1ff655 100644 --- a/deps/npm/man/man7/registry.7 +++ b/deps/npm/man/man7/registry.7 @@ -1,4 +1,4 @@ -.TH "REGISTRY" "7" "July 2023" "" "" +.TH "REGISTRY" "7" "August 2023" "" "" .SH "NAME" \fBregistry\fR - The JavaScript Package Registry .SS "Description" diff --git a/deps/npm/man/man7/removal.7 b/deps/npm/man/man7/removal.7 index 1ae685b6f126e3..ea31865f6b5985 100644 --- a/deps/npm/man/man7/removal.7 +++ b/deps/npm/man/man7/removal.7 @@ -1,4 +1,4 @@ -.TH "REMOVAL" "7" "July 2023" "" "" +.TH "REMOVAL" "7" "August 2023" "" "" .SH "NAME" \fBremoval\fR - Cleaning the Slate .SS "Synopsis" diff --git a/deps/npm/man/man7/scope.7 b/deps/npm/man/man7/scope.7 index 04dc80fd662669..57b2d8edbdd180 100644 --- a/deps/npm/man/man7/scope.7 +++ b/deps/npm/man/man7/scope.7 @@ -1,4 +1,4 @@ -.TH "SCOPE" "7" "July 2023" "" "" +.TH "SCOPE" "7" "August 2023" "" "" .SH "NAME" \fBscope\fR - Scoped packages .SS "Description" diff --git a/deps/npm/man/man7/scripts.7 b/deps/npm/man/man7/scripts.7 index 043b296f90baa5..bd119571b8f64d 100644 --- a/deps/npm/man/man7/scripts.7 +++ b/deps/npm/man/man7/scripts.7 @@ -1,4 +1,4 @@ -.TH "SCRIPTS" "7" "July 2023" "" "" +.TH "SCRIPTS" "7" "August 2023" "" "" .SH "NAME" \fBscripts\fR - How npm handles the "scripts" field .SS "Description" diff --git a/deps/npm/man/man7/workspaces.7 b/deps/npm/man/man7/workspaces.7 index 8f11ac9a9f67f5..ec1700b0496afd 100644 --- a/deps/npm/man/man7/workspaces.7 +++ b/deps/npm/man/man7/workspaces.7 @@ -1,4 +1,4 @@ -.TH "WORKSPACES" "7" "July 2023" "" "" +.TH "WORKSPACES" "7" "August 2023" "" "" .SH "NAME" \fBworkspaces\fR - Working 
with workspaces .SS "Description" diff --git a/deps/npm/node_modules/@npmcli/agent/lib/agents.js b/deps/npm/node_modules/@npmcli/agent/lib/agents.js new file mode 100644 index 00000000000000..db997403f75794 --- /dev/null +++ b/deps/npm/node_modules/@npmcli/agent/lib/agents.js @@ -0,0 +1,201 @@ +'use strict' + +const http = require('http') +const https = require('https') +const net = require('net') +const tls = require('tls') +const { once } = require('events') +const { createTimeout, abortRace, urlify, appendPort, cacheAgent } = require('./util') +const { normalizeOptions, cacheOptions } = require('./options') +const { getProxy, getProxyType, isSecureProxy, proxyCache } = require('./proxy.js') +const Errors = require('./errors.js') + +const createAgent = (base, name) => { + const SECURE = base === https + const SOCKET_TYPE = SECURE ? tls : net + + const agent = class extends base.Agent { + #options + #timeouts + #proxy + #socket + + constructor (_options) { + const { timeouts, proxy, noProxy, ...options } = normalizeOptions(_options) + + super(options) + + this.#options = options + this.#timeouts = timeouts + this.#proxy = proxy ? { proxies: getProxyType(proxy), proxy: urlify(proxy), noProxy } : null + } + + get proxy () { + return this.#proxy ? { url: this.#proxy.proxy } : {} + } + + #getProxy (options) { + const proxy = this.#proxy + ? getProxy(appendPort(`${options.protocol}//${options.host}`, options.port), this.#proxy) + : null + + if (!proxy) { + return + } + + const secure = isSecureProxy(proxy) + + return cacheAgent({ + key: cacheOptions({ + ...options, + ...this.#options, + secure, + timeouts: this.#timeouts, + proxy, + }), + cache: proxyCache, + secure, + proxies: this.#proxy.proxies, + }, proxy, this.#options) + } + + #setKeepAlive (socket) { + socket.setKeepAlive(this.keepAlive, this.keepAliveMsecs) + socket.setNoDelay(this.keepAlive) + } + + #setIdleTimeout (socket, options) { + if (this.#timeouts.idle) { + socket.setTimeout(this.#timeouts.idle, () => { + socket.destroy(new Errors.IdleTimeoutError(options)) + }) + } + } + + async #proxyConnect (proxy, request, options) { + // socks-proxy-agent accepts a dns lookup function + options.lookup ??= this.#options.lookup + + // all the proxy agents use this secureEndpoint option to determine + // if the proxy should connect over tls or not. we can set it based + // on if the HttpAgent or HttpsAgent is used. + options.secureEndpoint = SECURE + + const socket = await abortRace([ + (ac) => createTimeout(this.#timeouts.connection, ac).catch(() => { + throw new Errors.ConnectionTimeoutError(options) + }), + (ac) => proxy.connect(request, options).then((s) => { + this.#setKeepAlive(s) + + const connectEvent = SECURE ? 'secureConnect' : 'connect' + const connectingEvent = SECURE ? 
'secureConnecting' : 'connecting' + + if (!s[connectingEvent]) { + return s + } + + return abortRace([ + () => once(s, 'error', ac).then((err) => { + throw err + }), + () => once(s, connectEvent, ac).then(() => s), + ], ac) + }), + ]) + + this.#setIdleTimeout(socket, options) + + return socket + } + + async connect (request, options) { + const proxy = this.#getProxy(options) + if (proxy) { + return this.#proxyConnect(proxy, request, options) + } + + const socket = SOCKET_TYPE.connect(options) + + this.#setKeepAlive(socket) + + await abortRace([ + (s) => createTimeout(this.#timeouts.connection, s).catch(() => { + throw new Errors.ConnectionTimeoutError(options) + }), + (s) => once(socket, 'error', s).then((err) => { + throw err + }), + (s) => once(socket, 'connect', s), + ]) + + this.#setIdleTimeout(socket, options) + + return socket + } + + addRequest (request, options) { + const proxy = this.#getProxy(options) + // it would be better to call proxy.addRequest here but this causes the + // http-proxy-agent to call its super.addRequest which causes the request + // to be added to the agent twice. since we only support 3 agents + // currently (see the required agents in proxy.js) we have manually + // checked that the only public methods we need to call are called in the + // next block. this could change in the future and presumably we would get + // failing tests until we have properly called the necessary methods on + // each of our proxy agents + if (proxy?.setRequestProps) { + proxy.setRequestProps(request, options) + } + + request.setHeader('connection', this.keepAlive ? 'keep-alive' : 'close') + + const responseTimeout = createTimeout(this.#timeouts.response) + if (responseTimeout) { + request.once('finish', () => { + responseTimeout.start(() => { + request.destroy(new Errors.ResponseTimeoutError(request, this.proxy?.url)) + }) + }) + request.once('response', () => { + responseTimeout.clear() + }) + } + + const transferTimeout = createTimeout(this.#timeouts.transfer) + if (transferTimeout) { + request.once('response', (res) => { + transferTimeout.start(() => { + res.destroy(new Errors.TransferTimeoutError(request, this.proxy?.url)) + }) + res.once('close', () => { + transferTimeout.clear() + }) + }) + } + + return super.addRequest(request, options) + } + + createSocket (req, options, cb) { + return Promise.resolve() + .then(() => this.connect(req, options)) + .then((socket) => { + this.#socket = socket + return super.createSocket(req, options, cb) + }, cb) + } + + createConnection () { + return this.#socket + } + } + + Object.defineProperty(agent, 'name', { value: name }) + return agent +} + +module.exports = { + HttpAgent: createAgent(http, 'HttpAgent'), + HttpsAgent: createAgent(https, 'HttpsAgent'), +} diff --git a/deps/npm/node_modules/@npmcli/agent/lib/dns.js b/deps/npm/node_modules/@npmcli/agent/lib/dns.js new file mode 100644 index 00000000000000..3c6946c566d736 --- /dev/null +++ b/deps/npm/node_modules/@npmcli/agent/lib/dns.js @@ -0,0 +1,53 @@ +'use strict' + +const { LRUCache } = require('lru-cache') +const dns = require('dns') + +// this is a factory so that each request can have its own opts (i.e. 
ttl) +// while still sharing the cache across all requests +const cache = new LRUCache({ max: 50 }) + +const getOptions = ({ + family = 0, + hints = dns.ADDRCONFIG, + all = false, + verbatim = undefined, + ttl = 5 * 60 * 1000, + lookup = dns.lookup, +}) => ({ + // hints and lookup are returned since both are top level properties to (net|tls).connect + hints, + lookup: (hostname, ...args) => { + const callback = args.pop() // callback is always last arg + const lookupOptions = args[0] ?? {} + + const options = { + family, + hints, + all, + verbatim, + ...(typeof lookupOptions === 'number' ? { family: lookupOptions } : lookupOptions), + } + + const key = JSON.stringify({ hostname, ...options }) + + if (cache.has(key)) { + const cached = cache.get(key) + return process.nextTick(callback, null, ...cached) + } + + lookup(hostname, options, (err, ...result) => { + if (err) { + return callback(err) + } + + cache.set(key, result, { ttl }) + return callback(null, ...result) + }) + }, +}) + +module.exports = { + cache, + getOptions, +} diff --git a/deps/npm/node_modules/@npmcli/agent/lib/errors.js b/deps/npm/node_modules/@npmcli/agent/lib/errors.js new file mode 100644 index 00000000000000..f41b4a065d713e --- /dev/null +++ b/deps/npm/node_modules/@npmcli/agent/lib/errors.js @@ -0,0 +1,65 @@ +'use strict' + +const { appendPort } = require('./util') + +class InvalidProxyProtocolError extends Error { + constructor (url) { + super(`Invalid protocol \`${url.protocol}\` connecting to proxy \`${url.host}\``) + this.code = 'EINVALIDPROXY' + this.proxy = url + } +} + +class ConnectionTimeoutError extends Error { + constructor ({ host, port }) { + host = appendPort(host, port) + super(`Timeout connecting to host \`${host}\``) + this.code = 'ECONNECTIONTIMEOUT' + this.host = host + } +} + +class IdleTimeoutError extends Error { + constructor ({ host, port }) { + host = appendPort(host, port) + super(`Idle timeout reached for host \`${host}\``) + this.code = 'EIDLETIMEOUT' + this.host = host + } +} + +class ResponseTimeoutError extends Error { + constructor (request, proxy) { + let msg = 'Response timeout ' + if (proxy) { + msg += `from proxy \`${proxy.host}\` ` + } + msg += `connecting to host \`${request.host}\`` + super(msg) + this.code = 'ERESPONSETIMEOUT' + this.proxy = proxy + this.request = request + } +} + +class TransferTimeoutError extends Error { + constructor (request, proxy) { + let msg = 'Transfer timeout ' + if (proxy) { + msg += `from proxy \`${proxy.host}\` ` + } + msg += `for \`${request.host}\`` + super(msg) + this.code = 'ETRANSFERTIMEOUT' + this.proxy = proxy + this.request = request + } +} + +module.exports = { + InvalidProxyProtocolError, + ConnectionTimeoutError, + IdleTimeoutError, + ResponseTimeoutError, + TransferTimeoutError, +} diff --git a/deps/npm/node_modules/@npmcli/agent/lib/index.js b/deps/npm/node_modules/@npmcli/agent/lib/index.js new file mode 100644 index 00000000000000..2cd69390ea77e9 --- /dev/null +++ b/deps/npm/node_modules/@npmcli/agent/lib/index.js @@ -0,0 +1,46 @@ +'use strict' + +const { LRUCache } = require('lru-cache') +const { urlify, cacheAgent } = require('./util') +const { normalizeOptions, cacheOptions } = require('./options') +const { getProxy, proxyCache } = require('./proxy.js') +const dns = require('./dns.js') +const { HttpAgent, HttpsAgent } = require('./agents.js') + +const agentCache = new LRUCache({ max: 20 }) + +const getAgent = (url, { agent: _agent, proxy: _proxy, noProxy, ..._options } = {}) => { + // false has meaning so this can't be a simple 
truthiness check + if (_agent != null) { + return _agent + } + + url = urlify(url) + + const secure = url.protocol === 'https:' + const proxy = getProxy(url, { proxy: _proxy, noProxy }) + const options = { ...normalizeOptions(_options), proxy } + + return cacheAgent({ + key: cacheOptions({ ...options, secure }), + cache: agentCache, + secure, + proxies: [HttpAgent, HttpsAgent], + }, options) +} + +module.exports = { + getAgent, + HttpAgent, + HttpsAgent, + cache: { + proxy: proxyCache, + agent: agentCache, + dns: dns.cache, + clear: () => { + proxyCache.clear() + agentCache.clear() + dns.cache.clear() + }, + }, +} diff --git a/deps/npm/node_modules/@npmcli/agent/lib/options.js b/deps/npm/node_modules/@npmcli/agent/lib/options.js new file mode 100644 index 00000000000000..cd87c09d6a25ad --- /dev/null +++ b/deps/npm/node_modules/@npmcli/agent/lib/options.js @@ -0,0 +1,74 @@ +'use strict' + +const dns = require('./dns') +const { createKey } = require('./util') + +const normalizeOptions = (opts) => { + const family = parseInt(opts.family ?? '0', 10) + const keepAlive = opts.keepAlive ?? true + + const normalized = { + // nodejs http agent options. these are all the defaults + // but kept here to increase the likelihood of cache hits + // https://nodejs.org/api/http.html#new-agentoptions + keepAliveMsecs: keepAlive ? 1000 : undefined, + maxSockets: opts.maxSockets ?? 15, + maxTotalSockets: Infinity, + maxFreeSockets: keepAlive ? 256 : undefined, + scheduling: 'fifo', + // then spread the rest of the options + ...opts, + // we already set these to their defaults that we want + family, + keepAlive, + // our custom timeout options + timeouts: { + // the standard timeout option is mapped to our idle timeout + // and then deleted below + idle: opts.timeout ?? 0, + connection: 0, + response: 0, + transfer: 0, + ...opts.timeouts, + }, + // get the dns options that go at the top level of socket connection + ...dns.getOptions({ family, ...opts.dns }), + } + + // remove timeout since we already used it to set our own idle timeout + delete normalized.timeout + + return normalized +} + +const cacheOptions = (options) => { + const { secure } = options + return createKey({ + secure: !!secure, + // socket connect options + family: options.family, + hints: options.hints, + localAddress: options.localAddress, + // tls specific connect options + strictSsl: secure ? !!options.rejectUnauthorized : false, + ca: secure ? options.ca : null, + cert: secure ? options.cert : null, + key: secure ? 
options.key : null, + // http agent options + keepAlive: options.keepAlive, + keepAliveMsecs: options.keepAliveMsecs, + maxSockets: options.maxSockets, + maxTotalSockets: options.maxTotalSockets, + maxFreeSockets: options.maxFreeSockets, + scheduling: options.scheduling, + // timeout options + timeouts: options.timeouts, + // proxy + proxy: options.proxy, + }) +} + +module.exports = { + normalizeOptions, + cacheOptions, +} diff --git a/deps/npm/node_modules/@npmcli/agent/lib/proxy.js b/deps/npm/node_modules/@npmcli/agent/lib/proxy.js new file mode 100644 index 00000000000000..81afdad74c1e56 --- /dev/null +++ b/deps/npm/node_modules/@npmcli/agent/lib/proxy.js @@ -0,0 +1,96 @@ +'use strict' + +const { HttpProxyAgent } = require('http-proxy-agent') +const { HttpsProxyAgent } = require('https-proxy-agent') +const { SocksProxyAgent } = require('socks-proxy-agent') +const { LRUCache } = require('lru-cache') +const { InvalidProxyProtocolError } = require('./errors.js') +const { urlify } = require('./util.js') + +const PROXY_CACHE = new LRUCache({ max: 20 }) + +const PROXY_ENV = (() => { + const keys = new Set(['https_proxy', 'http_proxy', 'proxy', 'no_proxy']) + const values = {} + for (let [key, value] of Object.entries(process.env)) { + key = key.toLowerCase() + if (keys.has(key)) { + values[key] = value + } + } + return values +})() + +const SOCKS_PROTOCOLS = new Set(SocksProxyAgent.protocols) +const SECURE_PROTOCOLS = new Set([...SocksProxyAgent.protocols, 'https']) + +const isSecureProxy = (url) => { + url = urlify(url) + const protocol = url.protocol.slice(0, -1) + return SECURE_PROTOCOLS.has(protocol) +} + +const getProxyType = (url) => { + url = urlify(url) + + const protocol = url.protocol.slice(0, -1) + if (SOCKS_PROTOCOLS.has(protocol)) { + return [SocksProxyAgent] + } + if (protocol === 'https' || protocol === 'http') { + return [HttpProxyAgent, HttpsProxyAgent] + } + + throw new InvalidProxyProtocolError(url) +} + +const isNoProxy = (url, noProxy) => { + if (typeof noProxy === 'string') { + noProxy = noProxy.split(',').map((p) => p.trim()).filter(Boolean) + } + + if (!noProxy || !noProxy.length) { + return false + } + + const hostSegments = url.hostname.split('.').reverse() + + return noProxy.some((no) => { + const noSegments = no.split('.').filter(Boolean).reverse() + if (!noSegments.length) { + return false + } + + for (let i = 0; i < noSegments.length; i++) { + if (hostSegments[i] !== noSegments[i]) { + return false + } + } + + return true + }) +} + +const getProxy = (url, { + proxy = PROXY_ENV.https_proxy, + noProxy = PROXY_ENV.no_proxy, +}) => { + url = urlify(url) + + if (!proxy && url.protocol !== 'https:') { + proxy = PROXY_ENV.http_proxy || PROXY_ENV.proxy + } + + if (!proxy || isNoProxy(url, noProxy)) { + return null + } + + return urlify(proxy) +} + +module.exports = { + getProxyType, + getProxy, + isSecureProxy, + proxyCache: PROXY_CACHE, +} diff --git a/deps/npm/node_modules/@npmcli/agent/lib/util.js b/deps/npm/node_modules/@npmcli/agent/lib/util.js new file mode 100644 index 00000000000000..6d42a2e202c1f9 --- /dev/null +++ b/deps/npm/node_modules/@npmcli/agent/lib/util.js @@ -0,0 +1,84 @@ +'use strict' + +const timers = require('timers/promises') + +const createKey = (obj) => { + let key = '' + const sorted = Object.entries(obj).sort((a, b) => a[0] - b[0]) + for (let [k, v] of sorted) { + if (v == null) { + v = 'null' + } else if (v instanceof URL) { + v = v.toString() + } else if (typeof v === 'object') { + v = createKey(v) + } + key += `${k}:${v}:` + } + return key +} 
+ +const createTimeout = (delay, signal) => { + if (!delay) { + return signal ? new Promise(() => {}) : null + } + + if (!signal) { + let timeout + return { + start: (cb) => (timeout = setTimeout(cb, delay)), + clear: () => clearTimeout(timeout), + } + } + + return timers.setTimeout(delay, null, signal) + .then(() => { + throw new Error() + }).catch((err) => { + if (err.name === 'AbortError') { + return + } + throw err + }) +} + +const abortRace = async (promises, ac = new AbortController()) => { + let res + try { + res = await Promise.race(promises.map((p) => p(ac))) + ac.abort() + } catch (err) { + ac.abort() + throw err + } + return res +} + +const urlify = (url) => typeof url === 'string' ? new URL(url) : url + +const appendPort = (host, port) => { + // istanbul ignore next + if (port) { + host += `:${port}` + } + return host +} + +const cacheAgent = ({ key, cache, secure, proxies }, ...args) => { + if (cache.has(key)) { + return cache.get(key) + } + const Ctor = (secure ? proxies[1] : proxies[0]) ?? proxies[0] + const agent = new Ctor(...args) + cache.set(key, agent) + return agent +} + +module.exports = { + createKey, + createTimeout, + abortRace, + urlify, + cacheAgent, + appendPort, +} diff --git a/deps/npm/node_modules/@npmcli/agent/node_modules/agent-base/dist/helpers.js b/deps/npm/node_modules/@npmcli/agent/node_modules/agent-base/dist/helpers.js new file mode 100644 index 00000000000000..ef3f92022d455d --- /dev/null +++ b/deps/npm/node_modules/@npmcli/agent/node_modules/agent-base/dist/helpers.js @@ -0,0 +1,66 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.req = exports.json = exports.toBuffer = void 0; +const http = __importStar(require("http")); +const https = __importStar(require("https")); +async function toBuffer(stream) { + let length = 0; + const chunks = []; + for await (const chunk of stream) { + length += chunk.length; + chunks.push(chunk); + } + return Buffer.concat(chunks, length); +} +exports.toBuffer = toBuffer; +// eslint-disable-next-line @typescript-eslint/no-explicit-any +async function json(stream) { + const buf = await toBuffer(stream); + const str = buf.toString('utf8'); + try { + return JSON.parse(str); + } + catch (_err) { + const err = _err; + err.message += ` (input: ${str})`; + throw err; + } +} +exports.json = json; +function req(url, opts = {}) { + const href = typeof url === 'string' ? url : url.href; + const req = (href.startsWith('https:') ? 
https : http).request(url, opts); + const promise = new Promise((resolve, reject) => { + req + .once('response', resolve) + .once('error', reject) + .end(); + }); + req.then = promise.then.bind(promise); + return req; +} +exports.req = req; +//# sourceMappingURL=helpers.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/@npmcli/agent/node_modules/agent-base/dist/index.js b/deps/npm/node_modules/@npmcli/agent/node_modules/agent-base/dist/index.js new file mode 100644 index 00000000000000..7bafc8c68604f3 --- /dev/null +++ b/deps/npm/node_modules/@npmcli/agent/node_modules/agent-base/dist/index.js @@ -0,0 +1,112 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Agent = void 0; +const http = __importStar(require("http")); +__exportStar(require("./helpers"), exports); +const INTERNAL = Symbol('AgentBaseInternalState'); +class Agent extends http.Agent { + constructor(opts) { + super(opts); + this[INTERNAL] = {}; + } + /** + * Determine whether this is an `http` or `https` request. + */ + isSecureEndpoint(options) { + if (options) { + // First check the `secureEndpoint` property explicitly, since this + // means that a parent `Agent` is "passing through" to this instance. + // eslint-disable-next-line @typescript-eslint/no-explicit-any + if (typeof options.secureEndpoint === 'boolean') { + return options.secureEndpoint; + } + // If no explicit `secure` endpoint, check if `protocol` property is + // set. This will usually be the case since using a full string URL + // or `URL` instance should be the most common usage. + if (typeof options.protocol === 'string') { + return options.protocol === 'https:'; + } + } + // Finally, if no `protocol` property was set, then fall back to + // checking the stack trace of the current call stack, and try to + // detect the "https" module. 
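+        // (Heuristic only: a stack frame such as `at Object.request (node:https:...)`
+        // indicates the request entered through the `https` module; the exact
+        // frame text is illustrative and varies across Node.js versions.)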
+ const { stack } = new Error(); + if (typeof stack !== 'string') + return false; + return stack + .split('\n') + .some((l) => l.indexOf('(https.js:') !== -1 || + l.indexOf('node:https:') !== -1); + } + createSocket(req, options, cb) { + const connectOpts = { + ...options, + secureEndpoint: this.isSecureEndpoint(options), + }; + Promise.resolve() + .then(() => this.connect(req, connectOpts)) + .then((socket) => { + if (socket instanceof http.Agent) { + // @ts-expect-error `addRequest()` isn't defined in `@types/node` + return socket.addRequest(req, connectOpts); + } + this[INTERNAL].currentSocket = socket; + // @ts-expect-error `createSocket()` isn't defined in `@types/node` + super.createSocket(req, options, cb); + }, cb); + } + createConnection() { + const socket = this[INTERNAL].currentSocket; + this[INTERNAL].currentSocket = undefined; + if (!socket) { + throw new Error('No socket was returned in the `connect()` function'); + } + return socket; + } + get defaultPort() { + return (this[INTERNAL].defaultPort ?? + (this.protocol === 'https:' ? 443 : 80)); + } + set defaultPort(v) { + if (this[INTERNAL]) { + this[INTERNAL].defaultPort = v; + } + } + get protocol() { + return (this[INTERNAL].protocol ?? + (this.isSecureEndpoint() ? 'https:' : 'http:')); + } + set protocol(v) { + if (this[INTERNAL]) { + this[INTERNAL].protocol = v; + } + } +} +exports.Agent = Agent; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/@npmcli/agent/node_modules/agent-base/package.json b/deps/npm/node_modules/@npmcli/agent/node_modules/agent-base/package.json new file mode 100644 index 00000000000000..7178f4983f4fb9 --- /dev/null +++ b/deps/npm/node_modules/@npmcli/agent/node_modules/agent-base/package.json @@ -0,0 +1,49 @@ +{ + "name": "agent-base", + "version": "7.1.0", + "description": "Turn a function into an `http.Agent` instance", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "files": [ + "dist" + ], + "repository": { + "type": "git", + "url": "https://github.com/TooTallNate/proxy-agents.git", + "directory": "packages/agent-base" + }, + "keywords": [ + "http", + "agent", + "base", + "barebones", + "https" + ], + "author": "Nathan Rajlich (http://n8.io/)", + "license": "MIT", + "dependencies": { + "debug": "^4.3.4" + }, + "devDependencies": { + "@types/debug": "^4.1.7", + "@types/jest": "^29.5.1", + "@types/node": "^14.18.45", + "@types/semver": "^7.3.13", + "@types/ws": "^6.0.4", + "async-listen": "^3.0.0", + "jest": "^29.5.0", + "ts-jest": "^29.1.0", + "typescript": "^5.0.4", + "ws": "^3.3.3", + "tsconfig": "0.0.0" + }, + "engines": { + "node": ">= 14" + }, + "scripts": { + "build": "tsc", + "test": "jest --env node --verbose --bail", + "lint": "eslint . 
--ext .ts", + "pack": "node ../../scripts/pack.mjs" + } +} \ No newline at end of file diff --git a/deps/npm/node_modules/depd/LICENSE b/deps/npm/node_modules/@npmcli/agent/node_modules/http-proxy-agent/LICENSE similarity index 92% rename from deps/npm/node_modules/depd/LICENSE rename to deps/npm/node_modules/@npmcli/agent/node_modules/http-proxy-agent/LICENSE index 248de7af2bd16c..aad14057fad570 100644 --- a/deps/npm/node_modules/depd/LICENSE +++ b/deps/npm/node_modules/@npmcli/agent/node_modules/http-proxy-agent/LICENSE @@ -1,6 +1,9 @@ +License +------- + (The MIT License) -Copyright (c) 2014-2018 Douglas Christopher Wilson +Copyright (c) 2013 Nathan Rajlich <nathan@tootallnate.net> Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the diff --git a/deps/npm/node_modules/@npmcli/agent/node_modules/http-proxy-agent/dist/index.js b/deps/npm/node_modules/@npmcli/agent/node_modules/http-proxy-agent/dist/index.js new file mode 100644 index 00000000000000..4a7daf6156f941 --- /dev/null +++ b/deps/npm/node_modules/@npmcli/agent/node_modules/http-proxy-agent/dist/index.js @@ -0,0 +1,147 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.HttpProxyAgent = void 0; +const net = __importStar(require("net")); +const tls = __importStar(require("tls")); +const debug_1 = __importDefault(require("debug")); +const events_1 = require("events"); +const agent_base_1 = require("agent-base"); +const debug = (0, debug_1.default)('http-proxy-agent'); +/** + * The `HttpProxyAgent` implements an HTTP Agent subclass that connects + * to the specified "HTTP proxy server" in order to proxy HTTP requests. + */ +class HttpProxyAgent extends agent_base_1.Agent { + constructor(proxy, opts) { + super(opts); + this.proxy = typeof proxy === 'string' ? new URL(proxy) : proxy; + this.proxyHeaders = opts?.headers ?? {}; + debug('Creating new HttpProxyAgent instance: %o', this.proxy.href); + // Trim off the brackets from IPv6 addresses + const host = (this.proxy.hostname || this.proxy.host).replace(/^\[|\]$/g, ''); + const port = this.proxy.port + ? parseInt(this.proxy.port, 10) + : this.proxy.protocol === 'https:' + ? 443 + : 80; + this.connectOpts = { + ...(opts ? 
omit(opts, 'headers') : null), + host, + port, + }; + } + addRequest(req, opts) { + req._header = null; + this.setRequestProps(req, opts); + // @ts-expect-error `addRequest()` isn't defined in `@types/node` + super.addRequest(req, opts); + } + setRequestProps(req, opts) { + const { proxy } = this; + const protocol = opts.secureEndpoint ? 'https:' : 'http:'; + const hostname = req.getHeader('host') || 'localhost'; + const base = `${protocol}//${hostname}`; + const url = new URL(req.path, base); + if (opts.port !== 80) { + url.port = String(opts.port); + } + // Change the `http.ClientRequest` instance's "path" field + // to the absolute path of the URL that will be requested. + req.path = String(url); + // Inject the `Proxy-Authorization` header if necessary. + const headers = typeof this.proxyHeaders === 'function' + ? this.proxyHeaders() + : { ...this.proxyHeaders }; + if (proxy.username || proxy.password) { + const auth = `${decodeURIComponent(proxy.username)}:${decodeURIComponent(proxy.password)}`; + headers['Proxy-Authorization'] = `Basic ${Buffer.from(auth).toString('base64')}`; + } + if (!headers['Proxy-Connection']) { + headers['Proxy-Connection'] = this.keepAlive + ? 'Keep-Alive' + : 'close'; + } + for (const name of Object.keys(headers)) { + const value = headers[name]; + if (value) { + req.setHeader(name, value); + } + } + } + async connect(req, opts) { + req._header = null; + if (!req.path.includes('://')) { + this.setRequestProps(req, opts); + } + // At this point, the http ClientRequest's internal `_header` field + // might have already been set. If this is the case then we'll need + // to re-generate the string since we just changed the `req.path`. + let first; + let endOfHeaders; + debug('Regenerating stored HTTP header string for request'); + req._implicitHeader(); + if (req.outputData && req.outputData.length > 0) { + debug('Patching connection write() output buffer with updated header'); + first = req.outputData[0].data; + endOfHeaders = first.indexOf('\r\n\r\n') + 4; + req.outputData[0].data = + req._header + first.substring(endOfHeaders); + debug('Output buffer: %o', req.outputData[0].data); + } + // Create a socket connection to the proxy server. + let socket; + if (this.proxy.protocol === 'https:') { + debug('Creating `tls.Socket`: %o', this.connectOpts); + socket = tls.connect(this.connectOpts); + } + else { + debug('Creating `net.Socket`: %o', this.connectOpts); + socket = net.connect(this.connectOpts); + } + // Wait for the socket's `connect` event, so that this `callback()` + // function throws instead of the `http` request machinery. This is + // important for i.e. `PacProxyAgent` which determines a failed proxy + // connection via the `callback()` function throwing. 
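+        // (pac-proxy-agent itself is not vendored alongside this copy of
+        // http-proxy-agent; the note above comes from the upstream
+        // proxy-agents monorepo, where both packages live.)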
+ await (0, events_1.once)(socket, 'connect'); + return socket; + } +} +HttpProxyAgent.protocols = ['http', 'https']; +exports.HttpProxyAgent = HttpProxyAgent; +function omit(obj, ...keys) { + const ret = {}; + let key; + for (key in obj) { + if (!keys.includes(key)) { + ret[key] = obj[key]; + } + } + return ret; +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/@npmcli/agent/node_modules/http-proxy-agent/package.json b/deps/npm/node_modules/@npmcli/agent/node_modules/http-proxy-agent/package.json new file mode 100644 index 00000000000000..08c650cbb22aa4 --- /dev/null +++ b/deps/npm/node_modules/@npmcli/agent/node_modules/http-proxy-agent/package.json @@ -0,0 +1,47 @@ +{ + "name": "http-proxy-agent", + "version": "7.0.0", + "description": "An HTTP(s) proxy `http.Agent` implementation for HTTP", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "files": [ + "dist" + ], + "repository": { + "type": "git", + "url": "https://github.com/TooTallNate/proxy-agents.git", + "directory": "packages/http-proxy-agent" + }, + "keywords": [ + "http", + "proxy", + "endpoint", + "agent" + ], + "author": "Nathan Rajlich (http://n8.io/)", + "license": "MIT", + "dependencies": { + "agent-base": "^7.1.0", + "debug": "^4.3.4" + }, + "devDependencies": { + "@types/debug": "^4.1.7", + "@types/jest": "^29.5.1", + "@types/node": "^14.18.45", + "async-listen": "^3.0.0", + "jest": "^29.5.0", + "ts-jest": "^29.1.0", + "typescript": "^5.0.4", + "proxy": "2.1.1", + "tsconfig": "0.0.0" + }, + "engines": { + "node": ">= 14" + }, + "scripts": { + "build": "tsc", + "test": "jest --env node --verbose --bail", + "lint": "eslint . --ext .ts", + "pack": "node ../../scripts/pack.mjs" + } +} \ No newline at end of file diff --git a/deps/npm/node_modules/@npmcli/agent/node_modules/https-proxy-agent/dist/index.js b/deps/npm/node_modules/@npmcli/agent/node_modules/https-proxy-agent/dist/index.js new file mode 100644 index 00000000000000..e3bbfe632c454d --- /dev/null +++ b/deps/npm/node_modules/@npmcli/agent/node_modules/https-proxy-agent/dist/index.js @@ -0,0 +1,170 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.HttpsProxyAgent = void 0; +const net = __importStar(require("net")); +const tls = __importStar(require("tls")); +const assert_1 = __importDefault(require("assert")); +const debug_1 = __importDefault(require("debug")); +const agent_base_1 = require("agent-base"); +const parse_proxy_response_1 = require("./parse-proxy-response"); +const debug = (0, debug_1.default)('https-proxy-agent'); +/** + * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to + * the specified "HTTP(s) proxy server" in order to proxy HTTPS requests. + * + * Outgoing HTTP requests are first tunneled through the proxy server using the + * `CONNECT` HTTP request method to establish a connection to the proxy server, + * and then the proxy server connects to the destination target and issues the + * HTTP request from the proxy server. + * + * `https:` requests have their socket connection upgraded to TLS once + * the connection to the proxy server has been established. + */ +class HttpsProxyAgent extends agent_base_1.Agent { + constructor(proxy, opts) { + super(opts); + this.options = { path: undefined }; + this.proxy = typeof proxy === 'string' ? new URL(proxy) : proxy; + this.proxyHeaders = opts?.headers ?? {}; + debug('Creating new HttpsProxyAgent instance: %o', this.proxy.href); + // Trim off the brackets from IPv6 addresses + const host = (this.proxy.hostname || this.proxy.host).replace(/^\[|\]$/g, ''); + const port = this.proxy.port + ? parseInt(this.proxy.port, 10) + : this.proxy.protocol === 'https:' + ? 443 + : 80; + this.connectOpts = { + // Attempt to negotiate http/1.1 for proxy servers that support http/2 + ALPNProtocols: ['http/1.1'], + ...(opts ? omit(opts, 'headers') : null), + host, + port, + }; + } + /** + * Called when the node-core HTTP client library is creating a + * new HTTP request. + */ + async connect(req, opts) { + const { proxy } = this; + if (!opts.host) { + throw new TypeError('No "host" provided'); + } + // Create a socket connection to the proxy server. + let socket; + if (proxy.protocol === 'https:') { + debug('Creating `tls.Socket`: %o', this.connectOpts); + socket = tls.connect(this.connectOpts); + } + else { + debug('Creating `net.Socket`: %o', this.connectOpts); + socket = net.connect(this.connectOpts); + } + const headers = typeof this.proxyHeaders === 'function' + ? this.proxyHeaders() + : { ...this.proxyHeaders }; + const host = net.isIPv6(opts.host) ? `[${opts.host}]` : opts.host; + let payload = `CONNECT ${host}:${opts.port} HTTP/1.1\r\n`; + // Inject the `Proxy-Authorization` header if necessary. + if (proxy.username || proxy.password) { + const auth = `${decodeURIComponent(proxy.username)}:${decodeURIComponent(proxy.password)}`; + headers['Proxy-Authorization'] = `Basic ${Buffer.from(auth).toString('base64')}`; + } + headers.Host = `${host}:${opts.port}`; + if (!headers['Proxy-Connection']) { + headers['Proxy-Connection'] = this.keepAlive + ? 
'Keep-Alive' + : 'close'; + } + for (const name of Object.keys(headers)) { + payload += `${name}: ${headers[name]}\r\n`; + } + const proxyResponsePromise = (0, parse_proxy_response_1.parseProxyResponse)(socket); + socket.write(`${payload}\r\n`); + const { connect, buffered } = await proxyResponsePromise; + req.emit('proxyConnect', connect); + this.emit('proxyConnect', connect, req); + if (connect.statusCode === 200) { + req.once('socket', resume); + if (opts.secureEndpoint) { + // The proxy is connecting to a TLS server, so upgrade + // this socket connection to a TLS connection. + debug('Upgrading socket connection to TLS'); + const servername = opts.servername || opts.host; + return tls.connect({ + ...omit(opts, 'host', 'path', 'port'), + socket, + servername: net.isIP(servername) ? undefined : servername, + }); + } + return socket; + } + // Some other status code that's not 200... need to re-play the HTTP + // header "data" events onto the socket once the HTTP machinery is + // attached so that the node core `http` can parse and handle the + // error status code. + // Close the original socket, and a new "fake" socket is returned + // instead, so that the proxy doesn't get the HTTP request + // written to it (which may contain `Authorization` headers or other + // sensitive data). + // + // See: https://hackerone.com/reports/541502 + socket.destroy(); + const fakeSocket = new net.Socket({ writable: false }); + fakeSocket.readable = true; + // Need to wait for the "socket" event to re-play the "data" events. + req.once('socket', (s) => { + debug('Replaying proxy buffer for failed request'); + (0, assert_1.default)(s.listenerCount('data') > 0); + // Replay the "buffered" Buffer onto the fake `socket`, since at + // this point the HTTP module machinery has been hooked up for + // the user. + s.push(buffered); + s.push(null); + }); + return fakeSocket; + } +} +HttpsProxyAgent.protocols = ['http', 'https']; +exports.HttpsProxyAgent = HttpsProxyAgent; +function resume(socket) { + socket.resume(); +} +function omit(obj, ...keys) { + const ret = {}; + let key; + for (key in obj) { + if (!keys.includes(key)) { + ret[key] = obj[key]; + } + } + return ret; +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/@npmcli/agent/node_modules/https-proxy-agent/dist/parse-proxy-response.js b/deps/npm/node_modules/@npmcli/agent/node_modules/https-proxy-agent/dist/parse-proxy-response.js new file mode 100644 index 00000000000000..a28f1d811805f8 --- /dev/null +++ b/deps/npm/node_modules/@npmcli/agent/node_modules/https-proxy-agent/dist/parse-proxy-response.js @@ -0,0 +1,98 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseProxyResponse = void 0; +const debug_1 = __importDefault(require("debug")); +const debug = (0, debug_1.default)('https-proxy-agent:parse-proxy-response'); +function parseProxyResponse(socket) { + return new Promise((resolve, reject) => { + // we need to buffer any HTTP traffic that happens with the proxy before we get + // the CONNECT response, so that if the response is anything other than an "200" + // response code, then we can re-play the "data" events on the socket once the + // HTTP parser is hooked up... 
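+        // For reference, a successful reply typically looks like
+        //   HTTP/1.1 200 Connection Established\r\n\r\n
+        // (the status text varies by proxy; the caller only checks the status code).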
+ let buffersLength = 0; + const buffers = []; + function read() { + const b = socket.read(); + if (b) + ondata(b); + else + socket.once('readable', read); + } + function cleanup() { + socket.removeListener('end', onend); + socket.removeListener('error', onerror); + socket.removeListener('readable', read); + } + function onend() { + cleanup(); + debug('onend'); + reject(new Error('Proxy connection ended before receiving CONNECT response')); + } + function onerror(err) { + cleanup(); + debug('onerror %o', err); + reject(err); + } + function ondata(b) { + buffers.push(b); + buffersLength += b.length; + const buffered = Buffer.concat(buffers, buffersLength); + const endOfHeaders = buffered.indexOf('\r\n\r\n'); + if (endOfHeaders === -1) { + // keep buffering + debug('have not received end of HTTP headers yet...'); + read(); + return; + } + const headerParts = buffered.slice(0, endOfHeaders).toString('ascii').split('\r\n'); + const firstLine = headerParts.shift(); + if (!firstLine) { + socket.destroy(); + return reject(new Error('No header received from proxy CONNECT response')); + } + const firstLineParts = firstLine.split(' '); + const statusCode = +firstLineParts[1]; + const statusText = firstLineParts.slice(2).join(' '); + const headers = {}; + for (const header of headerParts) { + if (!header) + continue; + const firstColon = header.indexOf(':'); + if (firstColon === -1) { + socket.destroy(); + return reject(new Error(`Invalid header from proxy CONNECT response: "${header}"`)); + } + const key = header.slice(0, firstColon).toLowerCase(); + const value = header.slice(firstColon + 1).trimStart(); + const current = headers[key]; + if (typeof current === 'string') { + headers[key] = [current, value]; + } + else if (Array.isArray(current)) { + current.push(value); + } + else { + headers[key] = value; + } + } + debug('got proxy server response: %o %o', firstLine, headers); + cleanup(); + resolve({ + connect: { + statusCode, + statusText, + headers, + }, + buffered, + }); + } + socket.on('error', onerror); + socket.on('end', onend); + read(); + }); +} +exports.parseProxyResponse = parseProxyResponse; +//# sourceMappingURL=parse-proxy-response.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/@npmcli/agent/node_modules/https-proxy-agent/package.json b/deps/npm/node_modules/@npmcli/agent/node_modules/https-proxy-agent/package.json new file mode 100644 index 00000000000000..fc5f988d3b02bf --- /dev/null +++ b/deps/npm/node_modules/@npmcli/agent/node_modules/https-proxy-agent/package.json @@ -0,0 +1,50 @@ +{ + "name": "https-proxy-agent", + "version": "7.0.1", + "description": "An HTTP(s) proxy `http.Agent` implementation for HTTPS", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "files": [ + "dist" + ], + "repository": { + "type": "git", + "url": "https://github.com/TooTallNate/proxy-agents.git", + "directory": "packages/https-proxy-agent" + }, + "keywords": [ + "https", + "proxy", + "endpoint", + "agent" + ], + "author": "Nathan Rajlich (http://n8.io/)", + "license": "MIT", + "dependencies": { + "agent-base": "^7.0.2", + "debug": "4" + }, + "devDependencies": { + "@types/async-retry": "^1.4.5", + "@types/debug": "4", + "@types/jest": "^29.5.1", + "@types/node": "^14.18.45", + "async-listen": "^3.0.0", + "async-retry": "^1.3.3", + "jest": "^29.5.0", + "ts-jest": "^29.1.0", + "typescript": "^5.0.4", + "proxy": "2.1.1", + "tsconfig": "0.0.0" + }, + "engines": { + "node": ">= 14" + }, + "scripts": { + "build": "tsc", + "test": "jest --env node --verbose --bail 
test/test.ts", + "test-e2e": "jest --env node --verbose --bail test/e2e.test.ts", + "lint": "eslint --ext .ts", + "pack": "node ../../scripts/pack.mjs" + } +} \ No newline at end of file diff --git a/deps/npm/node_modules/@npmcli/agent/node_modules/socks-proxy-agent/dist/index.js b/deps/npm/node_modules/@npmcli/agent/node_modules/socks-proxy-agent/dist/index.js new file mode 100644 index 00000000000000..8189e014c13a0d --- /dev/null +++ b/deps/npm/node_modules/@npmcli/agent/node_modules/socks-proxy-agent/dist/index.js @@ -0,0 +1,181 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SocksProxyAgent = void 0; +const socks_1 = require("socks"); +const agent_base_1 = require("agent-base"); +const debug_1 = __importDefault(require("debug")); +const dns = __importStar(require("dns")); +const net = __importStar(require("net")); +const tls = __importStar(require("tls")); +const debug = (0, debug_1.default)('socks-proxy-agent'); +function parseSocksURL(url) { + let lookup = false; + let type = 5; + const host = url.hostname; + // From RFC 1928, Section 3: https://tools.ietf.org/html/rfc1928#section-3 + // "The SOCKS service is conventionally located on TCP port 1080" + const port = parseInt(url.port, 10) || 1080; + // figure out if we want socks v4 or v5, based on the "protocol" used. + // Defaults to 5. + switch (url.protocol.replace(':', '')) { + case 'socks4': + lookup = true; + type = 4; + break; + // pass through + case 'socks4a': + type = 4; + break; + case 'socks5': + lookup = true; + type = 5; + break; + // pass through + case 'socks': // no version specified, default to 5h + type = 5; + break; + case 'socks5h': + type = 5; + break; + default: + throw new TypeError(`A "socks" protocol must be specified! Got: ${String(url.protocol)}`); + } + const proxy = { + host, + port, + type, + }; + if (url.username) { + Object.defineProperty(proxy, 'userId', { + value: decodeURIComponent(url.username), + enumerable: false, + }); + } + if (url.password != null) { + Object.defineProperty(proxy, 'password', { + value: decodeURIComponent(url.password), + enumerable: false, + }); + } + return { lookup, proxy }; +} +class SocksProxyAgent extends agent_base_1.Agent { + constructor(uri, opts) { + super(opts); + const url = typeof uri === 'string' ? 
new URL(uri) : uri; + const { proxy, lookup } = parseSocksURL(url); + this.shouldLookup = lookup; + this.proxy = proxy; + this.timeout = opts?.timeout ?? null; + } + /** + * Initiates a SOCKS connection to the specified SOCKS proxy server, + * which in turn connects to the specified remote host and port. + */ + async connect(req, opts) { + const { shouldLookup, proxy, timeout } = this; + if (!opts.host) { + throw new Error('No `host` defined!'); + } + let { host } = opts; + const { port, lookup: lookupFn = dns.lookup } = opts; + if (shouldLookup) { + // Client-side DNS resolution for "4" and "5" socks proxy versions. + host = await new Promise((resolve, reject) => { + // Use the request's custom lookup, if one was configured: + lookupFn(host, {}, (err, res) => { + if (err) { + reject(err); + } + else { + resolve(res); + } + }); + }); + } + const socksOpts = { + proxy, + destination: { + host, + port: typeof port === 'number' ? port : parseInt(port, 10), + }, + command: 'connect', + timeout: timeout ?? undefined, + }; + const cleanup = (tlsSocket) => { + req.destroy(); + socket.destroy(); + if (tlsSocket) + tlsSocket.destroy(); + }; + debug('Creating socks proxy connection: %o', socksOpts); + const { socket } = await socks_1.SocksClient.createConnection(socksOpts); + debug('Successfully created socks proxy connection'); + if (timeout !== null) { + socket.setTimeout(timeout); + socket.on('timeout', () => cleanup()); + } + if (opts.secureEndpoint) { + // The proxy is connecting to a TLS server, so upgrade + // this socket connection to a TLS connection. + debug('Upgrading socket connection to TLS'); + const servername = opts.servername || opts.host; + const tlsSocket = tls.connect({ + ...omit(opts, 'host', 'path', 'port'), + socket, + servername: net.isIP(servername) ? 
undefined : servername, + }); + tlsSocket.once('error', (error) => { + debug('Socket TLS error', error.message); + cleanup(tlsSocket); + }); + return tlsSocket; + } + return socket; + } +} +SocksProxyAgent.protocols = [ + 'socks', + 'socks4', + 'socks4a', + 'socks5', + 'socks5h', +]; +exports.SocksProxyAgent = SocksProxyAgent; +function omit(obj, ...keys) { + const ret = {}; + let key; + for (key in obj) { + if (!keys.includes(key)) { + ret[key] = obj[key]; + } + } + return ret; +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/@npmcli/agent/node_modules/socks-proxy-agent/package.json b/deps/npm/node_modules/@npmcli/agent/node_modules/socks-proxy-agent/package.json new file mode 100644 index 00000000000000..a6c7c0741641a1 --- /dev/null +++ b/deps/npm/node_modules/@npmcli/agent/node_modules/socks-proxy-agent/package.json @@ -0,0 +1,142 @@ +{ + "name": "socks-proxy-agent", + "version": "8.0.1", + "description": "A SOCKS proxy `http.Agent` implementation for HTTP and HTTPS", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "files": [ + "dist" + ], + "author": { + "email": "nathan@tootallnate.net", + "name": "Nathan Rajlich", + "url": "http://n8.io/" + }, + "contributors": [ + { + "name": "Kiko Beats", + "email": "josefrancisco.verdu@gmail.com" + }, + { + "name": "Josh Glazebrook", + "email": "josh@joshglazebrook.com" + }, + { + "name": "talmobi", + "email": "talmobi@users.noreply.github.com" + }, + { + "name": "Indospace.io", + "email": "justin@indospace.io" + }, + { + "name": "Kilian von Pflugk", + "email": "github@jumoog.io" + }, + { + "name": "Kyle", + "email": "admin@hk1229.cn" + }, + { + "name": "Matheus Fernandes", + "email": "matheus.frndes@gmail.com" + }, + { + "name": "Ricky Miller", + "email": "richardkazuomiller@gmail.com" + }, + { + "name": "Shantanu Sharma", + "email": "shantanu34@outlook.com" + }, + { + "name": "Tim Perry", + "email": "pimterry@gmail.com" + }, + { + "name": "Vadim Baryshev", + "email": "vadimbaryshev@gmail.com" + }, + { + "name": "jigu", + "email": "luo1257857309@gmail.com" + }, + { + "name": "Alba Mendez", + "email": "me@jmendeth.com" + }, + { + "name": "Дмитрий Гуденков", + "email": "Dimangud@rambler.ru" + }, + { + "name": "Andrei Bitca", + "email": "63638922+andrei-bitca-dc@users.noreply.github.com" + }, + { + "name": "Andrew Casey", + "email": "amcasey@users.noreply.github.com" + }, + { + "name": "Brandon Ros", + "email": "brandonros1@gmail.com" + }, + { + "name": "Dang Duy Thanh", + "email": "thanhdd.it@gmail.com" + }, + { + "name": "Dimitar Nestorov", + "email": "8790386+dimitarnestorov@users.noreply.github.com" + } + ], + "repository": { + "type": "git", + "url": "https://github.com/TooTallNate/proxy-agents.git", + "directory": "packages/socks-proxy-agent" + }, + "keywords": [ + "agent", + "http", + "https", + "proxy", + "socks", + "socks4", + "socks4a", + "socks5", + "socks5h" + ], + "dependencies": { + "agent-base": "^7.0.1", + "debug": "^4.3.4", + "socks": "^2.7.1" + }, + "devDependencies": { + "@types/async-retry": "^1.4.5", + "@types/debug": "^4.1.7", + "@types/dns2": "^2.0.3", + "@types/jest": "^29.5.1", + "@types/node": "^14.18.45", + "async-listen": "^2.1.0", + "async-retry": "^1.3.3", + "cacheable-lookup": "^6.1.0", + "dns2": "^2.1.0", + "jest": "^29.5.0", + "socksv5": "github:TooTallNate/socksv5#fix/dstSock-close-event", + "ts-jest": "^29.1.0", + "typescript": "^5.0.4", + "tsconfig": "0.0.0", + "proxy": "2.0.1" + }, + "engines": { + "node": ">= 14" + }, + "license": "MIT", + 
"scripts": { + "build": "tsc", + "test": "jest --env node --verbose --bail test/test.ts", + "test-e2e": "jest --env node --verbose --bail test/e2e.test.ts", + "lint": "eslint . --ext .ts", + "pack": "node ../../scripts/pack.mjs" + } +} \ No newline at end of file diff --git a/deps/npm/node_modules/@npmcli/agent/package.json b/deps/npm/node_modules/@npmcli/agent/package.json new file mode 100644 index 00000000000000..c0bf65719db9a6 --- /dev/null +++ b/deps/npm/node_modules/@npmcli/agent/package.json @@ -0,0 +1,64 @@ +{ + "name": "@npmcli/agent", + "version": "2.1.0", + "description": "the http/https agent used by the npm cli", + "main": "lib/index.js", + "scripts": { + "gencerts": "bash scripts/create-cert.sh", + "test": "tap", + "lint": "eslint \"**/*.js\"", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run lint -- --fix", + "snap": "tap", + "posttest": "npm run lint" + }, + "author": "GitHub Inc.", + "license": "ISC", + "bugs": { + "url": "https://github.com/npm/agent/issues" + }, + "homepage": "https://github.com/npm/agent#readme", + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^16.14.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.18.0", + "publish": "true", + "ciVersions": [ + "16.14.0", + "16.x", + "18.0.0", + "18.x" + ] + }, + "dependencies": { + "http-proxy-agent": "^7.0.0", + "https-proxy-agent": "^7.0.1", + "lru-cache": "^10.0.1", + "socks-proxy-agent": "^8.0.1" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.18.0", + "minipass-fetch": "^3.0.3", + "nock": "^13.2.7", + "simple-socks": "^2.2.2", + "tap": "^16.3.0" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/agent.git" + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/deps/npm/node_modules/@npmcli/arborist/package.json b/deps/npm/node_modules/@npmcli/arborist/package.json index a9ec27bacb0035..3b286e782149fc 100644 --- a/deps/npm/node_modules/@npmcli/arborist/package.json +++ b/deps/npm/node_modules/@npmcli/arborist/package.json @@ -1,39 +1,39 @@ { "name": "@npmcli/arborist", - "version": "6.3.0", + "version": "7.0.0", "description": "Manage node_modules trees", "dependencies": { "@isaacs/string-locale-compare": "^1.1.0", "@npmcli/fs": "^3.1.0", "@npmcli/installed-package-contents": "^2.0.2", "@npmcli/map-workspaces": "^3.0.2", - "@npmcli/metavuln-calculator": "^5.0.0", + "@npmcli/metavuln-calculator": "^7.0.0", "@npmcli/name-from-folder": "^2.0.0", "@npmcli/node-gyp": "^3.0.0", - "@npmcli/package-json": "^4.0.0", + "@npmcli/package-json": "^5.0.0", "@npmcli/query": "^3.0.0", - "@npmcli/run-script": "^6.0.0", + "@npmcli/run-script": "^7.0.1", "bin-links": "^4.0.1", - "cacache": "^17.0.4", + "cacache": "^18.0.0", "common-ancestor-path": "^1.0.1", - "hosted-git-info": "^6.1.1", + "hosted-git-info": "^7.0.0", "json-parse-even-better-errors": "^3.0.0", "json-stringify-nice": "^1.1.4", "minimatch": "^9.0.0", "nopt": "^7.0.0", - "npm-install-checks": "^6.0.0", - "npm-package-arg": "^10.1.0", - "npm-pick-manifest": "^8.0.1", - "npm-registry-fetch": "^14.0.3", + "npm-install-checks": "^6.2.0", + "npm-package-arg": "^11.0.0", + "npm-pick-manifest": "^9.0.0", + "npm-registry-fetch": "^16.0.0", "npmlog": "^7.0.1", - "pacote": "^15.0.8", + "pacote": "^17.0.4", "parse-conflict-json": "^3.0.0", "proc-log": "^3.0.0", 
"promise-all-reject-late": "^1.0.0", "promise-call-limit": "^1.0.2", "read-package-json-fast": "^3.0.2", "semver": "^7.3.7", - "ssri": "^10.0.1", + "ssri": "^10.0.5", "treeverse": "^3.0.0", "walk-up-path": "^3.0.1" }, @@ -42,8 +42,8 @@ "@npmcli/template-oss": "4.18.0", "benchmark": "^2.1.4", "minify-registry-metadata": "^3.0.0", - "nock": "^13.3.0", - "tap": "^16.3.4", + "nock": "^13.3.3", + "tap": "^16.3.8", "tar-stream": "^3.0.0", "tcompare": "^5.0.6" }, @@ -79,7 +79,6 @@ "test-env": [ "LC_ALL=sk" ], - "color": 1, "timeout": "360", "nyc-arg": [ "--exclude", @@ -87,11 +86,17 @@ ] }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", "version": "4.18.0", - "content": "../../scripts/template-oss/index.js" + "content": "../../scripts/template-oss/index.js", + "ciVersions": [ + "16.14.0", + "16.x", + "18.0.0", + "18.x" + ] } } diff --git a/deps/npm/node_modules/@npmcli/config/lib/definitions/definitions.js b/deps/npm/node_modules/@npmcli/config/lib/definitions/definitions.js index fe5cafa1922d9b..7f0edc7167a42c 100644 --- a/deps/npm/node_modules/@npmcli/config/lib/definitions/definitions.js +++ b/deps/npm/node_modules/@npmcli/config/lib/definitions/definitions.js @@ -64,7 +64,7 @@ const editor = process.env.EDITOR || const shell = isWindows ? process.env.ComSpec || 'cmd' : process.env.SHELL || 'sh' -const { tmpdir, networkInterfaces } = require('os') +const { networkInterfaces } = require('os') const getLocalAddresses = () => { try { return Object.values(networkInterfaces()).map( @@ -429,24 +429,6 @@ define('cert', { flatten, }) -define('ci-name', { - default: ciInfo.name ? ciInfo.name.toLowerCase().split(' ').join('-') : null, - defaultDescription: ` - The name of the current CI system, or \`null\` when not on a known CI - platform. - `, - type: [null, String], - deprecated: ` - This config is deprecated and will not be changeable in future version of npm. - `, - description: ` - The name of a continuous integration system. If not set explicitly, npm - will detect the current CI environment using the - [\`ci-info\`](http://npm.im/ci-info) module. - `, - flatten, -}) - define('cidr', { default: null, type: [null, String, Array], @@ -2127,24 +2109,6 @@ define('timing', { `, }) -define('tmp', { - default: tmpdir(), - defaultDescription: ` - The value returned by the Node.js \`os.tmpdir()\` method - - `, - type: path, - deprecated: ` - This setting is no longer used. npm stores temporary files in a special - location in the cache, and they are managed by - [\`cacache\`](http://npm.im/cacache). - `, - description: ` - Historically, the location where temporary files were stored. No longer - relevant. 
- `, -}) - define('umask', { default: 0, type: Umask, @@ -2222,7 +2186,7 @@ define('user-agent', { `, flatten (key, obj, flatOptions) { const value = obj[key] - const ciName = obj['ci-name'] + const ciName = ciInfo.name?.toLowerCase().split(' ').join('-') || null let inWorkspaces = false if (obj.workspaces || obj.workspace && obj.workspace.length) { inWorkspaces = true diff --git a/deps/npm/node_modules/@npmcli/config/lib/definitions/index.js b/deps/npm/node_modules/@npmcli/config/lib/definitions/index.js index 748f306bd2ce34..51c7aa7c352cfc 100644 --- a/deps/npm/node_modules/@npmcli/config/lib/definitions/index.js +++ b/deps/npm/node_modules/@npmcli/config/lib/definitions/index.js @@ -25,9 +25,6 @@ const flatten = (obj, flat = {}) => { : /* istanbul ignore next - not configurable property */ undefined flat.nodeBin = process.env.NODE || process.execPath - // XXX should this be sha512? is it even relevant? - flat.hashAlgorithm = 'sha1' - return flat } diff --git a/deps/npm/node_modules/@npmcli/config/lib/index.js b/deps/npm/node_modules/@npmcli/config/lib/index.js index 0e19d32e3f8b45..e46fe3d2aa2f3d 100644 --- a/deps/npm/node_modules/@npmcli/config/lib/index.js +++ b/deps/npm/node_modules/@npmcli/config/lib/index.js @@ -322,10 +322,6 @@ class Config { const { data } = this.data.get('default') - // the metrics-registry defaults to the current resolved value of - // the registry, unless overridden somewhere else. - settableGetter(data, 'metrics-registry', () => this.#get('registry')) - // if the prefix is set on cli, env, or userconfig, then we need to // default the globalconfig file to that location, instead of the default // global prefix. It's weird that `npm get globalconfig --prefix=/foo` @@ -614,7 +610,15 @@ class Config { process.emit('time', 'config:load:file:' + file) // only catch the error from readFile, not from the loadObject call await readFile(file, 'utf8').then( - data => this.#loadObject(ini.parse(data), type, file), + data => { + const parsedConfig = ini.parse(data) + if (type === 'project' && parsedConfig.prefix) { + // Log error if prefix is mentioned in project .npmrc + /* eslint-disable-next-line max-len */ + log.error('config', `prefix cannot be changed from project config: ${file}.`) + } + return this.#loadObject(parsedConfig, type, file) + }, er => this.#loadObject(null, type, file, er) ) process.emit('timeEnd', 'config:load:file:' + file) diff --git a/deps/npm/node_modules/@npmcli/config/package.json b/deps/npm/node_modules/@npmcli/config/package.json index 76d193ba23ec4c..93f2cd097d6f2d 100644 --- a/deps/npm/node_modules/@npmcli/config/package.json +++ b/deps/npm/node_modules/@npmcli/config/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/config", - "version": "6.2.1", + "version": "7.1.0", "files": [ "bin/", "lib/" @@ -33,7 +33,7 @@ "@npmcli/eslint-config": "^4.0.0", "@npmcli/mock-globals": "^1.0.0", "@npmcli/template-oss": "4.18.0", - "tap": "^16.3.4" + "tap": "^16.3.8" }, "dependencies": { "@npmcli/map-workspaces": "^3.0.2", diff --git a/deps/npm/node_modules/@npmcli/git/lib/revs.js b/deps/npm/node_modules/@npmcli/git/lib/revs.js index ee72370d5b7eca..ca14837de1b876 100644 --- a/deps/npm/node_modules/@npmcli/git/lib/revs.js +++ b/deps/npm/node_modules/@npmcli/git/lib/revs.js @@ -1,8 +1,8 @@ const pinflight = require('promise-inflight') const spawn = require('./spawn.js') -const LRU = require('lru-cache') +const { LRUCache } = require('lru-cache') -const revsCache = new LRU({ +const revsCache = new LRUCache({ max: 100, ttl: 5 * 60 * 1000, }) diff --git 
a/deps/npm/node_modules/@npmcli/git/package.json b/deps/npm/node_modules/@npmcli/git/package.json index eeba1c0415788c..6ab037d841cc34 100644 --- a/deps/npm/node_modules/@npmcli/git/package.json +++ b/deps/npm/node_modules/@npmcli/git/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/git", - "version": "4.1.0", + "version": "5.0.3", "main": "lib/index.js", "files": [ "bin/", @@ -31,27 +31,33 @@ }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.15.1", - "npm-package-arg": "^10.0.0", + "@npmcli/template-oss": "4.18.0", + "npm-package-arg": "^11.0.0", "slash": "^3.0.0", "tap": "^16.0.1" }, "dependencies": { - "@npmcli/promise-spawn": "^6.0.0", - "lru-cache": "^7.4.4", - "npm-pick-manifest": "^8.0.0", + "@npmcli/promise-spawn": "^7.0.0", + "lru-cache": "^10.0.1", + "npm-pick-manifest": "^9.0.0", "proc-log": "^3.0.0", "promise-inflight": "^1.0.1", "promise-retry": "^2.0.1", "semver": "^7.3.5", - "which": "^3.0.0" + "which": "^4.0.0" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.15.1", - "publish": true + "version": "4.18.0", + "publish": true, + "ciVersions": [ + "16.14.0", + "16.x", + "18.0.0", + "18.x" + ] } } diff --git a/deps/npm/node_modules/@npmcli/metavuln-calculator/package.json b/deps/npm/node_modules/@npmcli/metavuln-calculator/package.json index 18ebb68c4bfd1a..4d0af031d54148 100644 --- a/deps/npm/node_modules/@npmcli/metavuln-calculator/package.json +++ b/deps/npm/node_modules/@npmcli/metavuln-calculator/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/metavuln-calculator", - "version": "5.0.1", + "version": "7.0.0", "main": "lib/index.js", "files": [ "bin/", @@ -34,22 +34,28 @@ }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.13.0", + "@npmcli/template-oss": "4.18.0", "require-inject": "^1.4.4", "tap": "^16.0.1" }, "dependencies": { - "cacache": "^17.0.0", + "cacache": "^18.0.0", "json-parse-even-better-errors": "^3.0.0", - "pacote": "^15.0.0", + "pacote": "^17.0.0", "semver": "^7.3.5" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.13.0", - "publish": "true" + "version": "4.18.0", + "publish": "true", + "ciVersions": [ + "16.14.0", + "16.x", + "18.0.0", + "18.x" + ] } } diff --git a/deps/npm/node_modules/@npmcli/package-json/package.json b/deps/npm/node_modules/@npmcli/package-json/package.json index 33215b638db6ee..ab320e8695ca3d 100644 --- a/deps/npm/node_modules/@npmcli/package-json/package.json +++ b/deps/npm/node_modules/@npmcli/package-json/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/package-json", - "version": "4.0.1", + "version": "5.0.0", "description": "Programmatic API to update package.json", "main": "lib/index.js", "files": [ @@ -25,17 +25,17 @@ "license": "ISC", "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.17.0", + "@npmcli/template-oss": "4.18.0", "read-package-json": "^6.0.4", "read-package-json-fast": "^3.0.2", "tap": "^16.0.1" }, "dependencies": { - "@npmcli/git": "^4.1.0", + "@npmcli/git": "^5.0.0", "glob": "^10.2.2", - "hosted-git-info": "^6.1.1", + "hosted-git-info": "^7.0.0", "json-parse-even-better-errors": "^3.0.0", - "normalize-package-data": "^5.0.0", + "normalize-package-data": "^6.0.0", "proc-log": "^3.0.0", "semver": "^7.5.3" }, @@ -44,12 +44,18 @@ "url": "https://github.com/npm/package-json.git" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.17.0", - "publish": "true" + "version": "4.18.0", + "publish": "true", + "ciVersions": [ + "16.14.0", + "16.x", + "18.0.0", + "18.x" + ] }, "tap": { "nyc-arg": [ diff --git a/deps/npm/node_modules/@npmcli/promise-spawn/package.json b/deps/npm/node_modules/@npmcli/promise-spawn/package.json index 2080d9f5be9f04..ffd89f1083341c 100644 --- a/deps/npm/node_modules/@npmcli/promise-spawn/package.json +++ b/deps/npm/node_modules/@npmcli/promise-spawn/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/promise-spawn", - "version": "6.0.2", + "version": "7.0.0", "files": [ "bin/", "lib/" @@ -32,19 +32,25 @@ }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.11.0", - "minipass": "^4.0.0", + "@npmcli/template-oss": "4.18.0", "spawk": "^1.7.1", "tap": "^16.0.1" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.11.0" + "ciVersions": [ + "16.14.0", + "16.x", + "18.0.0", + "18.x" + ], + "version": "4.18.0", + "publish": true }, "dependencies": { - "which": "^3.0.0" + "which": "^4.0.0" } } diff --git a/deps/npm/node_modules/@npmcli/run-script/package.json b/deps/npm/node_modules/@npmcli/run-script/package.json index 38f6f72fa6ad90..7e7d2561571566 100644 --- a/deps/npm/node_modules/@npmcli/run-script/package.json +++ b/deps/npm/node_modules/@npmcli/run-script/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/run-script", - "version": "6.0.2", + "version": "7.0.1", "description": "Run a lifecycle script for a package (descendant of npm-lifecycle)", "author": "GitHub Inc.", "license": "ISC", @@ -16,16 +16,16 @@ }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.15.1", + "@npmcli/template-oss": "4.18.0", "require-inject": "^1.4.4", "tap": "^16.0.1" }, "dependencies": { "@npmcli/node-gyp": "^3.0.0", - "@npmcli/promise-spawn": "^6.0.0", + "@npmcli/promise-spawn": "^7.0.0", "node-gyp": "^9.0.0", "read-package-json-fast": "^3.0.0", - "which": "^3.0.0" + "which": "^4.0.0" }, "files": [ "bin/", @@ -37,11 +37,17 @@ "url": "https://github.com/npm/run-script.git" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.15.1", + "ciVersions": [ + "16.14.0", + "16.x", + "18.0.0", + "18.x" + ], + "version": "4.18.0", "publish": "true" }, "tap": { diff --git a/deps/npm/node_modules/@sigstore/bundle/LICENSE b/deps/npm/node_modules/@sigstore/bundle/LICENSE new file mode 100644 index 00000000000000..e9e7c1679a09df --- /dev/null +++ b/deps/npm/node_modules/@sigstore/bundle/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2023 The Sigstore Authors + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/deps/npm/node_modules/@sigstore/bundle/dist/build.js b/deps/npm/node_modules/@sigstore/bundle/dist/build.js new file mode 100644 index 00000000000000..6990f5451a2d33 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/bundle/dist/build.js @@ -0,0 +1,89 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.toDSSEBundle = exports.toMessageSignatureBundle = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +const protobuf_specs_1 = require("@sigstore/protobuf-specs"); +const bundle_1 = require("./bundle"); +// Message signature bundle - $case: 'messageSignature' +function toMessageSignatureBundle(options) { + return { + mediaType: bundle_1.BUNDLE_V02_MEDIA_TYPE, + content: { + $case: 'messageSignature', + messageSignature: { + messageDigest: { + algorithm: protobuf_specs_1.HashAlgorithm.SHA2_256, + digest: options.digest, + }, + signature: options.signature, + }, + }, + verificationMaterial: toVerificationMaterial(options), + }; +} +exports.toMessageSignatureBundle = toMessageSignatureBundle; +// DSSE envelope bundle - $case: 'dsseEnvelope' +function toDSSEBundle(options) { + return { + mediaType: bundle_1.BUNDLE_V02_MEDIA_TYPE, + content: { + $case: 'dsseEnvelope', + dsseEnvelope: toEnvelope(options), + }, + verificationMaterial: toVerificationMaterial(options), + }; +} +exports.toDSSEBundle = toDSSEBundle; +function toEnvelope(options) { + return { + payloadType: options.artifactType, + payload: options.artifact, + signatures: [toSignature(options)], + }; +} +function toSignature(options) { + return { + keyid: options.keyHint || '', + sig: options.signature, + }; +} +// Verification material +function toVerificationMaterial(options) { + return { + content: toKeyContent(options), + tlogEntries: [], + timestampVerificationData: { rfc3161Timestamps: [] }, + }; +} +function toKeyContent(options) { + if (options.certificate) { + return { + $case: 'x509CertificateChain', + x509CertificateChain: { + certificates: [{ rawBytes: options.certificate }], + }, + }; + } + else { + return { + $case: 'publicKey', + publicKey: { + hint: options.keyHint || '', + }, + }; + } +} diff --git a/deps/npm/node_modules/@sigstore/bundle/dist/bundle.js b/deps/npm/node_modules/@sigstore/bundle/dist/bundle.js new file mode 100644 index 00000000000000..8c01e2d19c5ecb --- /dev/null +++ b/deps/npm/node_modules/@sigstore/bundle/dist/bundle.js @@ -0,0 +1,22 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isBundleWithDsseEnvelope = exports.isBundleWithMessageSignature = exports.isBundleWithPublicKey = exports.isBundleWithCertificateChain = exports.BUNDLE_V02_MEDIA_TYPE = exports.BUNDLE_V01_MEDIA_TYPE = void 0; +exports.BUNDLE_V01_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.1'; +exports.BUNDLE_V02_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.2'; +// Type guards for bundle variants. 
+function isBundleWithCertificateChain(b) { + return b.verificationMaterial.content.$case === 'x509CertificateChain'; +} +exports.isBundleWithCertificateChain = isBundleWithCertificateChain; +function isBundleWithPublicKey(b) { + return b.verificationMaterial.content.$case === 'publicKey'; +} +exports.isBundleWithPublicKey = isBundleWithPublicKey; +function isBundleWithMessageSignature(b) { + return b.content.$case === 'messageSignature'; +} +exports.isBundleWithMessageSignature = isBundleWithMessageSignature; +function isBundleWithDsseEnvelope(b) { + return b.content.$case === 'dsseEnvelope'; +} +exports.isBundleWithDsseEnvelope = isBundleWithDsseEnvelope; diff --git a/deps/npm/node_modules/@sigstore/bundle/dist/error.js b/deps/npm/node_modules/@sigstore/bundle/dist/error.js new file mode 100644 index 00000000000000..f84295323b812e --- /dev/null +++ b/deps/npm/node_modules/@sigstore/bundle/dist/error.js @@ -0,0 +1,25 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ValidationError = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +class ValidationError extends Error { + constructor(message, fields) { + super(message); + this.fields = fields; + } +} +exports.ValidationError = ValidationError; diff --git a/deps/npm/node_modules/@sigstore/bundle/dist/index.js b/deps/npm/node_modules/@sigstore/bundle/dist/index.js new file mode 100644 index 00000000000000..b016a16d11cc00 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/bundle/dist/index.js @@ -0,0 +1,40 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isBundleV01 = exports.assertBundleV01 = exports.assertBundleLatest = exports.assertBundle = exports.envelopeToJSON = exports.envelopeFromJSON = exports.bundleToJSON = exports.bundleFromJSON = exports.ValidationError = exports.isBundleWithPublicKey = exports.isBundleWithMessageSignature = exports.isBundleWithDsseEnvelope = exports.isBundleWithCertificateChain = exports.BUNDLE_V02_MEDIA_TYPE = exports.BUNDLE_V01_MEDIA_TYPE = exports.toMessageSignatureBundle = exports.toDSSEBundle = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +var build_1 = require("./build"); +Object.defineProperty(exports, "toDSSEBundle", { enumerable: true, get: function () { return build_1.toDSSEBundle; } }); +Object.defineProperty(exports, "toMessageSignatureBundle", { enumerable: true, get: function () { return build_1.toMessageSignatureBundle; } }); +var bundle_1 = require("./bundle"); +Object.defineProperty(exports, "BUNDLE_V01_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V01_MEDIA_TYPE; } }); +Object.defineProperty(exports, "BUNDLE_V02_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V02_MEDIA_TYPE; } }); +Object.defineProperty(exports, "isBundleWithCertificateChain", { enumerable: true, get: function () { return bundle_1.isBundleWithCertificateChain; } }); +Object.defineProperty(exports, "isBundleWithDsseEnvelope", { enumerable: true, get: function () { return bundle_1.isBundleWithDsseEnvelope; } }); +Object.defineProperty(exports, "isBundleWithMessageSignature", { enumerable: true, get: function () { return bundle_1.isBundleWithMessageSignature; } }); +Object.defineProperty(exports, "isBundleWithPublicKey", { enumerable: true, get: function () { return bundle_1.isBundleWithPublicKey; } }); +var error_1 = require("./error"); +Object.defineProperty(exports, "ValidationError", { enumerable: true, get: function () { return error_1.ValidationError; } }); +var serialized_1 = require("./serialized"); +Object.defineProperty(exports, "bundleFromJSON", { enumerable: true, get: function () { return serialized_1.bundleFromJSON; } }); +Object.defineProperty(exports, "bundleToJSON", { enumerable: true, get: function () { return serialized_1.bundleToJSON; } }); +Object.defineProperty(exports, "envelopeFromJSON", { enumerable: true, get: function () { return serialized_1.envelopeFromJSON; } }); +Object.defineProperty(exports, "envelopeToJSON", { enumerable: true, get: function () { return serialized_1.envelopeToJSON; } }); +var validate_1 = require("./validate"); +Object.defineProperty(exports, "assertBundle", { enumerable: true, get: function () { return validate_1.assertBundle; } }); +Object.defineProperty(exports, "assertBundleLatest", { enumerable: true, get: function () { return validate_1.assertBundleLatest; } }); +Object.defineProperty(exports, "assertBundleV01", { enumerable: true, get: function () { return validate_1.assertBundleV01; } }); +Object.defineProperty(exports, "isBundleV01", { enumerable: true, get: function () { return validate_1.isBundleV01; } }); diff --git a/deps/npm/node_modules/@sigstore/bundle/dist/serialized.js b/deps/npm/node_modules/@sigstore/bundle/dist/serialized.js new file mode 100644 index 00000000000000..f1073358cacfd7 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/bundle/dist/serialized.js @@ -0,0 +1,38 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.envelopeToJSON = exports.envelopeFromJSON = exports.bundleToJSON = exports.bundleFromJSON = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +*/ +const protobuf_specs_1 = require("@sigstore/protobuf-specs"); +const validate_1 = require("./validate"); +const bundleFromJSON = (obj) => { + const bundle = protobuf_specs_1.Bundle.fromJSON(obj); + (0, validate_1.assertBundle)(bundle); + return bundle; +}; +exports.bundleFromJSON = bundleFromJSON; +const bundleToJSON = (bundle) => { + return protobuf_specs_1.Bundle.toJSON(bundle); +}; +exports.bundleToJSON = bundleToJSON; +const envelopeFromJSON = (obj) => { + return protobuf_specs_1.Envelope.fromJSON(obj); +}; +exports.envelopeFromJSON = envelopeFromJSON; +const envelopeToJSON = (envelope) => { + return protobuf_specs_1.Envelope.toJSON(envelope); +}; +exports.envelopeToJSON = envelopeToJSON; diff --git a/deps/npm/node_modules/sigstore/dist/identity/provider.js b/deps/npm/node_modules/@sigstore/bundle/dist/utility.js similarity index 100% rename from deps/npm/node_modules/sigstore/dist/identity/provider.js rename to deps/npm/node_modules/@sigstore/bundle/dist/utility.js diff --git a/deps/npm/node_modules/sigstore/dist/types/sigstore/validate.js b/deps/npm/node_modules/@sigstore/bundle/dist/validate.js similarity index 53% rename from deps/npm/node_modules/sigstore/dist/types/sigstore/validate.js rename to deps/npm/node_modules/@sigstore/bundle/dist/validate.js index a19d8ad3ec7021..015b6dfc58dd73 100644 --- a/deps/npm/node_modules/sigstore/dist/types/sigstore/validate.js +++ b/deps/npm/node_modules/@sigstore/bundle/dist/validate.js @@ -1,6 +1,6 @@ "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); -exports.assertValidBundle = void 0; +exports.assertBundleLatest = exports.isBundleV01 = exports.assertBundleV01 = exports.assertBundle = void 0; /* Copyright 2023 The Sigstore Authors. @@ -16,13 +16,19 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ -const error_1 = require("../../error"); +const bundle_1 = require("./bundle"); +const error_1 = require("./error"); // Performs basic validation of a Sigstore bundle to ensure that all required // fields are populated. This is not a complete validation of the bundle, but // rather a check that the bundle is in a valid state to be processed by the // rest of the code. -function assertValidBundle(b) { +function assertBundle(b) { const invalidValues = []; + // Media type validation + if (b.mediaType === undefined || + !b.mediaType.startsWith('application/vnd.dev.sigstore.bundle+json;version=')) { + invalidValues.push('mediaType'); + } // Content-related validation if (b.content === undefined) { invalidValues.push('content'); @@ -80,9 +86,75 @@ function assertValidBundle(b) { break; } } + if (b.verificationMaterial.tlogEntries === undefined) { + invalidValues.push('verificationMaterial.tlogEntries'); + } + else { + if (b.verificationMaterial.tlogEntries.length > 0) { + b.verificationMaterial.tlogEntries.forEach((entry, i) => { + if (entry.logId === undefined) { + invalidValues.push(`verificationMaterial.tlogEntries[${i}].logId`); + } + if (entry.kindVersion === undefined) { + invalidValues.push(`verificationMaterial.tlogEntries[${i}].kindVersion`); + } + }); + } + } + } + if (invalidValues.length > 0) { + throw new error_1.ValidationError('invalid bundle', invalidValues); + } +} +exports.assertBundle = assertBundle; +// Asserts that the given bundle conforms to the v0.1 bundle format. 
+function assertBundleV01(b) { + const invalidValues = []; + if (b.mediaType && b.mediaType !== bundle_1.BUNDLE_V01_MEDIA_TYPE) { + invalidValues.push('mediaType'); + } + if (b.verificationMaterial && + b.verificationMaterial.tlogEntries?.length > 0) { + b.verificationMaterial.tlogEntries.forEach((entry, i) => { + if (entry.inclusionPromise === undefined) { + invalidValues.push(`verificationMaterial.tlogEntries[${i}].inclusionPromise`); + } + }); + } + if (invalidValues.length > 0) { + throw new error_1.ValidationError('invalid v0.1 bundle', invalidValues); + } +} +exports.assertBundleV01 = assertBundleV01; +// Type guard to determine if Bundle is a v0.1 bundle. +function isBundleV01(b) { + try { + assertBundleV01(b); + return true; + } + catch (e) { + return false; + } +} +exports.isBundleV01 = isBundleV01; +// Asserts that the given bundle conforms to the newest (0.2) bundle format. +function assertBundleLatest(b) { + const invalidValues = []; + if (b.verificationMaterial && + b.verificationMaterial.tlogEntries?.length > 0) { + b.verificationMaterial.tlogEntries.forEach((entry, i) => { + if (entry.inclusionProof === undefined) { + invalidValues.push(`verificationMaterial.tlogEntries[${i}].inclusionProof`); + } + else { + if (entry.inclusionProof.checkpoint === undefined) { + invalidValues.push(`verificationMaterial.tlogEntries[${i}].inclusionProof.checkpoint`); + } + } + }); } if (invalidValues.length > 0) { - throw new error_1.ValidationError(`invalid/missing bundle values: ${invalidValues.join(', ')}`); + throw new error_1.ValidationError('invalid v0.2 bundle', invalidValues); } } -exports.assertValidBundle = assertValidBundle; +exports.assertBundleLatest = assertBundleLatest; diff --git a/deps/npm/node_modules/@sigstore/bundle/package.json b/deps/npm/node_modules/@sigstore/bundle/package.json new file mode 100644 index 00000000000000..7e26efa11a21de --- /dev/null +++ b/deps/npm/node_modules/@sigstore/bundle/package.json @@ -0,0 +1,35 @@ +{ + "name": "@sigstore/bundle", + "version": "2.1.0", + "description": "Sigstore bundle type", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "scripts": { + "clean": "shx rm -rf dist *.tsbuildinfo", + "build": "tsc --build", + "test": "jest" + }, + "files": [ + "dist", + "store" + ], + "author": "bdehamer@github.com", + "license": "Apache-2.0", + "repository": { + "type": "git", + "url": "git+https://github.com/sigstore/sigstore-js.git" + }, + "bugs": { + "url": "https://github.com/sigstore/sigstore-js/issues" + }, + "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/bundle#readme", + "publishConfig": { + "provenance": true + }, + "dependencies": { + "@sigstore/protobuf-specs": "^0.2.1" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } +} diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js index 715bb1aa5b57d5..0c367a8384454c 100644 --- a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js +++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js @@ -44,7 +44,7 @@ exports.Signature = { return obj; }, }; -var globalThis = (() => { +var tsProtoGlobalThis = (() => { if (typeof globalThis !== "undefined") { return globalThis; } @@ -60,11 +60,11 @@ var globalThis = (() => { throw "Unable to locate global object"; })(); function bytesFromBase64(b64) { - if (globalThis.Buffer) { - return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + if 
(tsProtoGlobalThis.Buffer) { + return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64")); } else { - const bin = globalThis.atob(b64); + const bin = tsProtoGlobalThis.atob(b64); const arr = new Uint8Array(bin.length); for (let i = 0; i < bin.length; ++i) { arr[i] = bin.charCodeAt(i); @@ -73,15 +73,15 @@ function bytesFromBase64(b64) { } } function base64FromBytes(arr) { - if (globalThis.Buffer) { - return globalThis.Buffer.from(arr).toString("base64"); + if (tsProtoGlobalThis.Buffer) { + return tsProtoGlobalThis.Buffer.from(arr).toString("base64"); } else { const bin = []; arr.forEach((byte) => { bin.push(String.fromCharCode(byte)); }); - return globalThis.btoa(bin.join("")); + return tsProtoGlobalThis.btoa(bin.join("")); } } function isSet(value) { diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js new file mode 100644 index 00000000000000..073093b8371a8f --- /dev/null +++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js @@ -0,0 +1,185 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.CloudEventBatch = exports.CloudEvent_CloudEventAttributeValue = exports.CloudEvent_AttributesEntry = exports.CloudEvent = void 0; +/* eslint-disable */ +const any_1 = require("./google/protobuf/any"); +const timestamp_1 = require("./google/protobuf/timestamp"); +function createBaseCloudEvent() { + return { id: "", source: "", specVersion: "", type: "", attributes: {}, data: undefined }; +} +exports.CloudEvent = { + fromJSON(object) { + return { + id: isSet(object.id) ? String(object.id) : "", + source: isSet(object.source) ? String(object.source) : "", + specVersion: isSet(object.specVersion) ? String(object.specVersion) : "", + type: isSet(object.type) ? String(object.type) : "", + attributes: isObject(object.attributes) + ? Object.entries(object.attributes).reduce((acc, [key, value]) => { + acc[key] = exports.CloudEvent_CloudEventAttributeValue.fromJSON(value); + return acc; + }, {}) + : {}, + data: isSet(object.binaryData) + ? { $case: "binaryData", binaryData: Buffer.from(bytesFromBase64(object.binaryData)) } + : isSet(object.textData) + ? { $case: "textData", textData: String(object.textData) } + : isSet(object.protoData) + ? { $case: "protoData", protoData: any_1.Any.fromJSON(object.protoData) } + : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.id !== undefined && (obj.id = message.id); + message.source !== undefined && (obj.source = message.source); + message.specVersion !== undefined && (obj.specVersion = message.specVersion); + message.type !== undefined && (obj.type = message.type); + obj.attributes = {}; + if (message.attributes) { + Object.entries(message.attributes).forEach(([k, v]) => { + obj.attributes[k] = exports.CloudEvent_CloudEventAttributeValue.toJSON(v); + }); + } + message.data?.$case === "binaryData" && + (obj.binaryData = message.data?.binaryData !== undefined ? base64FromBytes(message.data?.binaryData) : undefined); + message.data?.$case === "textData" && (obj.textData = message.data?.textData); + message.data?.$case === "protoData" && + (obj.protoData = message.data?.protoData ? any_1.Any.toJSON(message.data?.protoData) : undefined); + return obj; + }, +}; +function createBaseCloudEvent_AttributesEntry() { + return { key: "", value: undefined }; +} +exports.CloudEvent_AttributesEntry = { + fromJSON(object) { + return { + key: isSet(object.key) ? 
String(object.key) : "", + value: isSet(object.value) ? exports.CloudEvent_CloudEventAttributeValue.fromJSON(object.value) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.key !== undefined && (obj.key = message.key); + message.value !== undefined && + (obj.value = message.value ? exports.CloudEvent_CloudEventAttributeValue.toJSON(message.value) : undefined); + return obj; + }, +}; +function createBaseCloudEvent_CloudEventAttributeValue() { + return { attr: undefined }; +} +exports.CloudEvent_CloudEventAttributeValue = { + fromJSON(object) { + return { + attr: isSet(object.ceBoolean) + ? { $case: "ceBoolean", ceBoolean: Boolean(object.ceBoolean) } + : isSet(object.ceInteger) + ? { $case: "ceInteger", ceInteger: Number(object.ceInteger) } + : isSet(object.ceString) + ? { $case: "ceString", ceString: String(object.ceString) } + : isSet(object.ceBytes) + ? { $case: "ceBytes", ceBytes: Buffer.from(bytesFromBase64(object.ceBytes)) } + : isSet(object.ceUri) + ? { $case: "ceUri", ceUri: String(object.ceUri) } + : isSet(object.ceUriRef) + ? { $case: "ceUriRef", ceUriRef: String(object.ceUriRef) } + : isSet(object.ceTimestamp) + ? { $case: "ceTimestamp", ceTimestamp: fromJsonTimestamp(object.ceTimestamp) } + : undefined, + }; + }, + toJSON(message) { + const obj = {}; + message.attr?.$case === "ceBoolean" && (obj.ceBoolean = message.attr?.ceBoolean); + message.attr?.$case === "ceInteger" && (obj.ceInteger = Math.round(message.attr?.ceInteger)); + message.attr?.$case === "ceString" && (obj.ceString = message.attr?.ceString); + message.attr?.$case === "ceBytes" && + (obj.ceBytes = message.attr?.ceBytes !== undefined ? base64FromBytes(message.attr?.ceBytes) : undefined); + message.attr?.$case === "ceUri" && (obj.ceUri = message.attr?.ceUri); + message.attr?.$case === "ceUriRef" && (obj.ceUriRef = message.attr?.ceUriRef); + message.attr?.$case === "ceTimestamp" && (obj.ceTimestamp = message.attr?.ceTimestamp.toISOString()); + return obj; + }, +}; +function createBaseCloudEventBatch() { + return { events: [] }; +} +exports.CloudEventBatch = { + fromJSON(object) { + return { events: Array.isArray(object?.events) ? object.events.map((e) => exports.CloudEvent.fromJSON(e)) : [] }; + }, + toJSON(message) { + const obj = {}; + if (message.events) { + obj.events = message.events.map((e) => e ? 
exports.CloudEvent.toJSON(e) : undefined); + } + else { + obj.events = []; + } + return obj; + }, +}; +var tsProtoGlobalThis = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); +function bytesFromBase64(b64) { + if (tsProtoGlobalThis.Buffer) { + return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64")); + } + else { + const bin = tsProtoGlobalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} +function base64FromBytes(arr) { + if (tsProtoGlobalThis.Buffer) { + return tsProtoGlobalThis.Buffer.from(arr).toString("base64"); + } + else { + const bin = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return tsProtoGlobalThis.btoa(bin.join("")); + } +} +function fromTimestamp(t) { + let millis = Number(t.seconds) * 1000; + millis += t.nanos / 1000000; + return new Date(millis); +} +function fromJsonTimestamp(o) { + if (o instanceof Date) { + return o; + } + else if (typeof o === "string") { + return new Date(o); + } + else { + return fromTimestamp(timestamp_1.Timestamp.fromJSON(o)); + } +} +function isObject(value) { + return typeof value === "object" && value !== null; +} +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js index f9b57cccdc3d3b..da627499ad7659 100644 --- a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js +++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js @@ -77,7 +77,7 @@ function fieldBehaviorFromJSON(object) { case "UNORDERED_LIST": return FieldBehavior.UNORDERED_LIST; default: - throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior"); + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior"); } } exports.fieldBehaviorFromJSON = fieldBehaviorFromJSON; @@ -98,11 +98,11 @@ function fieldBehaviorToJSON(object) { case FieldBehavior.UNORDERED_LIST: return "UNORDERED_LIST"; default: - throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior"); + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior"); } } exports.fieldBehaviorToJSON = fieldBehaviorToJSON; -var globalThis = (() => { +var tsProtoGlobalThis = (() => { if (typeof globalThis !== "undefined") { return globalThis; } diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js new file mode 100644 index 00000000000000..6b3f3c97a66476 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js @@ -0,0 +1,65 @@ +"use strict"; +/* eslint-disable */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Any = void 0; +function createBaseAny() { + return { typeUrl: "", value: Buffer.alloc(0) }; +} +exports.Any = { + fromJSON(object) { + return { + typeUrl: 
isSet(object.typeUrl) ? String(object.typeUrl) : "", + value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0), + }; + }, + toJSON(message) { + const obj = {}; + message.typeUrl !== undefined && (obj.typeUrl = message.typeUrl); + message.value !== undefined && + (obj.value = base64FromBytes(message.value !== undefined ? message.value : Buffer.alloc(0))); + return obj; + }, +}; +var tsProtoGlobalThis = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); +function bytesFromBase64(b64) { + if (tsProtoGlobalThis.Buffer) { + return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64")); + } + else { + const bin = tsProtoGlobalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} +function base64FromBytes(arr) { + if (tsProtoGlobalThis.Buffer) { + return tsProtoGlobalThis.Buffer.from(arr).toString("base64"); + } + else { + const bin = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return tsProtoGlobalThis.btoa(bin.join("")); + } +} +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js index b8cfc86ab99aad..d429aac8460436 100644 --- a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js +++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js @@ -102,7 +102,7 @@ function fieldDescriptorProto_TypeFromJSON(object) { case "TYPE_SINT64": return FieldDescriptorProto_Type.TYPE_SINT64; default: - throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type"); + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type"); } } exports.fieldDescriptorProto_TypeFromJSON = fieldDescriptorProto_TypeFromJSON; @@ -145,7 +145,7 @@ function fieldDescriptorProto_TypeToJSON(object) { case FieldDescriptorProto_Type.TYPE_SINT64: return "TYPE_SINT64"; default: - throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type"); + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type"); } } exports.fieldDescriptorProto_TypeToJSON = fieldDescriptorProto_TypeToJSON; @@ -168,7 +168,7 @@ function fieldDescriptorProto_LabelFromJSON(object) { case "LABEL_REPEATED": return FieldDescriptorProto_Label.LABEL_REPEATED; default: - throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label"); + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label"); } } exports.fieldDescriptorProto_LabelFromJSON = fieldDescriptorProto_LabelFromJSON; @@ -181,7 +181,7 @@ function fieldDescriptorProto_LabelToJSON(object) { case FieldDescriptorProto_Label.LABEL_REPEATED: return "LABEL_REPEATED"; default: - throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label"); + throw new 
tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label"); } } exports.fieldDescriptorProto_LabelToJSON = fieldDescriptorProto_LabelToJSON; @@ -207,7 +207,7 @@ function fileOptions_OptimizeModeFromJSON(object) { case "LITE_RUNTIME": return FileOptions_OptimizeMode.LITE_RUNTIME; default: - throw new globalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode"); + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode"); } } exports.fileOptions_OptimizeModeFromJSON = fileOptions_OptimizeModeFromJSON; @@ -220,7 +220,7 @@ function fileOptions_OptimizeModeToJSON(object) { case FileOptions_OptimizeMode.LITE_RUNTIME: return "LITE_RUNTIME"; default: - throw new globalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode"); + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode"); } } exports.fileOptions_OptimizeModeToJSON = fileOptions_OptimizeModeToJSON; @@ -243,7 +243,7 @@ function fieldOptions_CTypeFromJSON(object) { case "STRING_PIECE": return FieldOptions_CType.STRING_PIECE; default: - throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType"); + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType"); } } exports.fieldOptions_CTypeFromJSON = fieldOptions_CTypeFromJSON; @@ -256,7 +256,7 @@ function fieldOptions_CTypeToJSON(object) { case FieldOptions_CType.STRING_PIECE: return "STRING_PIECE"; default: - throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType"); + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType"); } } exports.fieldOptions_CTypeToJSON = fieldOptions_CTypeToJSON; @@ -281,7 +281,7 @@ function fieldOptions_JSTypeFromJSON(object) { case "JS_NUMBER": return FieldOptions_JSType.JS_NUMBER; default: - throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType"); + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType"); } } exports.fieldOptions_JSTypeFromJSON = fieldOptions_JSTypeFromJSON; @@ -294,7 +294,7 @@ function fieldOptions_JSTypeToJSON(object) { case FieldOptions_JSType.JS_NUMBER: return "JS_NUMBER"; default: - throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType"); + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType"); } } exports.fieldOptions_JSTypeToJSON = fieldOptions_JSTypeToJSON; @@ -323,7 +323,7 @@ function methodOptions_IdempotencyLevelFromJSON(object) { case "IDEMPOTENT": return MethodOptions_IdempotencyLevel.IDEMPOTENT; default: - throw new globalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel"); + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel"); } } exports.methodOptions_IdempotencyLevelFromJSON = methodOptions_IdempotencyLevelFromJSON; @@ -336,7 +336,7 @@ function methodOptions_IdempotencyLevelToJSON(object) { case MethodOptions_IdempotencyLevel.IDEMPOTENT: return "IDEMPOTENT"; default: - throw new globalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel"); + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum 
MethodOptions_IdempotencyLevel"); } } exports.methodOptions_IdempotencyLevelToJSON = methodOptions_IdempotencyLevelToJSON; @@ -1263,7 +1263,7 @@ exports.GeneratedCodeInfo_Annotation = { return obj; }, }; -var globalThis = (() => { +var tsProtoGlobalThis = (() => { if (typeof globalThis !== "undefined") { return globalThis; } @@ -1279,11 +1279,11 @@ var globalThis = (() => { throw "Unable to locate global object"; })(); function bytesFromBase64(b64) { - if (globalThis.Buffer) { - return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + if (tsProtoGlobalThis.Buffer) { + return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64")); } else { - const bin = globalThis.atob(b64); + const bin = tsProtoGlobalThis.atob(b64); const arr = new Uint8Array(bin.length); for (let i = 0; i < bin.length; ++i) { arr[i] = bin.charCodeAt(i); @@ -1292,15 +1292,15 @@ function bytesFromBase64(b64) { } } function base64FromBytes(arr) { - if (globalThis.Buffer) { - return globalThis.Buffer.from(arr).toString("base64"); + if (tsProtoGlobalThis.Buffer) { + return tsProtoGlobalThis.Buffer.from(arr).toString("base64"); } else { const bin = []; arr.forEach((byte) => { bin.push(String.fromCharCode(byte)); }); - return globalThis.btoa(bin.join("")); + return tsProtoGlobalThis.btoa(bin.join("")); } } function isSet(value) { diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js index 63ace8db580cc8..bcd654e9154b92 100644 --- a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js +++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js @@ -26,7 +26,7 @@ function hashAlgorithmFromJSON(object) { case "SHA2_256": return HashAlgorithm.SHA2_256; default: - throw new globalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm"); + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm"); } } exports.hashAlgorithmFromJSON = hashAlgorithmFromJSON; @@ -37,7 +37,7 @@ function hashAlgorithmToJSON(object) { case HashAlgorithm.SHA2_256: return "SHA2_256"; default: - throw new globalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm"); + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm"); } } exports.hashAlgorithmToJSON = hashAlgorithmToJSON; @@ -92,7 +92,7 @@ function publicKeyDetailsFromJSON(object) { case "PKIX_ED25519": return PublicKeyDetails.PKIX_ED25519; default: - throw new globalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails"); + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails"); } } exports.publicKeyDetailsFromJSON = publicKeyDetailsFromJSON; @@ -115,7 +115,7 @@ function publicKeyDetailsToJSON(object) { case PublicKeyDetails.PKIX_ED25519: return "PKIX_ED25519"; default: - throw new globalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails"); + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails"); } } exports.publicKeyDetailsToJSON = publicKeyDetailsToJSON; @@ -146,7 +146,7 @@ function subjectAlternativeNameTypeFromJSON(object) { case "OTHER_NAME": return SubjectAlternativeNameType.OTHER_NAME; default: - throw new globalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType"); + throw new 
tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType"); } } exports.subjectAlternativeNameTypeFromJSON = subjectAlternativeNameTypeFromJSON; @@ -161,7 +161,7 @@ function subjectAlternativeNameTypeToJSON(object) { case SubjectAlternativeNameType.OTHER_NAME: return "OTHER_NAME"; default: - throw new globalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType"); + throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType"); } } exports.subjectAlternativeNameTypeToJSON = subjectAlternativeNameTypeToJSON; @@ -396,7 +396,7 @@ exports.TimeRange = { return obj; }, }; -var globalThis = (() => { +var tsProtoGlobalThis = (() => { if (typeof globalThis !== "undefined") { return globalThis; } @@ -412,11 +412,11 @@ var globalThis = (() => { throw "Unable to locate global object"; })(); function bytesFromBase64(b64) { - if (globalThis.Buffer) { - return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + if (tsProtoGlobalThis.Buffer) { + return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64")); } else { - const bin = globalThis.atob(b64); + const bin = tsProtoGlobalThis.atob(b64); const arr = new Uint8Array(bin.length); for (let i = 0; i < bin.length; ++i) { arr[i] = bin.charCodeAt(i); @@ -425,15 +425,15 @@ function bytesFromBase64(b64) { } } function base64FromBytes(arr) { - if (globalThis.Buffer) { - return globalThis.Buffer.from(arr).toString("base64"); + if (tsProtoGlobalThis.Buffer) { + return tsProtoGlobalThis.Buffer.from(arr).toString("base64"); } else { const bin = []; arr.forEach((byte) => { bin.push(String.fromCharCode(byte)); }); - return globalThis.btoa(bin.join("")); + return tsProtoGlobalThis.btoa(bin.join("")); } } function fromTimestamp(t) { diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js index bffc7700edbec1..398193b2075a70 100644 --- a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js +++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js @@ -122,7 +122,7 @@ exports.TransparencyLogEntry = { return obj; }, }; -var globalThis = (() => { +var tsProtoGlobalThis = (() => { if (typeof globalThis !== "undefined") { return globalThis; } @@ -138,11 +138,11 @@ var globalThis = (() => { throw "Unable to locate global object"; })(); function bytesFromBase64(b64) { - if (globalThis.Buffer) { - return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + if (tsProtoGlobalThis.Buffer) { + return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64")); } else { - const bin = globalThis.atob(b64); + const bin = tsProtoGlobalThis.atob(b64); const arr = new Uint8Array(bin.length); for (let i = 0; i < bin.length; ++i) { arr[i] = bin.charCodeAt(i); @@ -151,15 +151,15 @@ function bytesFromBase64(b64) { } } function base64FromBytes(arr) { - if (globalThis.Buffer) { - return globalThis.Buffer.from(arr).toString("base64"); + if (tsProtoGlobalThis.Buffer) { + return tsProtoGlobalThis.Buffer.from(arr).toString("base64"); } else { const bin = []; arr.forEach((byte) => { bin.push(String.fromCharCode(byte)); }); - return globalThis.btoa(bin.join("")); + return tsProtoGlobalThis.btoa(bin.join("")); } } function isSet(value) { diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js 
b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js index b99a305ba53172..8a72b897618697 100644 --- a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js +++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js @@ -228,7 +228,7 @@ exports.Input = { return obj; }, }; -var globalThis = (() => { +var tsProtoGlobalThis = (() => { if (typeof globalThis !== "undefined") { return globalThis; } @@ -244,11 +244,11 @@ var globalThis = (() => { throw "Unable to locate global object"; })(); function bytesFromBase64(b64) { - if (globalThis.Buffer) { - return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + if (tsProtoGlobalThis.Buffer) { + return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64")); } else { - const bin = globalThis.atob(b64); + const bin = tsProtoGlobalThis.atob(b64); const arr = new Uint8Array(bin.length); for (let i = 0; i < bin.length; ++i) { arr[i] = bin.charCodeAt(i); @@ -257,15 +257,15 @@ function bytesFromBase64(b64) { } } function base64FromBytes(arr) { - if (globalThis.Buffer) { - return globalThis.Buffer.from(arr).toString("base64"); + if (tsProtoGlobalThis.Buffer) { + return tsProtoGlobalThis.Buffer.from(arr).toString("base64"); } else { const bin = []; arr.forEach((byte) => { bin.push(String.fromCharCode(byte)); }); - return globalThis.btoa(bin.join("")); + return tsProtoGlobalThis.btoa(bin.join("")); } } function isSet(value) { diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/package.json b/deps/npm/node_modules/@sigstore/protobuf-specs/package.json index 7cb4aa9c5364ff..450abb157f31ab 100644 --- a/deps/npm/node_modules/@sigstore/protobuf-specs/package.json +++ b/deps/npm/node_modules/@sigstore/protobuf-specs/package.json @@ -1,6 +1,6 @@ { "name": "@sigstore/protobuf-specs", - "version": "0.1.0", + "version": "0.2.1", "description": "code-signing for npm packages", "main": "dist/index.js", "types": "dist/index.d.ts", diff --git a/deps/npm/node_modules/@sigstore/sign/LICENSE b/deps/npm/node_modules/@sigstore/sign/LICENSE new file mode 100644 index 00000000000000..e9e7c1679a09df --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2023 The Sigstore Authors + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/deps/npm/node_modules/@sigstore/sign/dist/bundler/base.js b/deps/npm/node_modules/@sigstore/sign/dist/bundler/base.js new file mode 100644 index 00000000000000..61d5eba4568a35 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/dist/bundler/base.js @@ -0,0 +1,50 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.BaseBundleBuilder = void 0; +// BaseBundleBuilder is a base class for BundleBuilder implementations. It +// provides the basic workflow for signing and witnessing an artifact. +// Subclasses must implement the `package` method to assemble a valid bundle +// with the generated signature and verification material. +class BaseBundleBuilder { + constructor(options) { + this.signer = options.signer; + this.witnesses = options.witnesses; + } + // Executes the signing/witnessing process for the given artifact. 
+ async create(artifact) { + const signature = await this.prepare(artifact).then((blob) => this.signer.sign(blob)); + const bundle = await this.package(artifact, signature); + // Invoke all of the witnesses in parallel + const verificationMaterials = await Promise.all(this.witnesses.map((witness) => witness.testify(bundle.content, publicKey(signature.key)))); + // Collect the verification material from all of the witnesses + const tlogEntryList = []; + const timestampList = []; + verificationMaterials.forEach(({ tlogEntries, rfc3161Timestamps }) => { + tlogEntryList.push(...(tlogEntries ?? [])); + timestampList.push(...(rfc3161Timestamps ?? [])); + }); + // Merge the collected verification material into the bundle + bundle.verificationMaterial.tlogEntries = tlogEntryList; + bundle.verificationMaterial.timestampVerificationData = { + rfc3161Timestamps: timestampList, + }; + return bundle; + } + // Override this function to apply any pre-signing transformations to the + // artifact. The returned buffer will be signed by the signer. The default + // implementation simply returns the artifact data. + async prepare(artifact) { + return artifact.data; + } +} +exports.BaseBundleBuilder = BaseBundleBuilder; +// Extracts the public key from a KeyMaterial. Returns either the public key +// or the certificate, depending on the type of key material. +function publicKey(key) { + switch (key.$case) { + case 'publicKey': + return key.publicKey; + case 'x509Certificate': + return key.certificate; + } +} diff --git a/deps/npm/node_modules/@sigstore/sign/dist/bundler/bundle.js b/deps/npm/node_modules/@sigstore/sign/dist/bundler/bundle.js new file mode 100644 index 00000000000000..f01aac252b304a --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/dist/bundler/bundle.js @@ -0,0 +1,70 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.toDSSEBundle = exports.toMessageSignatureBundle = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +const sigstore = __importStar(require("@sigstore/bundle")); +const util_1 = require("../util"); +// Helper functions for assembling the parts of a Sigstore bundle +// Message signature bundle - $case: 'messageSignature' +function toMessageSignatureBundle(artifact, signature) { + const digest = util_1.crypto.hash(artifact.data); + return sigstore.toMessageSignatureBundle({ + digest, + signature: signature.signature, + certificate: signature.key.$case === 'x509Certificate' + ? util_1.pem.toDER(signature.key.certificate) + : undefined, + keyHint: signature.key.$case === 'publicKey' ? signature.key.hint : undefined, + }); +} +exports.toMessageSignatureBundle = toMessageSignatureBundle; +// DSSE envelope bundle - $case: 'dsseEnvelope' +function toDSSEBundle(artifact, signature) { + return sigstore.toDSSEBundle({ + artifact: artifact.data, + artifactType: artifact.type, + signature: signature.signature, + certificate: signature.key.$case === 'x509Certificate' + ? util_1.pem.toDER(signature.key.certificate) + : undefined, + keyHint: signature.key.$case === 'publicKey' ? signature.key.hint : undefined, + }); +} +exports.toDSSEBundle = toDSSEBundle; diff --git a/deps/npm/node_modules/@sigstore/sign/dist/bundler/dsse.js b/deps/npm/node_modules/@sigstore/sign/dist/bundler/dsse.js new file mode 100644 index 00000000000000..486d289aea38cb --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/dist/bundler/dsse.js @@ -0,0 +1,45 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DSSEBundleBuilder = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const util_1 = require("../util"); +const base_1 = require("./base"); +const bundle_1 = require("./bundle"); +// BundleBuilder implementation for DSSE wrapped attestations +class DSSEBundleBuilder extends base_1.BaseBundleBuilder { + constructor(options) { + super(options); + } + // DSSE requires the artifact to be pre-encoded with the payload type + // before the signature is generated. + async prepare(artifact) { + const a = artifactDefaults(artifact); + return util_1.dsse.preAuthEncoding(a.type, a.data); + } + // Packages the artifact and signature into a DSSE bundle + async package(artifact, signature) { + return (0, bundle_1.toDSSEBundle)(artifactDefaults(artifact), signature); + } +} +exports.DSSEBundleBuilder = DSSEBundleBuilder; +// Defaults the artifact type to an empty string if not provided +function artifactDefaults(artifact) { + return { + ...artifact, + type: artifact.type ?? 
'', + }; +} diff --git a/deps/npm/node_modules/@sigstore/sign/dist/bundler/index.js b/deps/npm/node_modules/@sigstore/sign/dist/bundler/index.js new file mode 100644 index 00000000000000..d67c8c324a4f04 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/dist/bundler/index.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.MessageSignatureBundleBuilder = exports.DSSEBundleBuilder = void 0; +var dsse_1 = require("./dsse"); +Object.defineProperty(exports, "DSSEBundleBuilder", { enumerable: true, get: function () { return dsse_1.DSSEBundleBuilder; } }); +var message_1 = require("./message"); +Object.defineProperty(exports, "MessageSignatureBundleBuilder", { enumerable: true, get: function () { return message_1.MessageSignatureBundleBuilder; } }); diff --git a/deps/npm/node_modules/@sigstore/sign/dist/bundler/message.js b/deps/npm/node_modules/@sigstore/sign/dist/bundler/message.js new file mode 100644 index 00000000000000..e3991f42bab939 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/dist/bundler/message.js @@ -0,0 +1,30 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.MessageSignatureBundleBuilder = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const base_1 = require("./base"); +const bundle_1 = require("./bundle"); +// BundleBuilder implementation for raw message signatures +class MessageSignatureBundleBuilder extends base_1.BaseBundleBuilder { + constructor(options) { + super(options); + } + async package(artifact, signature) { + return (0, bundle_1.toMessageSignatureBundle)(artifact, signature); + } +} +exports.MessageSignatureBundleBuilder = MessageSignatureBundleBuilder; diff --git a/deps/npm/node_modules/@sigstore/sign/dist/error.js b/deps/npm/node_modules/@sigstore/sign/dist/error.js new file mode 100644 index 00000000000000..d57e4567fb89ee --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/dist/error.js @@ -0,0 +1,39 @@ +"use strict"; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.internalError = exports.InternalError = void 0; +const error_1 = require("./external/error"); +class InternalError extends Error { + constructor({ code, message, cause, }) { + super(message); + this.name = this.constructor.name; + this.cause = cause; + this.code = code; + } +} +exports.InternalError = InternalError; +function internalError(err, code, message) { + if (err instanceof error_1.HTTPError) { + message += ` - ${err.message}`; + } + throw new InternalError({ + code: code, + message: message, + cause: err, + }); +} +exports.internalError = internalError; diff --git a/deps/npm/node_modules/@sigstore/sign/dist/external/error.js b/deps/npm/node_modules/@sigstore/sign/dist/external/error.js new file mode 100644 index 00000000000000..0dad92ea69414f --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/dist/external/error.js @@ -0,0 +1,38 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.checkStatus = exports.HTTPError = void 0; +class HTTPError extends Error { + constructor({ status, message, location, }) { + super(`(${status}) ${message}`); + this.statusCode = status; + this.location = location; + } +} +exports.HTTPError = HTTPError; +const checkStatus = async (response) => { + if (response.ok) { + return response; + } + else { + let message = response.statusText; + const location = response.headers?.get('Location') || undefined; + const contentType = response.headers?.get('Content-Type'); + // If response type is JSON, try to parse the body for a message + if (contentType?.includes('application/json')) { + try { + await response.json().then((body) => { + message = body.message; + }); + } + catch (e) { + // ignore + } + } + throw new HTTPError({ + status: response.status, + message: message, + location: location, + }); + } +}; +exports.checkStatus = checkStatus; diff --git a/deps/npm/node_modules/sigstore/dist/external/fulcio.js b/deps/npm/node_modules/@sigstore/sign/dist/external/fulcio.js similarity index 95% rename from deps/npm/node_modules/sigstore/dist/external/fulcio.js rename to deps/npm/node_modules/@sigstore/sign/dist/external/fulcio.js index aeb48d58d8d83e..f00b62e147cd7f 100644 --- a/deps/npm/node_modules/sigstore/dist/external/fulcio.js +++ b/deps/npm/node_modules/@sigstore/sign/dist/external/fulcio.js @@ -5,7 +5,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) { Object.defineProperty(exports, "__esModule", { value: true }); exports.Fulcio = void 0; /* -Copyright 2022 The Sigstore Authors. +Copyright 2023 The Sigstore Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
@@ -43,7 +43,7 @@ class Fulcio { method: 'POST', body: JSON.stringify(request), }); - (0, error_1.checkStatus)(response); + await (0, error_1.checkStatus)(response); const data = await response.json(); return data; } diff --git a/deps/npm/node_modules/sigstore/dist/external/rekor.js b/deps/npm/node_modules/@sigstore/sign/dist/external/rekor.js similarity index 94% rename from deps/npm/node_modules/sigstore/dist/external/rekor.js rename to deps/npm/node_modules/@sigstore/sign/dist/external/rekor.js index b6bbeb6f207934..6f6cb96cc9c5cb 100644 --- a/deps/npm/node_modules/sigstore/dist/external/rekor.js +++ b/deps/npm/node_modules/@sigstore/sign/dist/external/rekor.js @@ -5,7 +5,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) { Object.defineProperty(exports, "__esModule", { value: true }); exports.Rekor = void 0; /* -Copyright 2022 The Sigstore Authors. +Copyright 2023 The Sigstore Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -49,7 +49,7 @@ class Rekor { headers: { 'Content-Type': 'application/json' }, body: JSON.stringify(propsedEntry), }); - (0, error_1.checkStatus)(response); + await (0, error_1.checkStatus)(response); const data = await response.json(); return entryFromResponse(data); } @@ -61,7 +61,7 @@ class Rekor { async getEntry(uuid) { const url = `${this.baseUrl}/api/v1/log/entries/${uuid}`; const response = await this.fetch(url); - (0, error_1.checkStatus)(response); + await (0, error_1.checkStatus)(response); const data = await response.json(); return entryFromResponse(data); } @@ -77,7 +77,7 @@ class Rekor { body: JSON.stringify(opts), headers: { 'Content-Type': 'application/json' }, }); - (0, error_1.checkStatus)(response); + await (0, error_1.checkStatus)(response); const data = await response.json(); return data; } @@ -93,7 +93,7 @@ class Rekor { body: JSON.stringify(opts), headers: { 'Content-Type': 'application/json' }, }); - (0, error_1.checkStatus)(response); + await (0, error_1.checkStatus)(response); const rawData = await response.json(); const data = rawData.map((d) => entryFromResponse(d)); return data; diff --git a/deps/npm/node_modules/sigstore/dist/external/tsa.js b/deps/npm/node_modules/@sigstore/sign/dist/external/tsa.js similarity index 97% rename from deps/npm/node_modules/sigstore/dist/external/tsa.js rename to deps/npm/node_modules/@sigstore/sign/dist/external/tsa.js index 5277d7d3f97071..252c14f2d32d87 100644 --- a/deps/npm/node_modules/sigstore/dist/external/tsa.js +++ b/deps/npm/node_modules/@sigstore/sign/dist/external/tsa.js @@ -40,7 +40,7 @@ class TimestampAuthority { method: 'POST', body: JSON.stringify(request), }); - (0, error_1.checkStatus)(response); + await (0, error_1.checkStatus)(response); return response.buffer(); } } diff --git a/deps/npm/node_modules/sigstore/dist/identity/ci.js b/deps/npm/node_modules/@sigstore/sign/dist/identity/ci.js similarity index 90% rename from deps/npm/node_modules/sigstore/dist/identity/ci.js rename to deps/npm/node_modules/@sigstore/sign/dist/identity/ci.js index 0f01e1baaec57d..d79133952b605b 100644 --- a/deps/npm/node_modules/sigstore/dist/identity/ci.js +++ b/deps/npm/node_modules/@sigstore/sign/dist/identity/ci.js @@ -5,7 +5,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) { Object.defineProperty(exports, "__esModule", { value: true }); exports.CIContextProvider = void 0; /* -Copyright 2022 The Sigstore Authors. +Copyright 2023 The Sigstore Authors. 
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -20,7 +20,6 @@ See the License for the specific language governing permissions and limitations under the License. */ const make_fetch_happen_1 = __importDefault(require("make-fetch-happen")); -const util_1 = require("../util"); // Collection of all the CI-specific providers we have implemented const providers = [getGHAToken, getEnv]; /** @@ -29,15 +28,14 @@ const providers = [getGHAToken, getEnv]; * one that resolves. */ class CIContextProvider { - constructor(audience) { + /* istanbul ignore next */ + constructor(audience = 'sigstore') { this.audience = audience; } // Invoke all registered ProviderFuncs and return the value of whichever one // resolves first. async getToken() { - return util_1.promise - .promiseAny(providers.map((getToken) => getToken(this.audience))) - .catch(() => Promise.reject('CI: no tokens available')); + return Promise.any(providers.map((getToken) => getToken(this.audience))).catch(() => Promise.reject('CI: no tokens available')); } } exports.CIContextProvider = CIContextProvider; diff --git a/deps/npm/node_modules/@sigstore/sign/dist/identity/index.js b/deps/npm/node_modules/@sigstore/sign/dist/identity/index.js new file mode 100644 index 00000000000000..1c1223b443fab6 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/dist/identity/index.js @@ -0,0 +1,20 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.CIContextProvider = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +var ci_1 = require("./ci"); +Object.defineProperty(exports, "CIContextProvider", { enumerable: true, get: function () { return ci_1.CIContextProvider; } }); diff --git a/deps/npm/node_modules/sigstore/dist/types/sigstore/serialized.js b/deps/npm/node_modules/@sigstore/sign/dist/identity/provider.js similarity index 100% rename from deps/npm/node_modules/sigstore/dist/types/sigstore/serialized.js rename to deps/npm/node_modules/@sigstore/sign/dist/identity/provider.js diff --git a/deps/npm/node_modules/@sigstore/sign/dist/index.js b/deps/npm/node_modules/@sigstore/sign/dist/index.js new file mode 100644 index 00000000000000..383b76083361b9 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/dist/index.js @@ -0,0 +1,17 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TSAWitness = exports.RekorWitness = exports.DEFAULT_REKOR_URL = exports.FulcioSigner = exports.DEFAULT_FULCIO_URL = exports.CIContextProvider = exports.InternalError = exports.MessageSignatureBundleBuilder = exports.DSSEBundleBuilder = void 0; +var bundler_1 = require("./bundler"); +Object.defineProperty(exports, "DSSEBundleBuilder", { enumerable: true, get: function () { return bundler_1.DSSEBundleBuilder; } }); +Object.defineProperty(exports, "MessageSignatureBundleBuilder", { enumerable: true, get: function () { return bundler_1.MessageSignatureBundleBuilder; } }); +var error_1 = require("./error"); +Object.defineProperty(exports, "InternalError", { enumerable: true, get: function () { return error_1.InternalError; } }); +var identity_1 = require("./identity"); +Object.defineProperty(exports, "CIContextProvider", { enumerable: true, get: function () { return identity_1.CIContextProvider; } }); +var signer_1 = require("./signer"); +Object.defineProperty(exports, "DEFAULT_FULCIO_URL", { enumerable: true, get: function () { return signer_1.DEFAULT_FULCIO_URL; } }); +Object.defineProperty(exports, "FulcioSigner", { enumerable: true, get: function () { return signer_1.FulcioSigner; } }); +var witness_1 = require("./witness"); +Object.defineProperty(exports, "DEFAULT_REKOR_URL", { enumerable: true, get: function () { return witness_1.DEFAULT_REKOR_URL; } }); +Object.defineProperty(exports, "RekorWitness", { enumerable: true, get: function () { return witness_1.RekorWitness; } }); +Object.defineProperty(exports, "TSAWitness", { enumerable: true, get: function () { return witness_1.TSAWitness; } }); diff --git a/deps/npm/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js b/deps/npm/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js new file mode 100644 index 00000000000000..81b421eabadb2e --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js @@ -0,0 +1,60 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.CAClient = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +const error_1 = require("../../error"); +const fulcio_1 = require("../../external/fulcio"); +class CAClient { + constructor(options) { + this.fulcio = new fulcio_1.Fulcio({ + baseURL: options.fulcioBaseURL, + retry: options.retry, + timeout: options.timeout, + }); + } + async createSigningCertificate(identityToken, publicKey, challenge) { + const request = toCertificateRequest(identityToken, publicKey, challenge); + try { + const resp = await this.fulcio.createSigningCertificate(request); + // Account for the fact that the response may contain either a + // signedCertificateEmbeddedSct or a signedCertificateDetachedSct. + const cert = resp.signedCertificateEmbeddedSct + ? resp.signedCertificateEmbeddedSct + : resp.signedCertificateDetachedSct; + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + return cert.chain.certificates; + } + catch (err) { + (0, error_1.internalError)(err, 'CA_CREATE_SIGNING_CERTIFICATE_ERROR', 'error creating signing certificate'); + } + } +} +exports.CAClient = CAClient; +function toCertificateRequest(identityToken, publicKey, challenge) { + return { + credentials: { + oidcIdentityToken: identityToken, + }, + publicKeyRequest: { + publicKey: { + algorithm: 'ECDSA', + content: publicKey, + }, + proofOfPossession: challenge.toString('base64'), + }, + }; +} diff --git a/deps/npm/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js b/deps/npm/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js new file mode 100644 index 00000000000000..481aa5c3579a27 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js @@ -0,0 +1,45 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.EphemeralSigner = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const crypto_1 = __importDefault(require("crypto")); +const EC_KEYPAIR_TYPE = 'ec'; +const P256_CURVE = 'P-256'; +// Signer implementation which uses an ephemeral keypair to sign artifacts. +// The private key lives only in memory and is tied to the lifetime of the +// EphemeralSigner instance. 
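+// The keypair is generated at construction time on the NIST P-256 curve;
+// sign() returns the raw signature over the supplied data together with the
+// public key exported as a PEM-encoded SPKI string.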
+class EphemeralSigner { + constructor() { + this.keypair = crypto_1.default.generateKeyPairSync(EC_KEYPAIR_TYPE, { + namedCurve: P256_CURVE, + }); + } + async sign(data) { + const signature = crypto_1.default.sign(null, data, this.keypair.privateKey); + const publicKey = this.keypair.publicKey + .export({ format: 'pem', type: 'spki' }) + .toString('ascii'); + return { + signature: signature, + key: { $case: 'publicKey', publicKey }, + }; + } +} +exports.EphemeralSigner = EphemeralSigner; diff --git a/deps/npm/node_modules/@sigstore/sign/dist/signer/fulcio/index.js b/deps/npm/node_modules/@sigstore/sign/dist/signer/fulcio/index.js new file mode 100644 index 00000000000000..89a432548d2b42 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/dist/signer/fulcio/index.js @@ -0,0 +1,87 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.FulcioSigner = exports.DEFAULT_FULCIO_URL = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const error_1 = require("../../error"); +const util_1 = require("../../util"); +const ca_1 = require("./ca"); +const ephemeral_1 = require("./ephemeral"); +exports.DEFAULT_FULCIO_URL = 'https://fulcio.sigstore.dev'; +// Signer implementation which can be used to decorate another signer +// with a Fulcio-issued signing certificate for the signer's public key. +// Must be instantiated with an identity provider which can provide a JWT +// which represents the identity to be bound to the signing certificate. +class FulcioSigner { + constructor(options) { + this.ca = new ca_1.CAClient({ + ...options, + fulcioBaseURL: options.fulcioBaseURL || /* istanbul ignore next */ exports.DEFAULT_FULCIO_URL, + }); + this.identityProvider = options.identityProvider; + this.keyHolder = options.keyHolder || new ephemeral_1.EphemeralSigner(); + } + async sign(data) { + // Retrieve identity token from the supplied identity provider + const identityToken = await this.getIdentityToken(); + // Extract challenge claim from OIDC token + let subject; + try { + subject = util_1.oidc.extractJWTSubject(identityToken); + } + catch (err) { + throw new error_1.InternalError({ + code: 'IDENTITY_TOKEN_PARSE_ERROR', + message: `invalid identity token: ${identityToken}`, + cause: err, + }); + } + // Construct challenge value by signing the subject claim + const challenge = await this.keyHolder.sign(Buffer.from(subject)); + if (challenge.key.$case !== 'publicKey') { + throw new error_1.InternalError({ + code: 'CA_CREATE_SIGNING_CERTIFICATE_ERROR', + message: 'unexpected format for signing key', + }); + } + // Create signing certificate + const certificates = await this.ca.createSigningCertificate(identityToken, challenge.key.publicKey, challenge.signature); + // Generate artifact signature + const signature = await this.keyHolder.sign(data); + // Specifically returning only the first certificate in the chain + // as the key. 
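+        // The returned certificate binds the signing key's public key to the
+        // identity asserted by the OIDC token.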
+ return { + signature: signature.signature, + key: { + $case: 'x509Certificate', + certificate: certificates[0], + }, + }; + } + async getIdentityToken() { + try { + return await this.identityProvider.getToken(); + } + catch (err) { + throw new error_1.InternalError({ + code: 'IDENTITY_TOKEN_READ_ERROR', + message: 'error retrieving identity token', + cause: err, + }); + } + } +} +exports.FulcioSigner = FulcioSigner; diff --git a/deps/npm/node_modules/@sigstore/sign/dist/signer/index.js b/deps/npm/node_modules/@sigstore/sign/dist/signer/index.js new file mode 100644 index 00000000000000..06ec9dbe72fe14 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/dist/signer/index.js @@ -0,0 +1,21 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.FulcioSigner = exports.DEFAULT_FULCIO_URL = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +var fulcio_1 = require("./fulcio"); +Object.defineProperty(exports, "DEFAULT_FULCIO_URL", { enumerable: true, get: function () { return fulcio_1.DEFAULT_FULCIO_URL; } }); +Object.defineProperty(exports, "FulcioSigner", { enumerable: true, get: function () { return fulcio_1.FulcioSigner; } }); diff --git a/deps/npm/node_modules/sigstore/bin/sigstore.js b/deps/npm/node_modules/@sigstore/sign/dist/signer/signer.js old mode 100755 new mode 100644 similarity index 82% rename from deps/npm/node_modules/sigstore/bin/sigstore.js rename to deps/npm/node_modules/@sigstore/sign/dist/signer/signer.js index a07b7bdc1af95a..b92c54183375d9 --- a/deps/npm/node_modules/sigstore/bin/sigstore.js +++ b/deps/npm/node_modules/@sigstore/sign/dist/signer/signer.js @@ -1,6 +1,6 @@ -#!/usr/bin/env node +"use strict"; /* -Copyright 2022 The Sigstore Authors. +Copyright 2023 The Sigstore Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -14,4 +14,4 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ -require('../dist/cli').processArgv(); +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/deps/npm/node_modules/@sigstore/sign/dist/types/fetch.js b/deps/npm/node_modules/@sigstore/sign/dist/types/fetch.js new file mode 100644 index 00000000000000..c8ad2e549bdc68 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/dist/types/fetch.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/deps/npm/node_modules/@sigstore/sign/dist/util/crypto.js b/deps/npm/node_modules/@sigstore/sign/dist/util/crypto.js new file mode 100644 index 00000000000000..11aad2fb6ff8b0 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/dist/util/crypto.js @@ -0,0 +1,27 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.hash = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const crypto_1 = __importDefault(require("crypto")); +const SHA256_ALGORITHM = 'sha256'; +function hash(data, algorithm = SHA256_ALGORITHM) { + return crypto_1.default.createHash(algorithm).update(data).digest(); +} +exports.hash = hash; diff --git a/deps/npm/node_modules/@sigstore/sign/dist/util/dsse.js b/deps/npm/node_modules/@sigstore/sign/dist/util/dsse.js new file mode 100644 index 00000000000000..befcdbdc14ec81 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/dist/util/dsse.js @@ -0,0 +1,25 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.preAuthEncoding = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const PAE_PREFIX = 'DSSEv1'; +// DSSE Pre-Authentication Encoding +function preAuthEncoding(payloadType, payload) { + const prefix = Buffer.from(`${PAE_PREFIX} ${payloadType.length} ${payloadType} ${payload.length} `, 'ascii'); + return Buffer.concat([prefix, payload]); +} +exports.preAuthEncoding = preAuthEncoding; diff --git a/deps/npm/node_modules/@sigstore/sign/dist/util/encoding.js b/deps/npm/node_modules/@sigstore/sign/dist/util/encoding.js new file mode 100644 index 00000000000000..b020ac4d6ecd42 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/dist/util/encoding.js @@ -0,0 +1,28 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.base64Decode = exports.base64Encode = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +const BASE64_ENCODING = 'base64'; +const UTF8_ENCODING = 'utf-8'; +function base64Encode(str) { + return Buffer.from(str, UTF8_ENCODING).toString(BASE64_ENCODING); +} +exports.base64Encode = base64Encode; +function base64Decode(str) { + return Buffer.from(str, BASE64_ENCODING).toString(UTF8_ENCODING); +} +exports.base64Decode = base64Decode; diff --git a/deps/npm/node_modules/@sigstore/sign/dist/util/index.js b/deps/npm/node_modules/@sigstore/sign/dist/util/index.js new file mode 100644 index 00000000000000..567e5dbf6e04c7 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/dist/util/index.js @@ -0,0 +1,48 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ua = exports.pem = exports.oidc = exports.json = exports.encoding = exports.dsse = exports.crypto = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +exports.crypto = __importStar(require("./crypto")); +exports.dsse = __importStar(require("./dsse")); +exports.encoding = __importStar(require("./encoding")); +exports.json = __importStar(require("./json")); +exports.oidc = __importStar(require("./oidc")); +exports.pem = __importStar(require("./pem")); +exports.ua = __importStar(require("./ua")); diff --git a/deps/npm/node_modules/@sigstore/sign/dist/util/json.js b/deps/npm/node_modules/@sigstore/sign/dist/util/json.js new file mode 100644 index 00000000000000..69176ad731eb78 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/dist/util/json.js @@ -0,0 +1,61 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.canonicalize = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +// JSON canonicalization per https://github.com/cyberphone/json-canonicalization +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function canonicalize(object) { + let buffer = ''; + if (object === null || typeof object !== 'object' || object.toJSON != null) { + // Primitives or toJSONable objects + buffer += JSON.stringify(object); + } + else if (Array.isArray(object)) { + // Array - maintain element order + buffer += '['; + let first = true; + object.forEach((element) => { + if (!first) { + buffer += ','; + } + first = false; + // recursive call + buffer += canonicalize(element); + }); + buffer += ']'; + } + else { + // Object - Sort properties before serializing + buffer += '{'; + let first = true; + Object.keys(object) + .sort() + .forEach((property) => { + if (!first) { + buffer += ','; + } + first = false; + buffer += JSON.stringify(property); + buffer += ':'; + // recursive call + buffer += canonicalize(object[property]); + }); + buffer += '}'; + } + return buffer; +} +exports.canonicalize = canonicalize; diff --git a/deps/npm/node_modules/sigstore/dist/util/oidc.js b/deps/npm/node_modules/@sigstore/sign/dist/util/oidc.js similarity index 98% rename from deps/npm/node_modules/sigstore/dist/util/oidc.js rename to deps/npm/node_modules/@sigstore/sign/dist/util/oidc.js index 05af90d09ae684..8b49f3bbe84401 100644 --- a/deps/npm/node_modules/sigstore/dist/util/oidc.js +++ b/deps/npm/node_modules/@sigstore/sign/dist/util/oidc.js @@ -25,7 +25,7 @@ var __importStar = (this && this.__importStar) || function (mod) { Object.defineProperty(exports, "__esModule", { value: true }); exports.extractJWTSubject = void 0; /* -Copyright 2022 The Sigstore Authors. +Copyright 2023 The Sigstore Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/deps/npm/node_modules/@sigstore/sign/dist/util/pem.js b/deps/npm/node_modules/@sigstore/sign/dist/util/pem.js new file mode 100644 index 00000000000000..36eeebd2052f5e --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/dist/util/pem.js @@ -0,0 +1,27 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.toDER = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const PEM_HEADER = /-----BEGIN (.*)-----/; +const PEM_FOOTER = /-----END (.*)-----/; +function toDER(certificate) { + const lines = certificate + .split('\n') + .map((line) => line.match(PEM_HEADER) || line.match(PEM_FOOTER) ? 
'' : line); + return Buffer.from(lines.join(''), 'base64'); +} +exports.toDER = toDER; diff --git a/deps/npm/node_modules/sigstore/dist/util/ua.js b/deps/npm/node_modules/@sigstore/sign/dist/util/ua.js similarity index 97% rename from deps/npm/node_modules/sigstore/dist/util/ua.js rename to deps/npm/node_modules/@sigstore/sign/dist/util/ua.js index 6db6b5a2723db2..c142330eb8338c 100644 --- a/deps/npm/node_modules/sigstore/dist/util/ua.js +++ b/deps/npm/node_modules/@sigstore/sign/dist/util/ua.js @@ -5,7 +5,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) { Object.defineProperty(exports, "__esModule", { value: true }); exports.getUserAgent = void 0; /* -Copyright 2022 The Sigstore Authors. +Copyright 2023 The Sigstore Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/deps/npm/node_modules/@sigstore/sign/dist/witness/index.js b/deps/npm/node_modules/@sigstore/sign/dist/witness/index.js new file mode 100644 index 00000000000000..e200d0638350bb --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/dist/witness/index.js @@ -0,0 +1,23 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TSAWitness = exports.RekorWitness = exports.DEFAULT_REKOR_URL = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +var tlog_1 = require("./tlog"); +Object.defineProperty(exports, "DEFAULT_REKOR_URL", { enumerable: true, get: function () { return tlog_1.DEFAULT_REKOR_URL; } }); +Object.defineProperty(exports, "RekorWitness", { enumerable: true, get: function () { return tlog_1.RekorWitness; } }); +var tsa_1 = require("./tsa"); +Object.defineProperty(exports, "TSAWitness", { enumerable: true, get: function () { return tsa_1.TSAWitness; } }); diff --git a/deps/npm/node_modules/sigstore/dist/tlog/index.js b/deps/npm/node_modules/@sigstore/sign/dist/witness/tlog/client.js similarity index 54% rename from deps/npm/node_modules/sigstore/dist/tlog/index.js rename to deps/npm/node_modules/@sigstore/sign/dist/witness/tlog/client.js index 7f5f531983b37d..22c895f2ca7edd 100644 --- a/deps/npm/node_modules/sigstore/dist/tlog/index.js +++ b/deps/npm/node_modules/@sigstore/sign/dist/witness/tlog/client.js @@ -2,7 +2,7 @@ Object.defineProperty(exports, "__esModule", { value: true }); exports.TLogClient = void 0; /* -Copyright 2022 The Sigstore Authors. +Copyright 2023 The Sigstore Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -16,52 +16,38 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ -const error_1 = require("../error"); -const external_1 = require("../external"); -const format_1 = require("./format"); +const error_1 = require("../../error"); +const error_2 = require("../../external/error"); +const rekor_1 = require("../../external/rekor"); class TLogClient { constructor(options) { - this.rekor = new external_1.Rekor({ + this.fetchOnConflict = options.fetchOnConflict ?? false; + this.rekor = new rekor_1.Rekor({ baseURL: options.rekorBaseURL, retry: options.retry, timeout: options.timeout, }); } - async createMessageSignatureEntry(digest, sigMaterial, options = {}) { - const proposedEntry = (0, format_1.toProposedHashedRekordEntry)(digest, sigMaterial); - return this.createEntry(proposedEntry, options.fetchOnConflict); - } - async createDSSEEntry(envelope, sigMaterial, options = {}) { - const proposedEntry = (0, format_1.toProposedIntotoEntry)(envelope, sigMaterial); - return this.createEntry(proposedEntry, options.fetchOnConflict); - } - async createEntry(proposedEntry, fetchOnConflict = false) { + async createEntry(proposedEntry) { let entry; try { entry = await this.rekor.createEntry(proposedEntry); } catch (err) { // If the entry already exists, fetch it (if enabled) - if (entryExistsError(err) && fetchOnConflict) { + if (entryExistsError(err) && this.fetchOnConflict) { // Grab the UUID of the existing entry from the location header + /* istanbul ignore next */ const uuid = err.location.split('/').pop() || ''; try { entry = await this.rekor.getEntry(uuid); } catch (err) { - throw new error_1.InternalError({ - code: 'TLOG_FETCH_ENTRY_ERROR', - message: 'error fetching tlog entry', - cause: err, - }); + (0, error_1.internalError)(err, 'TLOG_FETCH_ENTRY_ERROR', 'error fetching tlog entry'); } } else { - throw new error_1.InternalError({ - code: 'TLOG_CREATE_ENTRY_ERROR', - message: 'error creating tlog entry', - cause: err, - }); + (0, error_1.internalError)(err, 'TLOG_CREATE_ENTRY_ERROR', 'error creating tlog entry'); } } return entry; @@ -69,7 +55,7 @@ class TLogClient { } exports.TLogClient = TLogClient; function entryExistsError(value) { - return (value instanceof external_1.HTTPError && + return (value instanceof error_2.HTTPError && value.statusCode === 409 && value.location !== undefined); } diff --git a/deps/npm/node_modules/sigstore/dist/tlog/format.js b/deps/npm/node_modules/@sigstore/sign/dist/witness/tlog/entry.js similarity index 52% rename from deps/npm/node_modules/sigstore/dist/tlog/format.js rename to deps/npm/node_modules/@sigstore/sign/dist/witness/tlog/entry.js index b0eae95098af01..f6c165380ba45d 100644 --- a/deps/npm/node_modules/sigstore/dist/tlog/format.js +++ b/deps/npm/node_modules/@sigstore/sign/dist/witness/tlog/entry.js @@ -1,30 +1,46 @@ "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); -exports.toProposedIntotoEntry = exports.toProposedHashedRekordEntry = exports.toProposedDSSEEntry = void 0; -const sigstore_1 = require("../types/sigstore"); -const util_1 = require("../util"); -const DEFAULT_DSSE_API_VERSION = '0.0.1'; -const DEFAULT_HASHEDREKORD_API_VERSION = '0.0.1'; -const DEFAULT_INTOTO_API_VERSION = '0.0.2'; -// Returns a properly formatted Rekor "dsse" entry for the given DSSE -// envelope and signature -function toProposedDSSEEntry(envelope, signature, apiVersion = DEFAULT_DSSE_API_VERSION) { - switch (apiVersion) { - case '0.0.1': - return toProposedDSSEV001Entry(envelope, signature); - default: - throw new Error(`Unsupported dsse kind API version: ${apiVersion}`); +exports.toProposedEntry = void 0; 
+/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const bundle_1 = require("@sigstore/bundle"); +const util_1 = require("../../util"); +function toProposedEntry(content, publicKey, +// TODO: Remove this parameter once have completely switched to 'dsse' entries +entryType = 'intoto') { + switch (content.$case) { + case 'dsseEnvelope': + // TODO: Remove this conditional once have completely switched to 'dsse' entries + if (entryType === 'dsse') { + return toProposedDSSEEntry(content.dsseEnvelope, publicKey); + } + return toProposedIntotoEntry(content.dsseEnvelope, publicKey); + case 'messageSignature': + return toProposedHashedRekordEntry(content.messageSignature, publicKey); } } -exports.toProposedDSSEEntry = toProposedDSSEEntry; +exports.toProposedEntry = toProposedEntry; // Returns a properly formatted Rekor "hashedrekord" entry for the given digest // and signature -function toProposedHashedRekordEntry(digest, signature) { - const hexDigest = digest.toString('hex'); - const b64Signature = signature.signature.toString('base64'); - const b64Key = util_1.encoding.base64Encode(toPublicKey(signature)); +function toProposedHashedRekordEntry(messageSignature, publicKey) { + const hexDigest = messageSignature.messageDigest.digest.toString('hex'); + const b64Signature = messageSignature.signature.toString('base64'); + const b64Key = util_1.encoding.base64Encode(publicKey); return { - apiVersion: DEFAULT_HASHEDREKORD_API_VERSION, + apiVersion: '0.0.1', kind: 'hashedrekord', spec: { data: { @@ -42,61 +58,55 @@ function toProposedHashedRekordEntry(digest, signature) { }, }; } -exports.toProposedHashedRekordEntry = toProposedHashedRekordEntry; -// Returns a properly formatted Rekor "intoto" entry for the given DSSE -// envelope and signature -function toProposedIntotoEntry(envelope, signature, apiVersion = DEFAULT_INTOTO_API_VERSION) { - switch (apiVersion) { - case '0.0.2': - return toProposedIntotoV002Entry(envelope, signature); - default: - throw new Error(`Unsupported intoto kind API version: ${apiVersion}`); - } -} -exports.toProposedIntotoEntry = toProposedIntotoEntry; -function toProposedDSSEV001Entry(envelope, signature) { +// Returns a properly formatted Rekor "dsse" entry for the given DSSE envelope +// and signature +function toProposedDSSEEntry(envelope, publicKey) { + const envelopeJSON = JSON.stringify((0, bundle_1.envelopeToJSON)(envelope)); + const encodedKey = util_1.encoding.base64Encode(publicKey); return { apiVersion: '0.0.1', kind: 'dsse', spec: { proposedContent: { - envelope: JSON.stringify(sigstore_1.Envelope.toJSON(envelope)), - verifiers: [util_1.encoding.base64Encode(toPublicKey(signature))], + envelope: envelopeJSON, + verifiers: [encodedKey], }, }, }; } -function toProposedIntotoV002Entry(envelope, signature) { +// Returns a properly formatted Rekor "intoto" entry for the given DSSE +// envelope and signature +function toProposedIntotoEntry(envelope, publicKey) { // Calculate the value for the payloadHash field in the Rekor entry const payloadHash = 
util_1.crypto.hash(envelope.payload).toString('hex'); // Calculate the value for the hash field in the Rekor entry - const envelopeHash = calculateDSSEHash(envelope, signature); + const envelopeHash = calculateDSSEHash(envelope, publicKey); // Collect values for re-creating the DSSE envelope. // Double-encode payload and signature cause that's what Rekor expects const payload = util_1.encoding.base64Encode(envelope.payload.toString('base64')); const sig = util_1.encoding.base64Encode(envelope.signatures[0].sig.toString('base64')); const keyid = envelope.signatures[0].keyid; - const publicKey = util_1.encoding.base64Encode(toPublicKey(signature)); + const encodedKey = util_1.encoding.base64Encode(publicKey); // Create the envelope portion of the entry. Note the inclusion of the // publicKey in the signature struct is not a standard part of a DSSE // envelope, but is required by Rekor. - const dsseEnv = { + const dsse = { payloadType: envelope.payloadType, payload: payload, - signatures: [{ sig, publicKey }], + signatures: [{ sig, publicKey: encodedKey }], }; // If the keyid is an empty string, Rekor seems to remove it altogether. We // need to do the same here so that we can properly recreate the entry for // verification. if (keyid.length > 0) { - dsseEnv.signatures[0].keyid = keyid; + dsse.signatures[0].keyid = keyid; } return { apiVersion: '0.0.2', kind: 'intoto', spec: { content: { - envelope: dsseEnv, + envelope: dsse, hash: { algorithm: 'sha256', value: envelopeHash }, payloadHash: { algorithm: 'sha256', value: payloadHash }, }, @@ -110,25 +120,17 @@ function toProposedIntotoV002Entry(envelope, signature) { // * signature is base64 encoded (only the first signature is used) // * keyid is included ONLY if it is NOT an empty string // * The resulting JSON is canonicalized and hashed to a hex string -function calculateDSSEHash(envelope, signature) { - const dsseEnv = { +function calculateDSSEHash(envelope, publicKey) { + const dsse = { payloadType: envelope.payloadType, payload: envelope.payload.toString('base64'), signatures: [ - { - sig: envelope.signatures[0].sig.toString('base64'), - publicKey: toPublicKey(signature), - }, + { sig: envelope.signatures[0].sig.toString('base64'), publicKey }, ], }; // If the keyid is an empty string, Rekor seems to remove it altogether. if (envelope.signatures[0].keyid.length > 0) { - dsseEnv.signatures[0].keyid = envelope.signatures[0].keyid; + dsse.signatures[0].keyid = envelope.signatures[0].keyid; } - return util_1.crypto.hash(util_1.json.canonicalize(dsseEnv)).toString('hex'); -} -function toPublicKey(signature) { - return signature.certificates - ? signature.certificates[0] - : signature.key.value; + return util_1.crypto.hash(util_1.json.canonicalize(dsse)).toString('hex'); } diff --git a/deps/npm/node_modules/@sigstore/sign/dist/witness/tlog/index.js b/deps/npm/node_modules/@sigstore/sign/dist/witness/tlog/index.js new file mode 100644 index 00000000000000..1f098df85390cf --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/dist/witness/tlog/index.js @@ -0,0 +1,81 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.RekorWitness = exports.DEFAULT_REKOR_URL = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const util_1 = require("../../util"); +const client_1 = require("./client"); +const entry_1 = require("./entry"); +exports.DEFAULT_REKOR_URL = 'https://rekor.sigstore.dev'; +class RekorWitness { + constructor(options) { + this.tlog = new client_1.TLogClient({ + ...options, + rekorBaseURL: options.rekorBaseURL || /* istanbul ignore next */ exports.DEFAULT_REKOR_URL, + }); + } + async testify(content, publicKey) { + const proposedEntry = (0, entry_1.toProposedEntry)(content, publicKey); + const entry = await this.tlog.createEntry(proposedEntry); + return toTransparencyLogEntry(entry); + } +} +exports.RekorWitness = RekorWitness; +function toTransparencyLogEntry(entry) { + const logID = Buffer.from(entry.logID, 'hex'); + // Parse entry body so we can extract the kind and version. + const bodyJSON = util_1.encoding.base64Decode(entry.body); + const entryBody = JSON.parse(bodyJSON); + const promise = entry?.verification?.signedEntryTimestamp + ? inclusionPromise(entry.verification.signedEntryTimestamp) + : undefined; + const proof = entry?.verification?.inclusionProof + ? inclusionProof(entry.verification.inclusionProof) + : undefined; + const tlogEntry = { + logIndex: entry.logIndex.toString(), + logId: { + keyId: logID, + }, + integratedTime: entry.integratedTime.toString(), + kindVersion: { + kind: entryBody.kind, + version: entryBody.apiVersion, + }, + inclusionPromise: promise, + inclusionProof: proof, + canonicalizedBody: Buffer.from(entry.body, 'base64'), + }; + return { + tlogEntries: [tlogEntry], + }; +} +function inclusionPromise(promise) { + return { + signedEntryTimestamp: Buffer.from(promise, 'base64'), + }; +} +function inclusionProof(proof) { + return { + logIndex: proof.logIndex.toString(), + treeSize: proof.treeSize.toString(), + rootHash: Buffer.from(proof.rootHash, 'hex'), + hashes: proof.hashes.map((h) => Buffer.from(h, 'hex')), + checkpoint: { + envelope: proof.checkpoint, + }, + }; +} diff --git a/deps/npm/node_modules/sigstore/dist/tsa/index.js b/deps/npm/node_modules/@sigstore/sign/dist/witness/tsa/client.js similarity index 74% rename from deps/npm/node_modules/sigstore/dist/tsa/index.js rename to deps/npm/node_modules/@sigstore/sign/dist/witness/tsa/client.js index 4951b24a93f4fe..a334deb00b7756 100644 --- a/deps/npm/node_modules/sigstore/dist/tsa/index.js +++ b/deps/npm/node_modules/@sigstore/sign/dist/witness/tsa/client.js @@ -2,7 +2,7 @@ Object.defineProperty(exports, "__esModule", { value: true }); exports.TSAClient = void 0; /* -Copyright 2022 The Sigstore Authors. +Copyright 2023 The Sigstore Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -16,12 +16,12 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ -const error_1 = require("../error"); -const external_1 = require("../external"); -const util_1 = require("../util"); +const error_1 = require("../../error"); +const tsa_1 = require("../../external/tsa"); +const util_1 = require("../../util"); class TSAClient { constructor(options) { - this.tsa = new external_1.TimestampAuthority({ + this.tsa = new tsa_1.TimestampAuthority({ baseURL: options.tsaBaseURL, retry: options.retry, timeout: options.timeout, @@ -36,11 +36,7 @@ class TSAClient { return await this.tsa.createTimestamp(request); } catch (err) { - throw new error_1.InternalError({ - code: 'TSA_CREATE_TIMESTAMP_ERROR', - message: 'error creating timestamp', - cause: err, - }); + (0, error_1.internalError)(err, 'TSA_CREATE_TIMESTAMP_ERROR', 'error creating timestamp'); } } } diff --git a/deps/npm/node_modules/@sigstore/sign/dist/witness/tsa/index.js b/deps/npm/node_modules/@sigstore/sign/dist/witness/tsa/index.js new file mode 100644 index 00000000000000..d4f5c7c859d106 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/dist/witness/tsa/index.js @@ -0,0 +1,44 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TSAWitness = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const client_1 = require("./client"); +class TSAWitness { + constructor(options) { + this.tsa = new client_1.TSAClient({ + tsaBaseURL: options.tsaBaseURL, + retry: options.retry, + timeout: options.timeout, + }); + } + async testify(content) { + const signature = extractSignature(content); + const timestamp = await this.tsa.createTimestamp(signature); + return { + rfc3161Timestamps: [{ signedTimestamp: timestamp }], + }; + } +} +exports.TSAWitness = TSAWitness; +function extractSignature(content) { + switch (content.$case) { + case 'dsseEnvelope': + return content.dsseEnvelope.signatures[0].sig; + case 'messageSignature': + return content.messageSignature.signature; + } +} diff --git a/deps/npm/node_modules/@sigstore/sign/dist/witness/witness.js b/deps/npm/node_modules/@sigstore/sign/dist/witness/witness.js new file mode 100644 index 00000000000000..c8ad2e549bdc68 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/dist/witness/witness.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/deps/npm/node_modules/@sigstore/sign/package.json b/deps/npm/node_modules/@sigstore/sign/package.json new file mode 100644 index 00000000000000..cd8dc14412e4da --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/package.json @@ -0,0 +1,42 @@ +{ + "name": "@sigstore/sign", + "version": "2.1.0", + "description": "Sigstore signing library", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "scripts": { + "clean": "shx rm -rf dist *.tsbuildinfo", + "build": "tsc --build", + "test": "jest" + }, + "files": [ + "dist" + ], + "author": "bdehamer@github.com", + "license": "Apache-2.0", + "repository": { + "type": "git", + "url": "git+https://github.com/sigstore/sigstore-js.git" + }, + "bugs": { 
+ "url": "https://github.com/sigstore/sigstore-js/issues" + }, + "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/sign#readme", + "publishConfig": { + "provenance": true + }, + "devDependencies": { + "@sigstore/jest": "^0.0.0", + "@sigstore/mock": "^0.4.0", + "@sigstore/rekor-types": "^2.0.0", + "@types/make-fetch-happen": "^10.0.0" + }, + "dependencies": { + "@sigstore/bundle": "^2.1.0", + "@sigstore/protobuf-specs": "^0.2.1", + "make-fetch-happen": "^13.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } +} diff --git a/deps/npm/node_modules/@sigstore/tuf/dist/client.js b/deps/npm/node_modules/@sigstore/tuf/dist/client.js index 08d6b61840909f..797346d39e6202 100644 --- a/deps/npm/node_modules/@sigstore/tuf/dist/client.js +++ b/deps/npm/node_modules/@sigstore/tuf/dist/client.js @@ -76,21 +76,8 @@ function initClient(cachePath, remote, options) { const baseURL = remote.mirror; const config = { fetchTimeout: options.timeout, + fetchRetry: options.retry, }; - // tuf-js only supports a number for fetchRetries so we have to - // convert the boolean and object options to a number. - /* istanbul ignore if */ - if (typeof options.retry !== 'undefined') { - if (typeof options.retry === 'number') { - config.fetchRetries = options.retry; - } - else if (typeof options.retry === 'object') { - config.fetchRetries = options.retry.retries; - } - else if (options.retry === true) { - config.fetchRetries = 1; - } - } return new tuf_js_1.Updater({ metadataBaseUrl: baseURL, targetBaseUrl: `${baseURL}/targets`, diff --git a/deps/npm/node_modules/@sigstore/tuf/package.json b/deps/npm/node_modules/@sigstore/tuf/package.json index 286d481a4d39fc..a655d52a0407a3 100644 --- a/deps/npm/node_modules/@sigstore/tuf/package.json +++ b/deps/npm/node_modules/@sigstore/tuf/package.json @@ -1,6 +1,6 @@ { "name": "@sigstore/tuf", - "version": "1.0.2", + "version": "2.1.0", "description": "Client for the Sigstore TUF repository", "main": "dist/index.js", "types": "dist/index.d.ts", @@ -28,14 +28,14 @@ }, "devDependencies": { "@sigstore/jest": "^0.0.0", - "@tufjs/repo-mock": "^1.1.0", + "@tufjs/repo-mock": "^2.0.0", "@types/make-fetch-happen": "^10.0.0" }, "dependencies": { - "@sigstore/protobuf-specs": "^0.1.0", - "tuf-js": "^1.1.7" + "@sigstore/protobuf-specs": "^0.2.1", + "tuf-js": "^2.1.0" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" } } diff --git a/deps/npm/node_modules/@tufjs/canonical-json/package.json b/deps/npm/node_modules/@tufjs/canonical-json/package.json index 688c9b93c3a4e6..886c0c3969225a 100644 --- a/deps/npm/node_modules/@tufjs/canonical-json/package.json +++ b/deps/npm/node_modules/@tufjs/canonical-json/package.json @@ -1,6 +1,6 @@ { "name": "@tufjs/canonical-json", - "version": "1.0.0", + "version": "2.0.0", "description": "OLPC JSON canonicalization", "main": "lib/index.js", "typings": "lib/index.d.ts", @@ -19,7 +19,7 @@ "type": "git", "url": "git+https://github.com/theupdateframework/tuf-js.git" }, - "homepage": "https://github.com/theupdateframework/tuf-js/packages/canonical-json#readme", + "homepage": "https://github.com/theupdateframework/tuf-js/tree/main/packages/canonical-json#readme", "bugs": { "url": "https://github.com/theupdateframework/tuf-js/issues" }, @@ -29,11 +29,7 @@ "scripts": { "test": "jest" }, - "devDependencies": { - "@types/node": "^18.14.1", - "typescript": "^4.9.5" - }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" } } diff --git 
a/deps/npm/node_modules/@tufjs/models/dist/base.js b/deps/npm/node_modules/@tufjs/models/dist/base.js index d89a089c330922..259f6799c13a0d 100644 --- a/deps/npm/node_modules/@tufjs/models/dist/base.js +++ b/deps/npm/node_modules/@tufjs/models/dist/base.js @@ -14,7 +14,7 @@ var MetadataKind; MetadataKind["Timestamp"] = "timestamp"; MetadataKind["Snapshot"] = "snapshot"; MetadataKind["Targets"] = "targets"; -})(MetadataKind = exports.MetadataKind || (exports.MetadataKind = {})); +})(MetadataKind || (exports.MetadataKind = MetadataKind = {})); function isMetadataKind(value) { return (typeof value === 'string' && Object.values(MetadataKind).includes(value)); diff --git a/deps/npm/node_modules/@tufjs/models/package.json b/deps/npm/node_modules/@tufjs/models/package.json index 6711ee0dababca..60368242ab556a 100644 --- a/deps/npm/node_modules/@tufjs/models/package.json +++ b/deps/npm/node_modules/@tufjs/models/package.json @@ -1,6 +1,6 @@ { "name": "@tufjs/models", - "version": "1.0.4", + "version": "2.0.0", "description": "TUF metadata models", "main": "dist/index.js", "types": "dist/index.d.ts", @@ -27,15 +27,11 @@ "url": "https://github.com/theupdateframework/tuf-js/issues" }, "homepage": "https://github.com/theupdateframework/tuf-js/tree/main/packages/models#readme", - "devDependencies": { - "@types/node": "^18.16.3", - "typescript": "^5.0.4" - }, "dependencies": { - "@tufjs/canonical-json": "1.0.0", - "minimatch": "^9.0.0" + "@tufjs/canonical-json": "2.0.0", + "minimatch": "^9.0.3" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" } } diff --git a/deps/npm/node_modules/agentkeepalive/lib/agent.js b/deps/npm/node_modules/agentkeepalive/lib/agent.js index a7065b5e5d1ad3..8bd354effa05ec 100644 --- a/deps/npm/node_modules/agentkeepalive/lib/agent.js +++ b/deps/npm/node_modules/agentkeepalive/lib/agent.js @@ -2,8 +2,7 @@ const OriginalAgent = require('http').Agent; const ms = require('humanize-ms'); -const debug = require('debug')('agentkeepalive'); -const deprecate = require('depd')('agentkeepalive'); +const debug = require('util').debuglog('agentkeepalive'); const { INIT_SOCKET, CURRENT_ID, @@ -27,6 +26,10 @@ if (majorVersion >= 11 && majorVersion <= 12) { defaultTimeoutListenerCount = 3; } +function deprecate(message) { + console.log('[agentkeepalive:deprecated] %s', message); +} + class Agent extends OriginalAgent { constructor(options) { options = options || {}; @@ -230,6 +233,7 @@ class Agent extends OriginalAgent { const newSocket = super.createConnection(options, onNewCreate); if (newSocket) onNewCreate(null, newSocket); + return newSocket; } get statusChanged() { diff --git a/deps/npm/node_modules/agentkeepalive/lib/https_agent.js b/deps/npm/node_modules/agentkeepalive/lib/https_agent.js index 73f529d65e7ffe..344fb32cadd862 100644 --- a/deps/npm/node_modules/agentkeepalive/lib/https_agent.js +++ b/deps/npm/node_modules/agentkeepalive/lib/https_agent.js @@ -25,8 +25,8 @@ class HttpsAgent extends HttpAgent { }; } - createConnection(options) { - const socket = this[CREATE_HTTPS_CONNECTION](options); + createConnection(options, oncreate) { + const socket = this[CREATE_HTTPS_CONNECTION](options, oncreate); this[INIT_SOCKET](socket, options); return socket; } diff --git a/deps/npm/node_modules/agentkeepalive/package.json b/deps/npm/node_modules/agentkeepalive/package.json index 3115fee69a0416..d8e9aa7160d0b3 100644 --- a/deps/npm/node_modules/agentkeepalive/package.json +++ b/deps/npm/node_modules/agentkeepalive/package.json @@ -1,6 +1,6 @@ { "name": 
"agentkeepalive", - "version": "4.3.0", + "version": "4.5.0", "description": "Missing keepalive http.Agent", "main": "index.js", "browser": "browser.js", @@ -14,7 +14,7 @@ "contributor": "git-contributor", "test": "npm run lint && egg-bin test --full-trace", "test-local": "egg-bin test --full-trace", - "cov": "cross-env DEBUG=agentkeepalive egg-bin cov --full-trace", + "cov": "cross-env NODE_DEBUG=agentkeepalive egg-bin cov --full-trace", "ci": "npm run lint && npm run cov", "lint": "eslint lib test index.js" }, @@ -35,8 +35,6 @@ "HttpsAgent" ], "dependencies": { - "debug": "^4.1.0", - "depd": "^2.0.0", "humanize-ms": "^1.2.1" }, "devDependencies": { diff --git a/deps/npm/node_modules/cacache/lib/memoization.js b/deps/npm/node_modules/cacache/lib/memoization.js index 0ff604a479c9c1..2ecc60912e4563 100644 --- a/deps/npm/node_modules/cacache/lib/memoization.js +++ b/deps/npm/node_modules/cacache/lib/memoization.js @@ -1,8 +1,8 @@ 'use strict' -const LRU = require('lru-cache') +const { LRUCache } = require('lru-cache') -const MEMOIZED = new LRU({ +const MEMOIZED = new LRUCache({ max: 500, maxSize: 50 * 1024 * 1024, // 50MB ttl: 3 * 60 * 1000, // 3 minutes diff --git a/deps/npm/node_modules/cacache/package.json b/deps/npm/node_modules/cacache/package.json index a6f6f9bdfc4654..1b14bf4bd14904 100644 --- a/deps/npm/node_modules/cacache/package.json +++ b/deps/npm/node_modules/cacache/package.json @@ -1,6 +1,6 @@ { "name": "cacache", - "version": "17.1.3", + "version": "18.0.0", "cache-version": { "content": "2", "index": "5" @@ -48,8 +48,8 @@ "@npmcli/fs": "^3.1.0", "fs-minipass": "^3.0.0", "glob": "^10.2.2", - "lru-cache": "^7.7.1", - "minipass": "^5.0.0", + "lru-cache": "^10.0.1", + "minipass": "^7.0.3", "minipass-collect": "^1.0.2", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", @@ -60,17 +60,23 @@ }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.15.1", + "@npmcli/template-oss": "4.18.0", "tap": "^16.0.0" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", "windowsCI": false, - "version": "4.15.1", - "publish": "true" + "version": "4.18.0", + "publish": "true", + "ciVersions": [ + "16.14.0", + "16.x", + "18.0.0", + "18.x" + ] }, "author": "GitHub Inc.", "tap": { diff --git a/deps/npm/node_modules/depd/History.md b/deps/npm/node_modules/depd/History.md deleted file mode 100644 index cd9ebaaa9963f7..00000000000000 --- a/deps/npm/node_modules/depd/History.md +++ /dev/null @@ -1,103 +0,0 @@ -2.0.0 / 2018-10-26 -================== - - * Drop support for Node.js 0.6 - * Replace internal `eval` usage with `Function` constructor - * Use instance methods on `process` to check for listeners - -1.1.2 / 2018-01-11 -================== - - * perf: remove argument reassignment - * Support Node.js 0.6 to 9.x - -1.1.1 / 2017-07-27 -================== - - * Remove unnecessary `Buffer` loading - * Support Node.js 0.6 to 8.x - -1.1.0 / 2015-09-14 -================== - - * Enable strict mode in more places - * Support io.js 3.x - * Support io.js 2.x - * Support web browser loading - - Requires bundler like Browserify or webpack - -1.0.1 / 2015-04-07 -================== - - * Fix `TypeError`s when under `'use strict'` code - * Fix useless type name on auto-generated messages - * Support io.js 1.x - * Support Node.js 0.12 - -1.0.0 / 2014-09-17 -================== - - * No changes - -0.4.5 / 2014-09-09 -================== - - * Improve call speed to functions using the function wrapper - * Support Node.js 0.6 - -0.4.4 / 2014-07-27 -================== - - * Work-around v8 generating empty stack traces - -0.4.3 / 2014-07-26 -================== - - * Fix exception when global `Error.stackTraceLimit` is too low - -0.4.2 / 2014-07-19 -================== - - * Correct call site for wrapped functions and properties - -0.4.1 / 2014-07-19 -================== - - * Improve automatic message generation for function properties - -0.4.0 / 2014-07-19 -================== - - * Add `TRACE_DEPRECATION` environment variable - * Remove non-standard grey color from color output - * Support `--no-deprecation` argument - * Support `--trace-deprecation` argument - * Support `deprecate.property(fn, prop, message)` - -0.3.0 / 2014-06-16 -================== - - * Add `NO_DEPRECATION` environment variable - -0.2.0 / 2014-06-15 -================== - - * Add `deprecate.property(obj, prop, message)` - * Remove `supports-color` dependency for node.js 0.8 - -0.1.0 / 2014-06-15 -================== - - * Add `deprecate.function(fn, message)` - * Add `process.on('deprecation', fn)` emitter - * Automatically generate message when omitted from `deprecate()` - -0.0.1 / 2014-06-15 -================== - - * Fix warning for dynamic calls at singe call site - -0.0.0 / 2014-06-15 -================== - - * Initial implementation diff --git a/deps/npm/node_modules/depd/index.js b/deps/npm/node_modules/depd/index.js deleted file mode 100644 index 1bf2fcfdeffc98..00000000000000 --- a/deps/npm/node_modules/depd/index.js +++ /dev/null @@ -1,538 +0,0 @@ -/*! - * depd - * Copyright(c) 2014-2018 Douglas Christopher Wilson - * MIT Licensed - */ - -/** - * Module dependencies. - */ - -var relative = require('path').relative - -/** - * Module exports. - */ - -module.exports = depd - -/** - * Get the path to base files on. - */ - -var basePath = process.cwd() - -/** - * Determine if namespace is contained in the string. 
- */ - -function containsNamespace (str, namespace) { - var vals = str.split(/[ ,]+/) - var ns = String(namespace).toLowerCase() - - for (var i = 0; i < vals.length; i++) { - var val = vals[i] - - // namespace contained - if (val && (val === '*' || val.toLowerCase() === ns)) { - return true - } - } - - return false -} - -/** - * Convert a data descriptor to accessor descriptor. - */ - -function convertDataDescriptorToAccessor (obj, prop, message) { - var descriptor = Object.getOwnPropertyDescriptor(obj, prop) - var value = descriptor.value - - descriptor.get = function getter () { return value } - - if (descriptor.writable) { - descriptor.set = function setter (val) { return (value = val) } - } - - delete descriptor.value - delete descriptor.writable - - Object.defineProperty(obj, prop, descriptor) - - return descriptor -} - -/** - * Create arguments string to keep arity. - */ - -function createArgumentsString (arity) { - var str = '' - - for (var i = 0; i < arity; i++) { - str += ', arg' + i - } - - return str.substr(2) -} - -/** - * Create stack string from stack. - */ - -function createStackString (stack) { - var str = this.name + ': ' + this.namespace - - if (this.message) { - str += ' deprecated ' + this.message - } - - for (var i = 0; i < stack.length; i++) { - str += '\n at ' + stack[i].toString() - } - - return str -} - -/** - * Create deprecate for namespace in caller. - */ - -function depd (namespace) { - if (!namespace) { - throw new TypeError('argument namespace is required') - } - - var stack = getStack() - var site = callSiteLocation(stack[1]) - var file = site[0] - - function deprecate (message) { - // call to self as log - log.call(deprecate, message) - } - - deprecate._file = file - deprecate._ignored = isignored(namespace) - deprecate._namespace = namespace - deprecate._traced = istraced(namespace) - deprecate._warned = Object.create(null) - - deprecate.function = wrapfunction - deprecate.property = wrapproperty - - return deprecate -} - -/** - * Determine if event emitter has listeners of a given type. - * - * The way to do this check is done three different ways in Node.js >= 0.8 - * so this consolidates them into a minimal set using instance methods. - * - * @param {EventEmitter} emitter - * @param {string} type - * @returns {boolean} - * @private - */ - -function eehaslisteners (emitter, type) { - var count = typeof emitter.listenerCount !== 'function' - ? emitter.listeners(type).length - : emitter.listenerCount(type) - - return count > 0 -} - -/** - * Determine if namespace is ignored. - */ - -function isignored (namespace) { - if (process.noDeprecation) { - // --no-deprecation support - return true - } - - var str = process.env.NO_DEPRECATION || '' - - // namespace ignored - return containsNamespace(str, namespace) -} - -/** - * Determine if namespace is traced. - */ - -function istraced (namespace) { - if (process.traceDeprecation) { - // --trace-deprecation support - return true - } - - var str = process.env.TRACE_DEPRECATION || '' - - // namespace traced - return containsNamespace(str, namespace) -} - -/** - * Display deprecation message. 
- */ - -function log (message, site) { - var haslisteners = eehaslisteners(process, 'deprecation') - - // abort early if no destination - if (!haslisteners && this._ignored) { - return - } - - var caller - var callFile - var callSite - var depSite - var i = 0 - var seen = false - var stack = getStack() - var file = this._file - - if (site) { - // provided site - depSite = site - callSite = callSiteLocation(stack[1]) - callSite.name = depSite.name - file = callSite[0] - } else { - // get call site - i = 2 - depSite = callSiteLocation(stack[i]) - callSite = depSite - } - - // get caller of deprecated thing in relation to file - for (; i < stack.length; i++) { - caller = callSiteLocation(stack[i]) - callFile = caller[0] - - if (callFile === file) { - seen = true - } else if (callFile === this._file) { - file = this._file - } else if (seen) { - break - } - } - - var key = caller - ? depSite.join(':') + '__' + caller.join(':') - : undefined - - if (key !== undefined && key in this._warned) { - // already warned - return - } - - this._warned[key] = true - - // generate automatic message from call site - var msg = message - if (!msg) { - msg = callSite === depSite || !callSite.name - ? defaultMessage(depSite) - : defaultMessage(callSite) - } - - // emit deprecation if listeners exist - if (haslisteners) { - var err = DeprecationError(this._namespace, msg, stack.slice(i)) - process.emit('deprecation', err) - return - } - - // format and write message - var format = process.stderr.isTTY - ? formatColor - : formatPlain - var output = format.call(this, msg, caller, stack.slice(i)) - process.stderr.write(output + '\n', 'utf8') -} - -/** - * Get call site location as array. - */ - -function callSiteLocation (callSite) { - var file = callSite.getFileName() || '' - var line = callSite.getLineNumber() - var colm = callSite.getColumnNumber() - - if (callSite.isEval()) { - file = callSite.getEvalOrigin() + ', ' + file - } - - var site = [file, line, colm] - - site.callSite = callSite - site.name = callSite.getFunctionName() - - return site -} - -/** - * Generate a default message from the site. - */ - -function defaultMessage (site) { - var callSite = site.callSite - var funcName = site.name - - // make useful anonymous name - if (!funcName) { - funcName = '' - } - - var context = callSite.getThis() - var typeName = context && callSite.getTypeName() - - // ignore useless type name - if (typeName === 'Object') { - typeName = undefined - } - - // make useful type name - if (typeName === 'Function') { - typeName = context.name || typeName - } - - return typeName && callSite.getMethodName() - ? typeName + '.' + funcName - : funcName -} - -/** - * Format deprecation message without color. - */ - -function formatPlain (msg, caller, stack) { - var timestamp = new Date().toUTCString() - - var formatted = timestamp + - ' ' + this._namespace + - ' deprecated ' + msg - - // add stack trace - if (this._traced) { - for (var i = 0; i < stack.length; i++) { - formatted += '\n at ' + stack[i].toString() - } - - return formatted - } - - if (caller) { - formatted += ' at ' + formatLocation(caller) - } - - return formatted -} - -/** - * Format deprecation message with color. 
- */ - -function formatColor (msg, caller, stack) { - var formatted = '\x1b[36;1m' + this._namespace + '\x1b[22;39m' + // bold cyan - ' \x1b[33;1mdeprecated\x1b[22;39m' + // bold yellow - ' \x1b[0m' + msg + '\x1b[39m' // reset - - // add stack trace - if (this._traced) { - for (var i = 0; i < stack.length; i++) { - formatted += '\n \x1b[36mat ' + stack[i].toString() + '\x1b[39m' // cyan - } - - return formatted - } - - if (caller) { - formatted += ' \x1b[36m' + formatLocation(caller) + '\x1b[39m' // cyan - } - - return formatted -} - -/** - * Format call site location. - */ - -function formatLocation (callSite) { - return relative(basePath, callSite[0]) + - ':' + callSite[1] + - ':' + callSite[2] -} - -/** - * Get the stack as array of call sites. - */ - -function getStack () { - var limit = Error.stackTraceLimit - var obj = {} - var prep = Error.prepareStackTrace - - Error.prepareStackTrace = prepareObjectStackTrace - Error.stackTraceLimit = Math.max(10, limit) - - // capture the stack - Error.captureStackTrace(obj) - - // slice this function off the top - var stack = obj.stack.slice(1) - - Error.prepareStackTrace = prep - Error.stackTraceLimit = limit - - return stack -} - -/** - * Capture call site stack from v8. - */ - -function prepareObjectStackTrace (obj, stack) { - return stack -} - -/** - * Return a wrapped function in a deprecation message. - */ - -function wrapfunction (fn, message) { - if (typeof fn !== 'function') { - throw new TypeError('argument fn must be a function') - } - - var args = createArgumentsString(fn.length) - var stack = getStack() - var site = callSiteLocation(stack[1]) - - site.name = fn.name - - // eslint-disable-next-line no-new-func - var deprecatedfn = new Function('fn', 'log', 'deprecate', 'message', 'site', - '"use strict"\n' + - 'return function (' + args + ') {' + - 'log.call(deprecate, message, site)\n' + - 'return fn.apply(this, arguments)\n' + - '}')(fn, log, this, message, site) - - return deprecatedfn -} - -/** - * Wrap property in a deprecation message. 
- */ - -function wrapproperty (obj, prop, message) { - if (!obj || (typeof obj !== 'object' && typeof obj !== 'function')) { - throw new TypeError('argument obj must be object') - } - - var descriptor = Object.getOwnPropertyDescriptor(obj, prop) - - if (!descriptor) { - throw new TypeError('must call property on owner object') - } - - if (!descriptor.configurable) { - throw new TypeError('property must be configurable') - } - - var deprecate = this - var stack = getStack() - var site = callSiteLocation(stack[1]) - - // set site name - site.name = prop - - // convert data descriptor - if ('value' in descriptor) { - descriptor = convertDataDescriptorToAccessor(obj, prop, message) - } - - var get = descriptor.get - var set = descriptor.set - - // wrap getter - if (typeof get === 'function') { - descriptor.get = function getter () { - log.call(deprecate, message, site) - return get.apply(this, arguments) - } - } - - // wrap setter - if (typeof set === 'function') { - descriptor.set = function setter () { - log.call(deprecate, message, site) - return set.apply(this, arguments) - } - } - - Object.defineProperty(obj, prop, descriptor) -} - -/** - * Create DeprecationError for deprecation - */ - -function DeprecationError (namespace, message, stack) { - var error = new Error() - var stackString - - Object.defineProperty(error, 'constructor', { - value: DeprecationError - }) - - Object.defineProperty(error, 'message', { - configurable: true, - enumerable: false, - value: message, - writable: true - }) - - Object.defineProperty(error, 'name', { - enumerable: false, - configurable: true, - value: 'DeprecationError', - writable: true - }) - - Object.defineProperty(error, 'namespace', { - configurable: true, - enumerable: false, - value: namespace, - writable: true - }) - - Object.defineProperty(error, 'stack', { - configurable: true, - enumerable: false, - get: function () { - if (stackString !== undefined) { - return stackString - } - - // prepare stack trace - return (stackString = createStackString.call(this, stack)) - }, - set: function setter (val) { - stackString = val - } - }) - - return error -} diff --git a/deps/npm/node_modules/depd/lib/browser/index.js b/deps/npm/node_modules/depd/lib/browser/index.js deleted file mode 100644 index 6be45cc20b33f2..00000000000000 --- a/deps/npm/node_modules/depd/lib/browser/index.js +++ /dev/null @@ -1,77 +0,0 @@ -/*! - * depd - * Copyright(c) 2015 Douglas Christopher Wilson - * MIT Licensed - */ - -'use strict' - -/** - * Module exports. - * @public - */ - -module.exports = depd - -/** - * Create deprecate for namespace in caller. - */ - -function depd (namespace) { - if (!namespace) { - throw new TypeError('argument namespace is required') - } - - function deprecate (message) { - // no-op in browser - } - - deprecate._file = undefined - deprecate._ignored = true - deprecate._namespace = namespace - deprecate._traced = false - deprecate._warned = Object.create(null) - - deprecate.function = wrapfunction - deprecate.property = wrapproperty - - return deprecate -} - -/** - * Return a wrapped function in a deprecation message. - * - * This is a no-op version of the wrapper, which does nothing but call - * validation. - */ - -function wrapfunction (fn, message) { - if (typeof fn !== 'function') { - throw new TypeError('argument fn must be a function') - } - - return fn -} - -/** - * Wrap property in a deprecation message. - * - * This is a no-op version of the wrapper, which does nothing but call - * validation. 
- */ - -function wrapproperty (obj, prop, message) { - if (!obj || (typeof obj !== 'object' && typeof obj !== 'function')) { - throw new TypeError('argument obj must be object') - } - - var descriptor = Object.getOwnPropertyDescriptor(obj, prop) - - if (!descriptor) { - throw new TypeError('must call property on owner object') - } - - if (!descriptor.configurable) { - throw new TypeError('property must be configurable') - } -} diff --git a/deps/npm/node_modules/depd/package.json b/deps/npm/node_modules/depd/package.json deleted file mode 100644 index 3857e199184a0a..00000000000000 --- a/deps/npm/node_modules/depd/package.json +++ /dev/null @@ -1,45 +0,0 @@ -{ - "name": "depd", - "description": "Deprecate all the things", - "version": "2.0.0", - "author": "Douglas Christopher Wilson ", - "license": "MIT", - "keywords": [ - "deprecate", - "deprecated" - ], - "repository": "dougwilson/nodejs-depd", - "browser": "lib/browser/index.js", - "devDependencies": { - "benchmark": "2.1.4", - "beautify-benchmark": "0.2.4", - "eslint": "5.7.0", - "eslint-config-standard": "12.0.0", - "eslint-plugin-import": "2.14.0", - "eslint-plugin-markdown": "1.0.0-beta.7", - "eslint-plugin-node": "7.0.1", - "eslint-plugin-promise": "4.0.1", - "eslint-plugin-standard": "4.0.0", - "istanbul": "0.4.5", - "mocha": "5.2.0", - "safe-buffer": "5.1.2", - "uid-safe": "2.1.5" - }, - "files": [ - "lib/", - "History.md", - "LICENSE", - "index.js", - "Readme.md" - ], - "engines": { - "node": ">= 0.8" - }, - "scripts": { - "bench": "node benchmark/index.js", - "lint": "eslint --plugin markdown --ext js,md .", - "test": "mocha --reporter spec --bail test/", - "test-ci": "istanbul cover --print=none node_modules/mocha/bin/_mocha -- --reporter spec test/ && istanbul report lcovonly text-summary", - "test-cov": "istanbul cover --print=none node_modules/mocha/bin/_mocha -- --reporter dot test/ && istanbul report lcov text-summary" - } -} diff --git a/deps/npm/node_modules/fs-minipass/package.json b/deps/npm/node_modules/fs-minipass/package.json index 3d1fa3dbc11e46..e501e6474294d8 100644 --- a/deps/npm/node_modules/fs-minipass/package.json +++ b/deps/npm/node_modules/fs-minipass/package.json @@ -1,6 +1,6 @@ { "name": "fs-minipass", - "version": "3.0.2", + "version": "3.0.3", "main": "lib/index.js", "scripts": { "test": "tap", @@ -24,11 +24,11 @@ "homepage": "https://github.com/npm/fs-minipass#readme", "description": "fs read and write streams based on minipass", "dependencies": { - "minipass": "^5.0.0" + "minipass": "^7.0.3" }, "devDependencies": { "@npmcli/eslint-config": "^4.0.1", - "@npmcli/template-oss": "4.14.1", + "@npmcli/template-oss": "4.18.0", "mutate-fs": "^2.1.1", "tap": "^16.3.2" }, @@ -48,7 +48,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.14.1", + "version": "4.18.0", "publish": "true" } } diff --git a/deps/npm/node_modules/glob/README.md b/deps/npm/node_modules/glob/README.md index 892013baae771c..1bde1494664d4d 100644 --- a/deps/npm/node_modules/glob/README.md +++ b/deps/npm/node_modules/glob/README.md @@ -55,7 +55,7 @@ const filesStream = globStream(['**/*.dat', 'logs/**/*.log']) // construct a Glob object if you wanna do it that way, which // allows for much faster walks if you have to look in the same // folder multiple times. 
-const g = new Glob('**/foo') +const g = new Glob('**/foo', {}) // glob objects are async iterators, can also do globIterate() or // g.iterate(), same deal for await (const file of g) { @@ -358,6 +358,8 @@ An object that can perform glob pattern traversals. ### `const g = new Glob(pattern: string | string[], options: GlobOptions)` +Options object is required. + See full options descriptions below. Note that a previous `Glob` object can be passed as the diff --git a/deps/npm/node_modules/glob/dist/cjs/package.json b/deps/npm/node_modules/glob/dist/cjs/package.json index 44b67c307f1c85..c15df94a3582bf 100644 --- a/deps/npm/node_modules/glob/dist/cjs/package.json +++ b/deps/npm/node_modules/glob/dist/cjs/package.json @@ -1,4 +1,4 @@ { - "version": "10.2.7", + "version": "10.3.3", "type": "commonjs" } diff --git a/deps/npm/node_modules/glob/dist/cjs/src/bin.js b/deps/npm/node_modules/glob/dist/cjs/src/bin.js index 733358c7365be8..4a8a88f2734d2e 100755 --- a/deps/npm/node_modules/glob/dist/cjs/src/bin.js +++ b/deps/npm/node_modules/glob/dist/cjs/src/bin.js @@ -4,10 +4,10 @@ Object.defineProperty(exports, "__esModule", { value: true }); const foreground_child_1 = require("foreground-child"); const fs_1 = require("fs"); const jackspeak_1 = require("jackspeak"); -const index_js_1 = require("./index.js"); const package_json_1 = require("../package.json"); +const index_js_1 = require("./index.js"); const j = (0, jackspeak_1.jack)({ - usage: 'glob [options] [ [ ...]]' + usage: 'glob [options] [ [ ...]]', }) .description(` Glob v${package_json_1.version} @@ -22,6 +22,14 @@ const j = (0, jackspeak_1.jack)({ description: `Run the command provided, passing the glob expression matches as arguments.`, }, +}) + .opt({ + default: { + short: 'p', + hint: 'pattern', + description: `If no positional arguments are provided, glob will use + this pattern`, + }, }) .flag({ all: { @@ -214,8 +222,10 @@ try { console.log(j.usage()); process.exit(0); } - if (positionals.length === 0) + if (positionals.length === 0 && !values.default) throw 'No patterns provided'; + if (positionals.length === 0 && values.default) + positionals.push(values.default); const patterns = values.all ? 
positionals : positionals.filter(p => !(0, fs_1.existsSync)(p)); diff --git a/deps/npm/node_modules/glob/dist/cjs/src/bin.js.map b/deps/npm/node_modules/glob/dist/cjs/src/bin.js.map index abd2aa47d82d3c..e189acfd01b1a7 100644 --- a/deps/npm/node_modules/glob/dist/cjs/src/bin.js.map +++ b/deps/npm/node_modules/glob/dist/cjs/src/bin.js.map @@ -1 +1 @@ -{"version":3,"file":"bin.js","sourceRoot":"","sources":["../../../src/bin.ts"],"names":[],"mappings":";;;AACA,uDAAkD;AAClD,2BAA+B;AAC/B,yCAAgC;AAChC,yCAAuC;AACvC,kDAAyC;AAEzC,MAAM,CAAC,GAAG,IAAA,gBAAI,EAAC;IACb,KAAK,EAAE,4CAA4C;CACpD,CAAC;KACC,WAAW,CACV;YACQ,sBAAO;;;;GAIhB,CACA;KACA,GAAG,CAAC;IACH,GAAG,EAAE;QACH,KAAK,EAAE,GAAG;QACV,IAAI,EAAE,SAAS;QACf,WAAW,EAAE;0CACuB;KACrC;CACF,CAAC;KACD,IAAI,CAAC;IACJ,GAAG,EAAE;QACH,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;;;;;;;;;;;;;;;;;;;OAqBZ;KACF;IACD,QAAQ,EAAE;QACR,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,0BAA0B;KACxC;IACD,cAAc,EAAE;QACd,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,kCAAkC;KAChD;IACD,IAAI,EAAE;QACJ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,uCAAuC;KACrD;IACD,KAAK,EAAE;QACL,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;;;OAKZ;KACF;IAED,MAAM,EAAE;QACN,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,kDAAkD;KAChE;IACD,QAAQ,EAAE;QACR,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;+DAG4C;KAC1D;IACD,IAAI,EAAE;QACJ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;wDACqC;KACnD;IACD,YAAY,EAAE;QACZ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;;OAIZ;KACF;IAED,GAAG,EAAE;QACH,WAAW,EAAE;;OAEZ;KACF;IACD,OAAO,EAAE;QACP,WAAW,EAAE,8BAA8B;KAC5C;IACD,MAAM,EAAE;QACN,WAAW,EAAE;;;;;;;;;OASZ;KACF;IACD,KAAK,EAAE;QACL,WAAW,EAAE;;;;OAIZ;KACF;IACD,KAAK,EAAE;QACL,WAAW,EAAE,kDAAkD;KAChE;IACD,UAAU,EAAE;QACV,WAAW,EAAE;0DACuC;KACrD;IACD,wBAAwB,EAAE;QACxB,WAAW,EAAE;;sDAEmC;KACjD;CACF,CAAC;KACD,GAAG,CAAC;IACH,WAAW,EAAE;QACX,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;sCACmB;KACjC;CACF,CAAC;KACD,GAAG,CAAC;IACH,GAAG,EAAE;QACH,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,+CAA+C;QAC5D,OAAO,EAAE,OAAO,CAAC,GAAG,EAAE;KACvB;IACD,IAAI,EAAE;QACJ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;;;;;;;;;;;;;;;;;OAmBZ;KACF;IACD,QAAQ,EAAE;QACR,WAAW,EAAE;;uEAEoD;QACjE,QAAQ,EAAE,CAAC,CAAC,EAAE,CACZ,IAAI,GAAG,CAAC;YACN,KAAK;YACL,SAAS;YACT,QAAQ;YACR,SAAS;YACT,OAAO;YACP,OAAO;YACP,SAAS;YACT,OAAO;YACP,OAAO;YACP,QAAQ;YACR,QAAQ;SACT,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;KACZ;CACF,CAAC;KACD,OAAO,CAAC;IACP,MAAM,EAAE;QACN,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,yBAAyB;KACvC;CACF,CAAC;KACD,IAAI,CAAC;IACJ,KAAK,EAAE;QACL,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;yEACsD;KACpE;CACF,CAAC;KACD,IAAI,CAAC;IACJ,IAAI,EAAE;QACJ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,6BAA6B;KAC3C;CACF,CAAC,CAAA;AAEJ,IAAI;IACF,MAAM,EAAE,WAAW,EAAE,MAAM,EAAE,GAAG,CAAC,CAAC,KAAK,EAAE,CAAA;IACzC,IAAI,MAAM,CAAC,IAAI,EAAE;QACf,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAA;QACtB,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;KAChB;IACD,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC;QAAE,MAAM,sBAAsB,CAAA;IAC1D,MAAM,QAAQ,GAAG,MAAM,CAAC,GAAG;QACzB,CAAC,CAAC,WAAW;QACb,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,IAAA,eAAU,EAAC,CAAC,CAAC,CAAC,CAAA;IAC3C,MAAM,OAAO,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,IAAA,eAAU,EAAC,CAAC,CAAC,CAAC,CAAA;IACxE,MAAM,MAAM,GAAG,IAAA,qBAAU,EAAC,QAAQ,EAAE;QAClC,QAAQ,EAAE,MAAM,CAAC,QAAQ;QACzB,GAAG,EAAE,MAAM,CAAC,GAAG;QACf,GAAG,EAAE,MAAM,CAAC,GAAG;QACf,WAAW,EAAE,MAAM,CAAC,cAAc,CAAC;QACnC,MAAM,EAAE,MAAM,CAAC,MAAM;QACrB,MAAM,EAAE,MAAM,CAAC,MAAM;QACrB,IAAI,EAAE,MAAM,CAAC,IAAI;QACjB,SAAS,EAAE,MAAM,CAAC,YAAY,CAAC;QAC/B,QAAQ,EAAE,MAAM,CAAC,WAAW,CAAC;QAC7B,OAAO,EAAE,MAAM,CAAC,OAAO;QACvB,MAAM,EAAE,MAAM,CAAC,MAAM;QACrB,KAAK,EAAE,MAAM,CAAC,KAAK;QACnB,KAAK,EAAE,MAAM,CAAC,KAAK;QACnB,UAAU,EAAE,MAAM,CAAC,UAAU;QAC7B,QAAQ,EAAE,MAAM,CAAC,QAAuC;QACxD,
QAAQ,EAAE,MAAM,CAAC,QAAQ;QACzB,IAAI,EAAE,MAAM,CAAC,IAAI;QACjB,IAAI,EAAE,MAAM,CAAC,IAAI;QACjB,KAAK,EAAE,MAAM,CAAC,KAAK;QACnB,KAAK,EAAE,MAAM,CAAC,KAAK;KACpB,CAAC,CAAA;IAEF,MAAM,GAAG,GAAG,MAAM,CAAC,GAAG,CAAA;IACtB,IAAI,CAAC,GAAG,EAAE;QACR,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAA;QACpC,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAA;KACvC;SAAM;QACL,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAA;QACvC,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE,CAAC,IAAA,kCAAe,EAAC,GAAG,EAAE,OAAO,EAAE,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC,CAAA;KACvE;CACF;AAAC,OAAO,CAAC,EAAE;IACV,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAA;IACxB,OAAO,CAAC,KAAK,CAAC,CAAC,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAA;IACzD,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;CAChB","sourcesContent":["#!/usr/bin/env node\nimport { foregroundChild } from 'foreground-child'\nimport { existsSync } from 'fs'\nimport { jack } from 'jackspeak'\nimport { globStream } from './index.js'\nimport { version } from '../package.json'\n\nconst j = jack({\n usage: 'glob [options] [ [ ...]]'\n})\n .description(\n `\n Glob v${version}\n\n Expand the positional glob expression arguments into any matching file\n system paths found.\n `\n )\n .opt({\n cmd: {\n short: 'c',\n hint: 'command',\n description: `Run the command provided, passing the glob expression\n matches as arguments.`,\n },\n })\n .flag({\n all: {\n short: 'A',\n description: `By default, the glob cli command will not expand any\n arguments that are an exact match to a file on disk.\n\n This prevents double-expanding, in case the shell expands\n an argument whose filename is a glob expression.\n\n For example, if 'app/*.ts' would match 'app/[id].ts', then\n on Windows powershell or cmd.exe, 'glob app/*.ts' will\n expand to 'app/[id].ts', as expected. However, in posix\n shells such as bash or zsh, the shell will first expand\n 'app/*.ts' to a list of filenames. Then glob will look\n for a file matching 'app/[id].ts' (ie, 'app/i.ts' or\n 'app/d.ts'), which is unexpected.\n\n Setting '--all' prevents this behavior, causing glob\n to treat ALL patterns as glob expressions to be expanded,\n even if they are an exact match to a file on disk.\n\n When setting this option, be sure to enquote arguments\n so that the shell will not expand them prior to passing\n them to the glob command process.\n `,\n },\n absolute: {\n short: 'a',\n description: 'Expand to absolute paths',\n },\n 'dot-relative': {\n short: 'd',\n description: `Prepend './' on relative matches`,\n },\n mark: {\n short: 'm',\n description: `Append a / on any directories matched`,\n },\n posix: {\n short: 'x',\n description: `Always resolve to posix style paths, using '/' as the\n directory separator, even on Windows. Drive letter\n absolute matches on Windows will be expanded to their\n full resolved UNC maths, eg instead of 'C:\\\\foo\\\\bar',\n it will expand to '//?/C:/foo/bar'.\n `,\n },\n\n follow: {\n short: 'f',\n description: `Follow symlinked directories when expanding '**'`,\n },\n realpath: {\n short: 'R',\n description: `Call 'fs.realpath' on all of the results. In the case\n of an entry that cannot be resolved, the entry is\n omitted. 
This incurs a slight performance penalty, of\n course, because of the added system calls.`,\n },\n stat: {\n short: 's',\n description: `Call 'fs.lstat' on all entries, whether required or not\n to determine if it's a valid match.`,\n },\n 'match-base': {\n short: 'b',\n description: `Perform a basename-only match if the pattern does not\n contain any slash characters. That is, '*.js' would be\n treated as equivalent to '**/*.js', matching js files\n in all directories.\n `,\n },\n\n dot: {\n description: `Allow patterns to match files/directories that start\n with '.', even if the pattern does not start with '.'\n `,\n },\n nobrace: {\n description: 'Do not expand {...} patterns',\n },\n nocase: {\n description: `Perform a case-insensitive match. This defaults to\n 'true' on macOS and Windows platforms, and false on\n all others.\n\n Note: 'nocase' should only be explicitly set when it is\n known that the filesystem's case sensitivity differs\n from the platform default. If set 'true' on\n case-insensitive file systems, then the walk may return\n more or less results than expected.\n `,\n },\n nodir: {\n description: `Do not match directories, only files.\n\n Note: to *only* match directories, append a '/' at the\n end of the pattern.\n `,\n },\n noext: {\n description: `Do not expand extglob patterns, such as '+(a|b)'`,\n },\n noglobstar: {\n description: `Do not expand '**' against multiple path portions.\n Ie, treat it as a normal '*' instead.`,\n },\n 'windows-path-no-escape': {\n description: `Use '\\\\' as a path separator *only*, and *never* as an\n escape character. If set, all '\\\\' characters are\n replaced with '/' in the pattern.`,\n },\n })\n .num({\n 'max-depth': {\n short: 'D',\n description: `Maximum depth to traverse from the current\n working directory`,\n },\n })\n .opt({\n cwd: {\n short: 'C',\n description: 'Current working directory to execute/match in',\n default: process.cwd(),\n },\n root: {\n short: 'r',\n description: `A string path resolved against the 'cwd', which is\n used as the starting point for absolute patterns that\n start with '/' (but not drive letters or UNC paths\n on Windows).\n\n Note that this *doesn't* necessarily limit the walk to\n the 'root' directory, and doesn't affect the cwd\n starting point for non-absolute patterns. A pattern\n containing '..' will still be able to traverse out of\n the root directory, if it is not an actual root directory\n on the filesystem, and any non-absolute patterns will\n still be matched in the 'cwd'.\n\n To start absolute and non-absolute patterns in the same\n path, you can use '--root=' to set it to the empty\n string. However, be aware that on Windows systems, a\n pattern like 'x:/*' or '//host/share/*' will *always*\n start in the 'x:/' or '//host/share/' directory,\n regardless of the --root setting.\n `,\n },\n platform: {\n description: `Defaults to the value of 'process.platform' if\n available, or 'linux' if not. 
Setting --platform=win32\n on non-Windows systems may cause strange behavior!`,\n validate: v =>\n new Set([\n 'aix',\n 'android',\n 'darwin',\n 'freebsd',\n 'haiku',\n 'linux',\n 'openbsd',\n 'sunos',\n 'win32',\n 'cygwin',\n 'netbsd',\n ]).has(v),\n },\n })\n .optList({\n ignore: {\n short: 'i',\n description: `Glob patterns to ignore`,\n },\n })\n .flag({\n debug: {\n short: 'v',\n description: `Output a huge amount of noisy debug information about\n patterns as they are parsed and used to match files.`,\n },\n })\n .flag({\n help: {\n short: 'h',\n description: 'Show this usage information',\n },\n })\n\ntry {\n const { positionals, values } = j.parse()\n if (values.help) {\n console.log(j.usage())\n process.exit(0)\n }\n if (positionals.length === 0) throw 'No patterns provided'\n const patterns = values.all\n ? positionals\n : positionals.filter(p => !existsSync(p))\n const matches = values.all ? [] : positionals.filter(p => existsSync(p))\n const stream = globStream(patterns, {\n absolute: values.absolute,\n cwd: values.cwd,\n dot: values.dot,\n dotRelative: values['dot-relative'],\n follow: values.follow,\n ignore: values.ignore,\n mark: values.mark,\n matchBase: values['match-base'],\n maxDepth: values['max-depth'],\n nobrace: values.nobrace,\n nocase: values.nocase,\n nodir: values.nodir,\n noext: values.noext,\n noglobstar: values.noglobstar,\n platform: values.platform as undefined | NodeJS.Platform,\n realpath: values.realpath,\n root: values.root,\n stat: values.stat,\n debug: values.debug,\n posix: values.posix,\n })\n\n const cmd = values.cmd\n if (!cmd) {\n matches.forEach(m => console.log(m))\n stream.on('data', f => console.log(f))\n } else {\n stream.on('data', f => matches.push(f))\n stream.on('end', () => foregroundChild(cmd, matches, { shell: true }))\n }\n} catch (e) {\n console.error(j.usage())\n console.error(e instanceof Error ? 
e.message : String(e))\n process.exit(1)\n}\n"]} \ No newline at end of file +{"version":3,"file":"bin.js","sourceRoot":"","sources":["../../../src/bin.ts"],"names":[],"mappings":";;;AACA,uDAAkD;AAClD,2BAA+B;AAC/B,yCAAgC;AAChC,kDAAyC;AACzC,yCAAuC;AAEvC,MAAM,CAAC,GAAG,IAAA,gBAAI,EAAC;IACb,KAAK,EAAE,4CAA4C;CACpD,CAAC;KACC,WAAW,CACV;YACQ,sBAAO;;;;GAIhB,CACA;KACA,GAAG,CAAC;IACH,GAAG,EAAE;QACH,KAAK,EAAE,GAAG;QACV,IAAI,EAAE,SAAS;QACf,WAAW,EAAE;0CACuB;KACrC;CACF,CAAC;KACD,GAAG,CAAC;IACH,OAAO,EAAE;QACP,KAAK,EAAE,GAAG;QACV,IAAI,EAAE,SAAS;QACf,WAAW,EAAE;iCACc;KAC5B;CACF,CAAC;KACD,IAAI,CAAC;IACJ,GAAG,EAAE;QACH,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;;;;;;;;;;;;;;;;;;;OAqBZ;KACF;IACD,QAAQ,EAAE;QACR,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,0BAA0B;KACxC;IACD,cAAc,EAAE;QACd,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,kCAAkC;KAChD;IACD,IAAI,EAAE;QACJ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,uCAAuC;KACrD;IACD,KAAK,EAAE;QACL,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;;;OAKZ;KACF;IAED,MAAM,EAAE;QACN,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,kDAAkD;KAChE;IACD,QAAQ,EAAE;QACR,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;+DAG4C;KAC1D;IACD,IAAI,EAAE;QACJ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;wDACqC;KACnD;IACD,YAAY,EAAE;QACZ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;;OAIZ;KACF;IAED,GAAG,EAAE;QACH,WAAW,EAAE;;OAEZ;KACF;IACD,OAAO,EAAE;QACP,WAAW,EAAE,8BAA8B;KAC5C;IACD,MAAM,EAAE;QACN,WAAW,EAAE;;;;;;;;;OASZ;KACF;IACD,KAAK,EAAE;QACL,WAAW,EAAE;;;;OAIZ;KACF;IACD,KAAK,EAAE;QACL,WAAW,EAAE,kDAAkD;KAChE;IACD,UAAU,EAAE;QACV,WAAW,EAAE;0DACuC;KACrD;IACD,wBAAwB,EAAE;QACxB,WAAW,EAAE;;sDAEmC;KACjD;CACF,CAAC;KACD,GAAG,CAAC;IACH,WAAW,EAAE;QACX,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;sCACmB;KACjC;CACF,CAAC;KACD,GAAG,CAAC;IACH,GAAG,EAAE;QACH,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,+CAA+C;QAC5D,OAAO,EAAE,OAAO,CAAC,GAAG,EAAE;KACvB;IACD,IAAI,EAAE;QACJ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;;;;;;;;;;;;;;;;;OAmBZ;KACF;IACD,QAAQ,EAAE;QACR,WAAW,EAAE;;uEAEoD;QACjE,QAAQ,EAAE,CAAC,CAAC,EAAE,CACZ,IAAI,GAAG,CAAC;YACN,KAAK;YACL,SAAS;YACT,QAAQ;YACR,SAAS;YACT,OAAO;YACP,OAAO;YACP,SAAS;YACT,OAAO;YACP,OAAO;YACP,QAAQ;YACR,QAAQ;SACT,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;KACZ;CACF,CAAC;KACD,OAAO,CAAC;IACP,MAAM,EAAE;QACN,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,yBAAyB;KACvC;CACF,CAAC;KACD,IAAI,CAAC;IACJ,KAAK,EAAE;QACL,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;yEACsD;KACpE;CACF,CAAC;KACD,IAAI,CAAC;IACJ,IAAI,EAAE;QACJ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,6BAA6B;KAC3C;CACF,CAAC,CAAA;AAEJ,IAAI;IACF,MAAM,EAAE,WAAW,EAAE,MAAM,EAAE,GAAG,CAAC,CAAC,KAAK,EAAE,CAAA;IACzC,IAAI,MAAM,CAAC,IAAI,EAAE;QACf,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAA;QACtB,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;KAChB;IACD,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO;QAC7C,MAAM,sBAAsB,CAAA;IAC9B,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,OAAO;QAC5C,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAA;IAClC,MAAM,QAAQ,GAAG,MAAM,CAAC,GAAG;QACzB,CAAC,CAAC,WAAW;QACb,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,IAAA,eAAU,EAAC,CAAC,CAAC,CAAC,CAAA;IAC3C,MAAM,OAAO,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,IAAA,eAAU,EAAC,CAAC,CAAC,CAAC,CAAA;IACxE,MAAM,MAAM,GAAG,IAAA,qBAAU,EAAC,QAAQ,EAAE;QAClC,QAAQ,EAAE,MAAM,CAAC,QAAQ;QACzB,GAAG,EAAE,MAAM,CAAC,GAAG;QACf,GAAG,EAAE,MAAM,CAAC,GAAG;QACf,WAAW,EAAE,MAAM,CAAC,cAAc,CAAC;QACnC,MAAM,EAAE,MAAM,CAAC,MAAM;QACrB,MAAM,EAAE,MAAM,CAAC,MAAM;QACrB,IAAI,EAAE,MAAM,CAAC,IAAI;QACjB,SAAS,EAAE,MAAM,CAAC,YAAY,CAAC;QAC/B,QAAQ,EAAE,MAAM,CAAC,WAAW,CAAC;QAC7B,OAAO,EAAE,MAAM,CAAC,OAAO;QACvB,MAAM,EAAE,MAAM,CAAC,MAAM;QACrB,KAAK,EAAE,MAAM,CAAC,KAAK;QACnB,KAAK,EAAE,MAAM,CAAC,KAAK;QACnB,UAAU,EAAE,MAAM,CAAC,UAAU;QAC7B,QAAQ,EAAE,MAAM,CAAC,QAAuC;QACxD,QAAQ,EAAE,MAAM,CAAC,QAAQ;QACzB,IAAI
,EAAE,MAAM,CAAC,IAAI;QACjB,IAAI,EAAE,MAAM,CAAC,IAAI;QACjB,KAAK,EAAE,MAAM,CAAC,KAAK;QACnB,KAAK,EAAE,MAAM,CAAC,KAAK;KACpB,CAAC,CAAA;IAEF,MAAM,GAAG,GAAG,MAAM,CAAC,GAAG,CAAA;IACtB,IAAI,CAAC,GAAG,EAAE;QACR,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAA;QACpC,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAA;KACvC;SAAM;QACL,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAA;QACvC,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE,CAAC,IAAA,kCAAe,EAAC,GAAG,EAAE,OAAO,EAAE,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC,CAAA;KACvE;CACF;AAAC,OAAO,CAAC,EAAE;IACV,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAA;IACxB,OAAO,CAAC,KAAK,CAAC,CAAC,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAA;IACzD,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;CAChB","sourcesContent":["#!/usr/bin/env node\nimport { foregroundChild } from 'foreground-child'\nimport { existsSync } from 'fs'\nimport { jack } from 'jackspeak'\nimport { version } from '../package.json'\nimport { globStream } from './index.js'\n\nconst j = jack({\n usage: 'glob [options] [ [ ...]]',\n})\n .description(\n `\n Glob v${version}\n\n Expand the positional glob expression arguments into any matching file\n system paths found.\n `\n )\n .opt({\n cmd: {\n short: 'c',\n hint: 'command',\n description: `Run the command provided, passing the glob expression\n matches as arguments.`,\n },\n })\n .opt({\n default: {\n short: 'p',\n hint: 'pattern',\n description: `If no positional arguments are provided, glob will use\n this pattern`,\n },\n })\n .flag({\n all: {\n short: 'A',\n description: `By default, the glob cli command will not expand any\n arguments that are an exact match to a file on disk.\n\n This prevents double-expanding, in case the shell expands\n an argument whose filename is a glob expression.\n\n For example, if 'app/*.ts' would match 'app/[id].ts', then\n on Windows powershell or cmd.exe, 'glob app/*.ts' will\n expand to 'app/[id].ts', as expected. However, in posix\n shells such as bash or zsh, the shell will first expand\n 'app/*.ts' to a list of filenames. Then glob will look\n for a file matching 'app/[id].ts' (ie, 'app/i.ts' or\n 'app/d.ts'), which is unexpected.\n\n Setting '--all' prevents this behavior, causing glob\n to treat ALL patterns as glob expressions to be expanded,\n even if they are an exact match to a file on disk.\n\n When setting this option, be sure to enquote arguments\n so that the shell will not expand them prior to passing\n them to the glob command process.\n `,\n },\n absolute: {\n short: 'a',\n description: 'Expand to absolute paths',\n },\n 'dot-relative': {\n short: 'd',\n description: `Prepend './' on relative matches`,\n },\n mark: {\n short: 'm',\n description: `Append a / on any directories matched`,\n },\n posix: {\n short: 'x',\n description: `Always resolve to posix style paths, using '/' as the\n directory separator, even on Windows. Drive letter\n absolute matches on Windows will be expanded to their\n full resolved UNC maths, eg instead of 'C:\\\\foo\\\\bar',\n it will expand to '//?/C:/foo/bar'.\n `,\n },\n\n follow: {\n short: 'f',\n description: `Follow symlinked directories when expanding '**'`,\n },\n realpath: {\n short: 'R',\n description: `Call 'fs.realpath' on all of the results. In the case\n of an entry that cannot be resolved, the entry is\n omitted. 
This incurs a slight performance penalty, of\n course, because of the added system calls.`,\n },\n stat: {\n short: 's',\n description: `Call 'fs.lstat' on all entries, whether required or not\n to determine if it's a valid match.`,\n },\n 'match-base': {\n short: 'b',\n description: `Perform a basename-only match if the pattern does not\n contain any slash characters. That is, '*.js' would be\n treated as equivalent to '**/*.js', matching js files\n in all directories.\n `,\n },\n\n dot: {\n description: `Allow patterns to match files/directories that start\n with '.', even if the pattern does not start with '.'\n `,\n },\n nobrace: {\n description: 'Do not expand {...} patterns',\n },\n nocase: {\n description: `Perform a case-insensitive match. This defaults to\n 'true' on macOS and Windows platforms, and false on\n all others.\n\n Note: 'nocase' should only be explicitly set when it is\n known that the filesystem's case sensitivity differs\n from the platform default. If set 'true' on\n case-insensitive file systems, then the walk may return\n more or less results than expected.\n `,\n },\n nodir: {\n description: `Do not match directories, only files.\n\n Note: to *only* match directories, append a '/' at the\n end of the pattern.\n `,\n },\n noext: {\n description: `Do not expand extglob patterns, such as '+(a|b)'`,\n },\n noglobstar: {\n description: `Do not expand '**' against multiple path portions.\n Ie, treat it as a normal '*' instead.`,\n },\n 'windows-path-no-escape': {\n description: `Use '\\\\' as a path separator *only*, and *never* as an\n escape character. If set, all '\\\\' characters are\n replaced with '/' in the pattern.`,\n },\n })\n .num({\n 'max-depth': {\n short: 'D',\n description: `Maximum depth to traverse from the current\n working directory`,\n },\n })\n .opt({\n cwd: {\n short: 'C',\n description: 'Current working directory to execute/match in',\n default: process.cwd(),\n },\n root: {\n short: 'r',\n description: `A string path resolved against the 'cwd', which is\n used as the starting point for absolute patterns that\n start with '/' (but not drive letters or UNC paths\n on Windows).\n\n Note that this *doesn't* necessarily limit the walk to\n the 'root' directory, and doesn't affect the cwd\n starting point for non-absolute patterns. A pattern\n containing '..' will still be able to traverse out of\n the root directory, if it is not an actual root directory\n on the filesystem, and any non-absolute patterns will\n still be matched in the 'cwd'.\n\n To start absolute and non-absolute patterns in the same\n path, you can use '--root=' to set it to the empty\n string. However, be aware that on Windows systems, a\n pattern like 'x:/*' or '//host/share/*' will *always*\n start in the 'x:/' or '//host/share/' directory,\n regardless of the --root setting.\n `,\n },\n platform: {\n description: `Defaults to the value of 'process.platform' if\n available, or 'linux' if not. 
Setting --platform=win32\n on non-Windows systems may cause strange behavior!`,\n validate: v =>\n new Set([\n 'aix',\n 'android',\n 'darwin',\n 'freebsd',\n 'haiku',\n 'linux',\n 'openbsd',\n 'sunos',\n 'win32',\n 'cygwin',\n 'netbsd',\n ]).has(v),\n },\n })\n .optList({\n ignore: {\n short: 'i',\n description: `Glob patterns to ignore`,\n },\n })\n .flag({\n debug: {\n short: 'v',\n description: `Output a huge amount of noisy debug information about\n patterns as they are parsed and used to match files.`,\n },\n })\n .flag({\n help: {\n short: 'h',\n description: 'Show this usage information',\n },\n })\n\ntry {\n const { positionals, values } = j.parse()\n if (values.help) {\n console.log(j.usage())\n process.exit(0)\n }\n if (positionals.length === 0 && !values.default)\n throw 'No patterns provided'\n if (positionals.length === 0 && values.default)\n positionals.push(values.default)\n const patterns = values.all\n ? positionals\n : positionals.filter(p => !existsSync(p))\n const matches = values.all ? [] : positionals.filter(p => existsSync(p))\n const stream = globStream(patterns, {\n absolute: values.absolute,\n cwd: values.cwd,\n dot: values.dot,\n dotRelative: values['dot-relative'],\n follow: values.follow,\n ignore: values.ignore,\n mark: values.mark,\n matchBase: values['match-base'],\n maxDepth: values['max-depth'],\n nobrace: values.nobrace,\n nocase: values.nocase,\n nodir: values.nodir,\n noext: values.noext,\n noglobstar: values.noglobstar,\n platform: values.platform as undefined | NodeJS.Platform,\n realpath: values.realpath,\n root: values.root,\n stat: values.stat,\n debug: values.debug,\n posix: values.posix,\n })\n\n const cmd = values.cmd\n if (!cmd) {\n matches.forEach(m => console.log(m))\n stream.on('data', f => console.log(f))\n } else {\n stream.on('data', f => matches.push(f))\n stream.on('end', () => foregroundChild(cmd, matches, { shell: true }))\n }\n} catch (e) {\n console.error(j.usage())\n console.error(e instanceof Error ? 
e.message : String(e))\n process.exit(1)\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/glob/dist/cjs/src/glob.d.ts.map b/deps/npm/node_modules/glob/dist/cjs/src/glob.d.ts.map index b0ea3b71e222ad..6353d8b3c47126 100644 --- a/deps/npm/node_modules/glob/dist/cjs/src/glob.d.ts.map +++ b/deps/npm/node_modules/glob/dist/cjs/src/glob.d.ts.map @@ -1 +1 @@ -{"version":3,"file":"glob.d.ts","sourceRoot":"","sources":["../../../src/glob.ts"],"names":[],"mappings":";AAAA,OAAO,EAAE,SAAS,EAAoB,MAAM,WAAW,CAAA;AACvD,OAAO,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAA;AACnC,OAAO,EACL,QAAQ,EACR,IAAI,EACJ,UAAU,EAIX,MAAM,aAAa,CAAA;AAEpB,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AACxC,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAA;AAGtC,MAAM,MAAM,QAAQ,GAAG,SAAS,CAAC,KAAK,CAAC,CAAA;AACvC,MAAM,MAAM,SAAS,GAAG,OAAO,CAAC,SAAS,CAAC,WAAW,CAAC,EAAE,SAAS,CAAC,CAAA;AAWlE;;;;;;;;;;;;GAYG;AACH,MAAM,WAAW,WAAW;IAC1B;;;;;;;;;;;;OAYG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAA;IAElB;;;;OAIG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAA;IAE5B;;;;;OAKG;IACH,GAAG,CAAC,EAAE,MAAM,GAAG,GAAG,CAAA;IAElB;;;;OAIG;IACH,GAAG,CAAC,EAAE,OAAO,CAAA;IAEb;;;;;;;;OAQG;IACH,WAAW,CAAC,EAAE,OAAO,CAAA;IAErB;;;;;;;;OAQG;IACH,MAAM,CAAC,EAAE,OAAO,CAAA;IAEhB;;;;;;;;;;;;;;;;OAgBG;IACH,MAAM,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,UAAU,CAAA;IAEvC;;;;;OAKG;IACH,aAAa,CAAC,EAAE,OAAO,CAAA;IAEvB;;;OAGG;IACH,IAAI,CAAC,EAAE,OAAO,CAAA;IAEd;;;;OAIG;IACH,SAAS,CAAC,EAAE,OAAO,CAAA;IAEnB;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAA;IAEjB;;OAEG;IACH,OAAO,CAAC,EAAE,OAAO,CAAA;IAEjB;;;;;;;;;OASG;IACH,MAAM,CAAC,EAAE,OAAO,CAAA;IAEhB;;;OAGG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;OAEG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;;;;OAKG;IACH,UAAU,CAAC,EAAE,OAAO,CAAA;IAEpB;;;;OAIG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC,QAAQ,CAAA;IAE1B;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAA;IAElB;;;;;;;;;;;;;;;;;;;;;;OAsBG;IACH,IAAI,CAAC,EAAE,MAAM,CAAA;IAEb;;;;;OAKG;IACH,MAAM,CAAC,EAAE,UAAU,CAAA;IAEnB;;;;;;OAMG;IACH,IAAI,CAAC,EAAE,OAAO,CAAA;IAEd;;;OAGG;IACH,MAAM,CAAC,EAAE,WAAW,CAAA;IAEpB;;;;;;;;;;;;;OAaG;IACH,oBAAoB,CAAC,EAAE,OAAO,CAAA;IAE9B;;;;;;;OAOG;IACH,aAAa,CAAC,EAAE,OAAO,CAAA;IAEvB;;;OAGG;IACH,EAAE,CAAC,EAAE,QAAQ,CAAA;IAEb;;;OAGG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;;;;;;OAOG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;CAChB;AAED,MAAM,MAAM,4BAA4B,GAAG,WAAW,GAAG;IACvD,aAAa,EAAE,IAAI,CAAA;IAEnB,QAAQ,CAAC,EAAE,SAAS,CAAA;IACpB,IAAI,CAAC,EAAE,SAAS,CAAA;IAChB,KAAK,CAAC,EAAE,SAAS,CAAA;CAClB,CAAA;AAED,MAAM,MAAM,6BAA6B,GAAG,WAAW,GAAG;IACxD,aAAa,CAAC,EAAE,KAAK,CAAA;CACtB,CAAA;AAED,MAAM,MAAM,6BAA6B,GAAG,WAAW,GAAG;IACxD,aAAa,CAAC,EAAE,SAAS,CAAA;CAC1B,CAAA;AAED,MAAM,MAAM,MAAM,CAAC,IAAI,IAAI,IAAI,SAAS,4BAA4B,GAChE,IAAI,GACJ,IAAI,SAAS,6BAA6B,GAC1C,MAAM,GACN,IAAI,SAAS,6BAA6B,GAC1C,MAAM,GACN,MAAM,GAAG,IAAI,CAAA;AACjB,MAAM,MAAM,OAAO,CAAC,IAAI,IAAI,MAAM,CAAC,IAAI,CAAC,EAAE,CAAA;AAE1C,MAAM,MAAM,SAAS,CAAC,IAAI,IAAI,IAAI,SAAS,4BAA4B,GACnE,IAAI,GACJ,IAAI,SAAS,6BAA6B,GAC1C,KAAK,GACL,IAAI,SAAS,6BAA6B,GAC1C,KAAK,GACL,OAAO,CAAA;AAEX;;GAEG;AACH,qBAAa,IAAI,CAAC,IAAI,SAAS,WAAW,CAAE,YAAW,WAAW;IAChE,QAAQ,CAAC,EAAE,OAAO,CAAA;IAClB,GAAG,EAAE,MAAM,CAAA;IACX,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,GAAG,EAAE,OAAO,CAAA;IACZ,WAAW,EAAE,OAAO,CAAA;IACpB,MAAM,EAAE,OAAO,CAAA;IACf,MAAM,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,UAAU,CAAA;IACvC,aAAa,EAAE,OAAO,CAAA;IACtB,IAAI,CAAC,EAAE,OAAO,CAAA;IACd,SAAS,EAAE,OAAO,CAAA;IAClB,QAAQ,EAAE,MAAM,CAAA;IAChB,OAAO,EAAE,OAAO,CAAA;IAChB,MAAM,EAAE,OAAO,CAAA;IACf,KAAK,EAAE,OAAO,CAAA;IACd,KAAK,EAAE,OAAO,CAAA;IACd,UAAU,EAAE,OAAO,CAAA;IACnB,OAAO,EAAE,MAAM,EAAE,CAAA;IACjB,QAAQ,EAAE,MAAM,CAAC,QAAQ,CAAA;IACzB,QAAQ,EAAE,OAAO,CAAA;IACjB,MAAM,EAAE,UAAU,CAAA;IAClB,IAAI,EAAE,OAAO,CAAA;IACb,MAAM,CAAC,EAAE,WAAW,
CAAA;IACpB,oBAAoB,EAAE,OAAO,CAAA;IAC7B,aAAa,EAAE,SAAS,CAAC,IAAI,CAAC,CAAA;IAE9B;;OAEG;IACH,IAAI,EAAE,IAAI,CAAA;IAEV;;OAEG;IACH,QAAQ,EAAE,OAAO,EAAE,CAAA;IAEnB;;;;;;;;;;;OAWG;gBACS,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI;IAqHlD;;OAEG;IACG,IAAI,IAAI,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;IAmBpC;;OAEG;IACH,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;IAezB;;OAEG;IACH,MAAM,IAAI,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC;IAa9C;;OAEG;IACH,UAAU,IAAI,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC;IAalD;;;OAGG;IACH,WAAW,IAAI,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,IAAI,CAAC;IAGlD,CAAC,MAAM,CAAC,QAAQ,CAAC;IAIjB;;;OAGG;IACH,OAAO,IAAI,cAAc,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,IAAI,CAAC;IAGnD,CAAC,MAAM,CAAC,aAAa,CAAC;CAGvB"} \ No newline at end of file +{"version":3,"file":"glob.d.ts","sourceRoot":"","sources":["../../../src/glob.ts"],"names":[],"mappings":";AAAA,OAAO,EAAE,SAAS,EAAoB,MAAM,WAAW,CAAA;AACvD,OAAO,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAA;AACnC,OAAO,EACL,QAAQ,EACR,IAAI,EACJ,UAAU,EAIX,MAAM,aAAa,CAAA;AAEpB,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AACxC,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAA;AAGtC,MAAM,MAAM,QAAQ,GAAG,SAAS,CAAC,KAAK,CAAC,CAAA;AACvC,MAAM,MAAM,SAAS,GAAG,OAAO,CAAC,SAAS,CAAC,WAAW,CAAC,EAAE,SAAS,CAAC,CAAA;AAWlE;;;;;;;;;;;;GAYG;AACH,MAAM,WAAW,WAAW;IAC1B;;;;;;;;;;;;OAYG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAA;IAElB;;;;OAIG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAA;IAE5B;;;;;OAKG;IACH,GAAG,CAAC,EAAE,MAAM,GAAG,GAAG,CAAA;IAElB;;;;OAIG;IACH,GAAG,CAAC,EAAE,OAAO,CAAA;IAEb;;;;;;;;OAQG;IACH,WAAW,CAAC,EAAE,OAAO,CAAA;IAErB;;;;;;;;OAQG;IACH,MAAM,CAAC,EAAE,OAAO,CAAA;IAEhB;;;;;;;;;;;;;;;;OAgBG;IACH,MAAM,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,UAAU,CAAA;IAEvC;;;;;OAKG;IACH,aAAa,CAAC,EAAE,OAAO,CAAA;IAEvB;;;OAGG;IACH,IAAI,CAAC,EAAE,OAAO,CAAA;IAEd;;;;OAIG;IACH,SAAS,CAAC,EAAE,OAAO,CAAA;IAEnB;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAA;IAEjB;;OAEG;IACH,OAAO,CAAC,EAAE,OAAO,CAAA;IAEjB;;;;;;;;;OASG;IACH,MAAM,CAAC,EAAE,OAAO,CAAA;IAEhB;;;OAGG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;OAEG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;;;;OAKG;IACH,UAAU,CAAC,EAAE,OAAO,CAAA;IAEpB;;;;OAIG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC,QAAQ,CAAA;IAE1B;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAA;IAElB;;;;;;;;;;;;;;;;;;;;;;OAsBG;IACH,IAAI,CAAC,EAAE,MAAM,CAAA;IAEb;;;;;OAKG;IACH,MAAM,CAAC,EAAE,UAAU,CAAA;IAEnB;;;;;;OAMG;IACH,IAAI,CAAC,EAAE,OAAO,CAAA;IAEd;;;OAGG;IACH,MAAM,CAAC,EAAE,WAAW,CAAA;IAEpB;;;;;;;;;;;;;OAaG;IACH,oBAAoB,CAAC,EAAE,OAAO,CAAA;IAE9B;;;;;;;OAOG;IACH,aAAa,CAAC,EAAE,OAAO,CAAA;IAEvB;;;OAGG;IACH,EAAE,CAAC,EAAE,QAAQ,CAAA;IAEb;;;OAGG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;;;;;;OAOG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;CAChB;AAED,MAAM,MAAM,4BAA4B,GAAG,WAAW,GAAG;IACvD,aAAa,EAAE,IAAI,CAAA;IAEnB,QAAQ,CAAC,EAAE,SAAS,CAAA;IACpB,IAAI,CAAC,EAAE,SAAS,CAAA;IAChB,KAAK,CAAC,EAAE,SAAS,CAAA;CAClB,CAAA;AAED,MAAM,MAAM,6BAA6B,GAAG,WAAW,GAAG;IACxD,aAAa,CAAC,EAAE,KAAK,CAAA;CACtB,CAAA;AAED,MAAM,MAAM,6BAA6B,GAAG,WAAW,GAAG;IACxD,aAAa,CAAC,EAAE,SAAS,CAAA;CAC1B,CAAA;AAED,MAAM,MAAM,MAAM,CAAC,IAAI,IAAI,IAAI,SAAS,4BAA4B,GAChE,IAAI,GACJ,IAAI,SAAS,6BAA6B,GAC1C,MAAM,GACN,IAAI,SAAS,6BAA6B,GAC1C,MAAM,GACN,MAAM,GAAG,IAAI,CAAA;AACjB,MAAM,MAAM,OAAO,CAAC,IAAI,IAAI,MAAM,CAAC,IAAI,CAAC,EAAE,CAAA;AAE1C,MAAM,MAAM,SAAS,CAAC,IAAI,IAAI,IAAI,SAAS,4BAA4B,GACnE,IAAI,GACJ,IAAI,SAAS,6BAA6B,GAC1C,KAAK,GACL,IAAI,SAAS,6BAA6B,GAC1C,KAAK,GACL,OAAO,CAAA;AAEX;;GAEG;AACH,qBAAa,IAAI,CAAC,IAAI,SAAS,WAAW,CAAE,YAAW,WAAW;IAChE,QAAQ,CAAC,EAAE,OAAO,CAAA;IAClB,GAAG,EAAE,MAAM,CAAA;IACX,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,GAAG,EAAE,OAAO,CAAA;IACZ,WAAW,EAAE,OAAO,CAAA;IACpB,MAAM,EAAE,OAAO,CAAA;IACf,MAAM,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,UAAU,CAAA;IAC
vC,aAAa,EAAE,OAAO,CAAA;IACtB,IAAI,CAAC,EAAE,OAAO,CAAA;IACd,SAAS,EAAE,OAAO,CAAA;IAClB,QAAQ,EAAE,MAAM,CAAA;IAChB,OAAO,EAAE,OAAO,CAAA;IAChB,MAAM,EAAE,OAAO,CAAA;IACf,KAAK,EAAE,OAAO,CAAA;IACd,KAAK,EAAE,OAAO,CAAA;IACd,UAAU,EAAE,OAAO,CAAA;IACnB,OAAO,EAAE,MAAM,EAAE,CAAA;IACjB,QAAQ,EAAE,MAAM,CAAC,QAAQ,CAAA;IACzB,QAAQ,EAAE,OAAO,CAAA;IACjB,MAAM,EAAE,UAAU,CAAA;IAClB,IAAI,EAAE,OAAO,CAAA;IACb,MAAM,CAAC,EAAE,WAAW,CAAA;IACpB,oBAAoB,EAAE,OAAO,CAAA;IAC7B,aAAa,EAAE,SAAS,CAAC,IAAI,CAAC,CAAA;IAE9B;;OAEG;IACH,IAAI,EAAE,IAAI,CAAA;IAEV;;OAEG;IACH,QAAQ,EAAE,OAAO,EAAE,CAAA;IAEnB;;;;;;;;;;;OAWG;gBACS,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI;IAwHlD;;OAEG;IACG,IAAI,IAAI,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;IAmBpC;;OAEG;IACH,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;IAezB;;OAEG;IACH,MAAM,IAAI,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC;IAa9C;;OAEG;IACH,UAAU,IAAI,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC;IAalD;;;OAGG;IACH,WAAW,IAAI,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,IAAI,CAAC;IAGlD,CAAC,MAAM,CAAC,QAAQ,CAAC;IAIjB;;;OAGG;IACH,OAAO,IAAI,cAAc,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,IAAI,CAAC;IAGnD,CAAC,MAAM,CAAC,aAAa,CAAC;CAGvB"} \ No newline at end of file diff --git a/deps/npm/node_modules/glob/dist/cjs/src/glob.js b/deps/npm/node_modules/glob/dist/cjs/src/glob.js index e7ad4deb980d30..eb37c6b9a6601e 100644 --- a/deps/npm/node_modules/glob/dist/cjs/src/glob.js +++ b/deps/npm/node_modules/glob/dist/cjs/src/glob.js @@ -62,6 +62,10 @@ class Glob { * again. */ constructor(pattern, opts) { + /* c8 ignore start */ + if (!opts) + throw new TypeError('glob options required'); + /* c8 ignore stop */ this.withFileTypes = !!opts.withFileTypes; this.signal = opts.signal; this.follow = !!opts.follow; diff --git a/deps/npm/node_modules/glob/dist/cjs/src/glob.js.map b/deps/npm/node_modules/glob/dist/cjs/src/glob.js.map index bf6fb4d0f0b724..7a7a9b28627480 100644 --- a/deps/npm/node_modules/glob/dist/cjs/src/glob.js.map +++ b/deps/npm/node_modules/glob/dist/cjs/src/glob.js.map @@ -1 +1 @@ 
-{"version":3,"file":"glob.js","sourceRoot":"","sources":["../../../src/glob.ts"],"names":[],"mappings":";;;AAAA,yCAAuD;AAEvD,6CAOoB;AACpB,6BAAmC;AAEnC,6CAAsC;AACtC,2CAAoD;AAKpD,4CAA4C;AAC5C,gDAAgD;AAChD,MAAM,eAAe,GACnB,OAAO,OAAO,KAAK,QAAQ;IAC3B,OAAO;IACP,OAAO,OAAO,CAAC,QAAQ,KAAK,QAAQ;IAClC,CAAC,CAAC,OAAO,CAAC,QAAQ;IAClB,CAAC,CAAC,OAAO,CAAA;AAgTb;;GAEG;AACH,MAAa,IAAI;IACf,QAAQ,CAAU;IAClB,GAAG,CAAQ;IACX,IAAI,CAAS;IACb,GAAG,CAAS;IACZ,WAAW,CAAS;IACpB,MAAM,CAAS;IACf,MAAM,CAAiC;IACvC,aAAa,CAAS;IACtB,IAAI,CAAU;IACd,SAAS,CAAS;IAClB,QAAQ,CAAQ;IAChB,OAAO,CAAS;IAChB,MAAM,CAAS;IACf,KAAK,CAAS;IACd,KAAK,CAAS;IACd,UAAU,CAAS;IACnB,OAAO,CAAU;IACjB,QAAQ,CAAiB;IACzB,QAAQ,CAAS;IACjB,MAAM,CAAY;IAClB,IAAI,CAAS;IACb,MAAM,CAAc;IACpB,oBAAoB,CAAS;IAC7B,aAAa,CAAiB;IAE9B;;OAEG;IACH,IAAI,CAAM;IAEV;;OAEG;IACH,QAAQ,CAAW;IAEnB;;;;;;;;;;;OAWG;IACH,YAAY,OAA0B,EAAE,IAAU;QAChD,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC,IAAI,CAAC,aAAgC,CAAA;QAC5D,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;QACzB,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,IAAI,CAAC,MAAM,CAAA;QAC3B,IAAI,CAAC,GAAG,GAAG,CAAC,CAAC,IAAI,CAAC,GAAG,CAAA;QACrB,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC,IAAI,CAAC,WAAW,CAAA;QACrC,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,IAAI,CAAC,KAAK,CAAA;QACzB,IAAI,CAAC,IAAI,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,CAAA;QACvB,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE;YACb,IAAI,CAAC,GAAG,GAAG,EAAE,CAAA;SACd;aAAM,IAAI,IAAI,CAAC,GAAG,YAAY,GAAG,IAAI,IAAI,CAAC,GAAG,CAAC,UAAU,CAAC,SAAS,CAAC,EAAE;YACpE,IAAI,CAAC,GAAG,GAAG,IAAA,mBAAa,EAAC,IAAI,CAAC,GAAG,CAAC,CAAA;SACnC;QACD,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,IAAI,EAAE,CAAA;QACzB,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,CAAA;QACrB,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC,IAAI,CAAC,aAAa,CAAA;QACzC,IAAI,CAAC,OAAO,GAAG,CAAC,CAAC,IAAI,CAAC,OAAO,CAAA;QAC7B,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,IAAI,CAAC,KAAK,CAAA;QACzB,IAAI,CAAC,QAAQ,GAAG,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAA;QAC/B,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAA;QAE7B,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC,IAAI,CAAC,UAAU,CAAA;QACnC,IAAI,CAAC,SAAS,GAAG,CAAC,CAAC,IAAI,CAAC,SAAS,CAAA;QACjC,IAAI,CAAC,QAAQ;YACX,OAAO,IAAI,CAAC,QAAQ,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAA;QAC9D,IAAI,CAAC,IAAI,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,CAAA;QACvB,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;QAEzB,IAAI,IAAI,CAAC,aAAa,IAAI,IAAI,CAAC,QAAQ,KAAK,SAAS,EAAE;YACrD,MAAM,IAAI,KAAK,CAAC,4CAA4C,CAAC,CAAA;SAC9D;QAED,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;YAC/B,OAAO,GAAG,CAAC,OAAO,CAAC,CAAA;SACpB;QAED,IAAI,CAAC,oBAAoB;YACvB,CAAC,CAAC,IAAI,CAAC,oBAAoB;gBAC1B,IAAoB,CAAC,kBAAkB,KAAK,KAAK,CAAA;QAEpD,IAAI,IAAI,CAAC,oBAAoB,EAAE;YAC7B,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAA;SAClD;QAED,IAAI,IAAI,CAAC,SAAS,EAAE;YAClB,IAAI,IAAI,CAAC,UAAU,EAAE;gBACnB,MAAM,IAAI,SAAS,CAAC,iCAAiC,CAAC,CAAA;aACvD;YACD,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAA;SAChE;QAED,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;QAEtB,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,IAAI,eAAe,CAAA;QAChD,IAAI,CAAC,IAAI,GAAG,EAAE,GAAG,IAAI,EAAE,QAAQ,EAAE,IAAI,CAAC,QAAQ,EAAE,CAAA;QAChD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;YACzB,IACE,IAAI,CAAC,MAAM,KAAK,SAAS;gBACzB,IAAI,CAAC,MAAM,KAAK,IAAI,CAAC,MAAM,CAAC,MAAM,EAClC;gBACA,MAAM,IAAI,KAAK,CAAC,kDAAkD,CAAC,CAAA;aACpE;SACF;aAAM;YACL,MAAM,MAAM,GACV,IAAI,CAAC,QAAQ,KAAK,OAAO;gBACvB,CAAC,CAAC,6BAAe;gBACjB,CAAC,CAAC,IAAI,CAAC,QAAQ,KAAK,QAAQ;oBAC5B,CAAC,CAAC,8BAAgB;oBAClB,CAAC,CAAC,IAAI,CAAC,QAAQ;wBACf,CAAC,CAAC,6BAAe;wBACjB,CAAC,CAAC,wBAAU,CAAA;YAChB,IAAI,CAAC,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE;gBACjC,MAAM,EAAE
,IAAI,CAAC,MAAM;gBACnB,EAAE,EAAE,IAAI,CAAC,EAAE;aACZ,CAAC,CAAA;SACH;QACD,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,CAAA;QAEhC,8DAA8D;QAC9D,0DAA0D;QAC1D,6DAA6D;QAC7D,kCAAkC;QAClC,MAAM,eAAe,GACnB,IAAI,CAAC,QAAQ,KAAK,QAAQ,IAAI,IAAI,CAAC,QAAQ,KAAK,OAAO,CAAA;QAEzD,MAAM,GAAG,GAAqB;YAC5B,mCAAmC;YACnC,GAAG,IAAI;YACP,GAAG,EAAE,IAAI,CAAC,GAAG;YACb,SAAS,EAAE,IAAI,CAAC,SAAS;YACzB,OAAO,EAAE,IAAI,CAAC,OAAO;YACrB,MAAM,EAAE,IAAI,CAAC,MAAM;YACnB,eAAe;YACf,SAAS,EAAE,IAAI;YACf,KAAK,EAAE,IAAI,CAAC,KAAK;YACjB,QAAQ,EAAE,IAAI;YACd,iBAAiB,EAAE,CAAC;YACpB,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,oBAAoB,EAAE,IAAI,CAAC,oBAAoB;YAC/C,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK;SACzB,CAAA;QAED,MAAM,GAAG,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,qBAAS,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAA;QACxD,MAAM,CAAC,QAAQ,EAAE,SAAS,CAAC,GAAG,GAAG,CAAC,MAAM,CACtC,CAAC,GAA0B,EAAE,CAAC,EAAE,EAAE;YAChC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAA;YACrB,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,SAAS,CAAC,CAAA;YAC3B,OAAO,GAAG,CAAA;QACZ,CAAC,EACD,CAAC,EAAE,EAAE,EAAE,CAAC,CACT,CAAA;QACD,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,CAAC,EAAE,EAAE;YACtC,OAAO,IAAI,oBAAO,CAAC,GAAG,EAAE,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAA;QACzD,CAAC,CAAC,CAAA;IACJ,CAAC;IAMD,KAAK,CAAC,IAAI;QACR,kEAAkE;QAClE,iEAAiE;QACjE,uEAAuE;QACvE,sCAAsC;QACtC,OAAO;YACL,GAAG,CAAC,MAAM,IAAI,sBAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;gBACvD,GAAG,IAAI,CAAC,IAAI;gBACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;oBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;oBACzC,CAAC,CAAC,QAAQ;gBACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;gBACvB,MAAM,EAAE,IAAI,CAAC,MAAM;aACpB,CAAC,CAAC,IAAI,EAAE,CAAC;SACX,CAAA;IACH,CAAC;IAMD,QAAQ;QACN,OAAO;YACL,GAAG,IAAI,sBAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;gBAChD,GAAG,IAAI,CAAC,IAAI;gBACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;oBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;oBACzC,CAAC,CAAC,QAAQ;gBACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;gBACvB,MAAM,EAAE,IAAI,CAAC,MAAM;aACpB,CAAC,CAAC,QAAQ,EAAE;SACd,CAAA;IACH,CAAC;IAMD,MAAM;QACJ,OAAO,IAAI,sBAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;YACpD,GAAG,IAAI,CAAC,IAAI;YACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;gBACzC,CAAC,CAAC,QAAQ;YACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,MAAM,EAAE,IAAI,CAAC,MAAM;SACpB,CAAC,CAAC,MAAM,EAAE,CAAA;IACb,CAAC;IAMD,UAAU;QACR,OAAO,IAAI,sBAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;YACpD,GAAG,IAAI,CAAC,IAAI;YACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;gBACzC,CAAC,CAAC,QAAQ;YACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,MAAM,EAAE,IAAI,CAAC,MAAM;SACpB,CAAC,CAAC,UAAU,EAAE,CAAA;IACjB,CAAC;IAED;;;OAGG;IACH,WAAW;QACT,OAAO,IAAI,CAAC,UAAU,EAAE,CAAC,MAAM,CAAC,QAAQ,CAAC,EAAE,CAAA;IAC7C,CAAC;IACD,CAAC,MAAM,CAAC,QAAQ,CAAC;QACf,OAAO,IAAI,CAAC,WAAW,EAAE,CAAA;IAC3B,CAAC;IAED;;;OAGG;IACH,OAAO;QACL,OAAO,IAAI,CAAC,MAAM,EAAE,CAAC,MAAM,CAAC,aAAa,CAAC,EAAE,CAAA;IAC9C,CAAC;IACD,CAAC,MAAM,CAAC,aAAa,CAAC;QACpB,OAAO,IAAI,CAAC,OAAO,EAAE,CAAA;IACvB,CAAC;CACF;AAlQD,oBAkQC","sourcesContent":["import { Minimatch, MinimatchOptions } from 'minimatch'\nimport { Minipass } from 'minipass'\nimport {\n FSOption,\n Path,\n PathScurry,\n PathScurryDarwin,\n PathScurryPosix,\n PathScurryWin32,\n} from 'path-scurry'\nimport { fileURLToPath } from 'url'\nimport { IgnoreLike } from './ignore.js'\nimport { Pattern } from './pattern.js'\nimport { 
GlobStream, GlobWalker } from './walker.js'\n\nexport type MatchSet = Minimatch['set']\nexport type GlobParts = Exclude\n\n// if no process global, just call it linux.\n// so we default to case-sensitive, / separators\nconst defaultPlatform: NodeJS.Platform =\n typeof process === 'object' &&\n process &&\n typeof process.platform === 'string'\n ? process.platform\n : 'linux'\n\n/**\n * A `GlobOptions` object may be provided to any of the exported methods, and\n * must be provided to the `Glob` constructor.\n *\n * All options are optional, boolean, and false by default, unless otherwise\n * noted.\n *\n * All resolved options are added to the Glob object as properties.\n *\n * If you are running many `glob` operations, you can pass a Glob object as the\n * `options` argument to a subsequent operation to share the previously loaded\n * cache.\n */\nexport interface GlobOptions {\n /**\n * Set to `true` to always receive absolute paths for\n * matched files. Set to `false` to always return relative paths.\n *\n * When this option is not set, absolute paths are returned for patterns\n * that are absolute, and otherwise paths are returned that are relative\n * to the `cwd` setting.\n *\n * This does _not_ make an extra system call to get\n * the realpath, it only does string path resolution.\n *\n * Conflicts with {@link withFileTypes}\n */\n absolute?: boolean\n\n /**\n * Set to false to enable {@link windowsPathsNoEscape}\n *\n * @deprecated\n */\n allowWindowsEscape?: boolean\n\n /**\n * The current working directory in which to search. Defaults to\n * `process.cwd()`.\n *\n * May be eiher a string path or a `file://` URL object or string.\n */\n cwd?: string | URL\n\n /**\n * Include `.dot` files in normal matches and `globstar`\n * matches. Note that an explicit dot in a portion of the pattern\n * will always match dot files.\n */\n dot?: boolean\n\n /**\n * Prepend all relative path strings with `./` (or `.\\` on Windows).\n *\n * Without this option, returned relative paths are \"bare\", so instead of\n * returning `'./foo/bar'`, they are returned as `'foo/bar'`.\n *\n * Relative patterns starting with `'../'` are not prepended with `./`, even\n * if this option is set.\n */\n dotRelative?: boolean\n\n /**\n * Follow symlinked directories when expanding `**`\n * patterns. This can result in a lot of duplicate references in\n * the presence of cyclic links, and make performance quite bad.\n *\n * By default, a `**` in a pattern will follow 1 symbolic link if\n * it is not the first item in the pattern, or none if it is the\n * first item in the pattern, following the same behavior as Bash.\n */\n follow?: boolean\n\n /**\n * string or string[], or an object with `ignore` and `ignoreChildren`\n * methods.\n *\n * If a string or string[] is provided, then this is treated as a glob\n * pattern or array of glob patterns to exclude from matches. To ignore all\n * children within a directory, as well as the entry itself, append `'/**'`\n * to the ignore pattern.\n *\n * **Note** `ignore` patterns are _always_ in `dot:true` mode, regardless of\n * any other settings.\n *\n * If an object is provided that has `ignored(path)` and/or\n * `childrenIgnored(path)` methods, then these methods will be called to\n * determine whether any Path is a match or if its children should be\n * traversed, respectively.\n */\n ignore?: string | string[] | IgnoreLike\n\n /**\n * Treat brace expansion like `{a,b}` as a \"magic\" pattern. 
Has no\n * effect if {@link nobrace} is set.\n *\n * Only has effect on the {@link hasMagic} function.\n */\n magicalBraces?: boolean\n\n /**\n * Add a `/` character to directory matches. Note that this requires\n * additional stat calls in some cases.\n */\n mark?: boolean\n\n /**\n * Perform a basename-only match if the pattern does not contain any slash\n * characters. That is, `*.js` would be treated as equivalent to\n * `**\\/*.js`, matching all js files in all directories.\n */\n matchBase?: boolean\n\n /**\n * Limit the directory traversal to a given depth below the cwd.\n * Note that this does NOT prevent traversal to sibling folders,\n * root patterns, and so on. It only limits the maximum folder depth\n * that the walk will descend, relative to the cwd.\n */\n maxDepth?: number\n\n /**\n * Do not expand `{a,b}` and `{1..3}` brace sets.\n */\n nobrace?: boolean\n\n /**\n * Perform a case-insensitive match. This defaults to `true` on macOS and\n * Windows systems, and `false` on all others.\n *\n * **Note** `nocase` should only be explicitly set when it is\n * known that the filesystem's case sensitivity differs from the\n * platform default. If set `true` on case-sensitive file\n * systems, or `false` on case-insensitive file systems, then the\n * walk may return more or less results than expected.\n */\n nocase?: boolean\n\n /**\n * Do not match directories, only files. (Note: to match\n * _only_ directories, put a `/` at the end of the pattern.)\n */\n nodir?: boolean\n\n /**\n * Do not match \"extglob\" patterns such as `+(a|b)`.\n */\n noext?: boolean\n\n /**\n * Do not match `**` against multiple filenames. (Ie, treat it as a normal\n * `*` instead.)\n *\n * Conflicts with {@link matchBase}\n */\n noglobstar?: boolean\n\n /**\n * Defaults to value of `process.platform` if available, or `'linux'` if\n * not. Setting `platform:'win32'` on non-Windows systems may cause strange\n * behavior.\n */\n platform?: NodeJS.Platform\n\n /**\n * Set to true to call `fs.realpath` on all of the\n * results. In the case of an entry that cannot be resolved, the\n * entry is omitted. This incurs a slight performance penalty, of\n * course, because of the added system calls.\n */\n realpath?: boolean\n\n /**\n *\n * A string path resolved against the `cwd` option, which\n * is used as the starting point for absolute patterns that start\n * with `/`, (but not drive letters or UNC paths on Windows).\n *\n * Note that this _doesn't_ necessarily limit the walk to the\n * `root` directory, and doesn't affect the cwd starting point for\n * non-absolute patterns. A pattern containing `..` will still be\n * able to traverse out of the root directory, if it is not an\n * actual root directory on the filesystem, and any non-absolute\n * patterns will be matched in the `cwd`. For example, the\n * pattern `/../*` with `{root:'/some/path'}` will return all\n * files in `/some`, not all files in `/some/path`. The pattern\n * `*` with `{root:'/some/path'}` will return all the entries in\n * the cwd, not the entries in `/some/path`.\n *\n * To start absolute and non-absolute patterns in the same\n * path, you can use `{root:''}`. However, be aware that on\n * Windows systems, a pattern like `x:/*` or `//host/share/*` will\n * _always_ start in the `x:/` or `//host/share` directory,\n * regardless of the `root` setting.\n */\n root?: string\n\n /**\n * A [PathScurry](http://npm.im/path-scurry) object used\n * to traverse the file system. 
If the `nocase` option is set\n * explicitly, then any provided `scurry` object must match this\n * setting.\n */\n scurry?: PathScurry\n\n /**\n * Call `lstat()` on all entries, whether required or not to determine\n * if it's a valid match. When used with {@link withFileTypes}, this means\n * that matches will include data such as modified time, permissions, and\n * so on. Note that this will incur a performance cost due to the added\n * system calls.\n */\n stat?: boolean\n\n /**\n * An AbortSignal which will cancel the Glob walk when\n * triggered.\n */\n signal?: AbortSignal\n\n /**\n * Use `\\\\` as a path separator _only_, and\n * _never_ as an escape character. If set, all `\\\\` characters are\n * replaced with `/` in the pattern.\n *\n * Note that this makes it **impossible** to match against paths\n * containing literal glob pattern characters, but allows matching\n * with patterns constructed using `path.join()` and\n * `path.resolve()` on Windows platforms, mimicking the (buggy!)\n * behavior of Glob v7 and before on Windows. Please use with\n * caution, and be mindful of [the caveat below about Windows\n * paths](#windows). (For legacy reasons, this is also set if\n * `allowWindowsEscape` is set to the exact value `false`.)\n */\n windowsPathsNoEscape?: boolean\n\n /**\n * Return [PathScurry](http://npm.im/path-scurry)\n * `Path` objects instead of strings. These are similar to a\n * NodeJS `Dirent` object, but with additional methods and\n * properties.\n *\n * Conflicts with {@link absolute}\n */\n withFileTypes?: boolean\n\n /**\n * An fs implementation to override some or all of the defaults. See\n * http://npm.im/path-scurry for details about what can be overridden.\n */\n fs?: FSOption\n\n /**\n * Just passed along to Minimatch. Note that this makes all pattern\n * matching operations slower and *extremely* noisy.\n */\n debug?: boolean\n\n /**\n * Return `/` delimited paths, even on Windows.\n *\n * On posix systems, this has no effect. But, on Windows, it means that\n * paths will be `/` delimited, and absolute paths will be their full\n * resolved UNC forms, eg instead of `'C:\\\\foo\\\\bar'`, it would return\n * `'//?/C:/foo/bar'`\n */\n posix?: boolean\n}\n\nexport type GlobOptionsWithFileTypesTrue = GlobOptions & {\n withFileTypes: true\n // string options not relevant if returning Path objects.\n absolute?: undefined\n mark?: undefined\n posix?: undefined\n}\n\nexport type GlobOptionsWithFileTypesFalse = GlobOptions & {\n withFileTypes?: false\n}\n\nexport type GlobOptionsWithFileTypesUnset = GlobOptions & {\n withFileTypes?: undefined\n}\n\nexport type Result = Opts extends GlobOptionsWithFileTypesTrue\n ? Path\n : Opts extends GlobOptionsWithFileTypesFalse\n ? string\n : Opts extends GlobOptionsWithFileTypesUnset\n ? string\n : string | Path\nexport type Results = Result[]\n\nexport type FileTypes = Opts extends GlobOptionsWithFileTypesTrue\n ? true\n : Opts extends GlobOptionsWithFileTypesFalse\n ? false\n : Opts extends GlobOptionsWithFileTypesUnset\n ? 
false\n : boolean\n\n/**\n * An object that can perform glob pattern traversals.\n */\nexport class Glob implements GlobOptions {\n absolute?: boolean\n cwd: string\n root?: string\n dot: boolean\n dotRelative: boolean\n follow: boolean\n ignore?: string | string[] | IgnoreLike\n magicalBraces: boolean\n mark?: boolean\n matchBase: boolean\n maxDepth: number\n nobrace: boolean\n nocase: boolean\n nodir: boolean\n noext: boolean\n noglobstar: boolean\n pattern: string[]\n platform: NodeJS.Platform\n realpath: boolean\n scurry: PathScurry\n stat: boolean\n signal?: AbortSignal\n windowsPathsNoEscape: boolean\n withFileTypes: FileTypes\n\n /**\n * The options provided to the constructor.\n */\n opts: Opts\n\n /**\n * An array of parsed immutable {@link Pattern} objects.\n */\n patterns: Pattern[]\n\n /**\n * All options are stored as properties on the `Glob` object.\n *\n * See {@link GlobOptions} for full options descriptions.\n *\n * Note that a previous `Glob` object can be passed as the\n * `GlobOptions` to another `Glob` instantiation to re-use settings\n * and caches with a new pattern.\n *\n * Traversal functions can be called multiple times to run the walk\n * again.\n */\n constructor(pattern: string | string[], opts: Opts) {\n this.withFileTypes = !!opts.withFileTypes as FileTypes\n this.signal = opts.signal\n this.follow = !!opts.follow\n this.dot = !!opts.dot\n this.dotRelative = !!opts.dotRelative\n this.nodir = !!opts.nodir\n this.mark = !!opts.mark\n if (!opts.cwd) {\n this.cwd = ''\n } else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) {\n opts.cwd = fileURLToPath(opts.cwd)\n }\n this.cwd = opts.cwd || ''\n this.root = opts.root\n this.magicalBraces = !!opts.magicalBraces\n this.nobrace = !!opts.nobrace\n this.noext = !!opts.noext\n this.realpath = !!opts.realpath\n this.absolute = opts.absolute\n\n this.noglobstar = !!opts.noglobstar\n this.matchBase = !!opts.matchBase\n this.maxDepth =\n typeof opts.maxDepth === 'number' ? opts.maxDepth : Infinity\n this.stat = !!opts.stat\n this.ignore = opts.ignore\n\n if (this.withFileTypes && this.absolute !== undefined) {\n throw new Error('cannot set absolute and withFileTypes:true')\n }\n\n if (typeof pattern === 'string') {\n pattern = [pattern]\n }\n\n this.windowsPathsNoEscape =\n !!opts.windowsPathsNoEscape ||\n (opts as GlobOptions).allowWindowsEscape === false\n\n if (this.windowsPathsNoEscape) {\n pattern = pattern.map(p => p.replace(/\\\\/g, '/'))\n }\n\n if (this.matchBase) {\n if (opts.noglobstar) {\n throw new TypeError('base matching requires globstar')\n }\n pattern = pattern.map(p => (p.includes('/') ? p : `./**/${p}`))\n }\n\n this.pattern = pattern\n\n this.platform = opts.platform || defaultPlatform\n this.opts = { ...opts, platform: this.platform }\n if (opts.scurry) {\n this.scurry = opts.scurry\n if (\n opts.nocase !== undefined &&\n opts.nocase !== opts.scurry.nocase\n ) {\n throw new Error('nocase option contradicts provided scurry option')\n }\n } else {\n const Scurry =\n opts.platform === 'win32'\n ? PathScurryWin32\n : opts.platform === 'darwin'\n ? PathScurryDarwin\n : opts.platform\n ? 
PathScurryPosix\n : PathScurry\n this.scurry = new Scurry(this.cwd, {\n nocase: opts.nocase,\n fs: opts.fs,\n })\n }\n this.nocase = this.scurry.nocase\n\n // If you do nocase:true on a case-sensitive file system, then\n // we need to use regexps instead of strings for non-magic\n // path portions, because statting `aBc` won't return results\n // for the file `AbC` for example.\n const nocaseMagicOnly =\n this.platform === 'darwin' || this.platform === 'win32'\n\n const mmo: MinimatchOptions = {\n // default nocase based on platform\n ...opts,\n dot: this.dot,\n matchBase: this.matchBase,\n nobrace: this.nobrace,\n nocase: this.nocase,\n nocaseMagicOnly,\n nocomment: true,\n noext: this.noext,\n nonegate: true,\n optimizationLevel: 2,\n platform: this.platform,\n windowsPathsNoEscape: this.windowsPathsNoEscape,\n debug: !!this.opts.debug,\n }\n\n const mms = this.pattern.map(p => new Minimatch(p, mmo))\n const [matchSet, globParts] = mms.reduce(\n (set: [MatchSet, GlobParts], m) => {\n set[0].push(...m.set)\n set[1].push(...m.globParts)\n return set\n },\n [[], []]\n )\n this.patterns = matchSet.map((set, i) => {\n return new Pattern(set, globParts[i], 0, this.platform)\n })\n }\n\n /**\n * Returns a Promise that resolves to the results array.\n */\n async walk(): Promise>\n async walk(): Promise<(string | Path)[]> {\n // Walkers always return array of Path objects, so we just have to\n // coerce them into the right shape. It will have already called\n // realpath() if the option was set to do so, so we know that's cached.\n // start out knowing the cwd, at least\n return [\n ...(await new GlobWalker(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).walk()),\n ]\n }\n\n /**\n * synchronous {@link Glob.walk}\n */\n walkSync(): Results\n walkSync(): (string | Path)[] {\n return [\n ...new GlobWalker(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).walkSync(),\n ]\n }\n\n /**\n * Stream results asynchronously.\n */\n stream(): Minipass, Result>\n stream(): Minipass {\n return new GlobStream(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).stream()\n }\n\n /**\n * Stream results synchronously.\n */\n streamSync(): Minipass, Result>\n streamSync(): Minipass {\n return new GlobStream(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).streamSync()\n }\n\n /**\n * Default sync iteration function. Returns a Generator that\n * iterates over the results.\n */\n iterateSync(): Generator, void, void> {\n return this.streamSync()[Symbol.iterator]()\n }\n [Symbol.iterator]() {\n return this.iterateSync()\n }\n\n /**\n * Default async iteration function. 
Returns an AsyncGenerator that\n * iterates over the results.\n */\n iterate(): AsyncGenerator, void, void> {\n return this.stream()[Symbol.asyncIterator]()\n }\n [Symbol.asyncIterator]() {\n return this.iterate()\n }\n}\n"]} \ No newline at end of file +{"version":3,"file":"glob.js","sourceRoot":"","sources":["../../../src/glob.ts"],"names":[],"mappings":";;;AAAA,yCAAuD;AAEvD,6CAOoB;AACpB,6BAAmC;AAEnC,6CAAsC;AACtC,2CAAoD;AAKpD,4CAA4C;AAC5C,gDAAgD;AAChD,MAAM,eAAe,GACnB,OAAO,OAAO,KAAK,QAAQ;IAC3B,OAAO;IACP,OAAO,OAAO,CAAC,QAAQ,KAAK,QAAQ;IAClC,CAAC,CAAC,OAAO,CAAC,QAAQ;IAClB,CAAC,CAAC,OAAO,CAAA;AAgTb;;GAEG;AACH,MAAa,IAAI;IACf,QAAQ,CAAU;IAClB,GAAG,CAAQ;IACX,IAAI,CAAS;IACb,GAAG,CAAS;IACZ,WAAW,CAAS;IACpB,MAAM,CAAS;IACf,MAAM,CAAiC;IACvC,aAAa,CAAS;IACtB,IAAI,CAAU;IACd,SAAS,CAAS;IAClB,QAAQ,CAAQ;IAChB,OAAO,CAAS;IAChB,MAAM,CAAS;IACf,KAAK,CAAS;IACd,KAAK,CAAS;IACd,UAAU,CAAS;IACnB,OAAO,CAAU;IACjB,QAAQ,CAAiB;IACzB,QAAQ,CAAS;IACjB,MAAM,CAAY;IAClB,IAAI,CAAS;IACb,MAAM,CAAc;IACpB,oBAAoB,CAAS;IAC7B,aAAa,CAAiB;IAE9B;;OAEG;IACH,IAAI,CAAM;IAEV;;OAEG;IACH,QAAQ,CAAW;IAEnB;;;;;;;;;;;OAWG;IACH,YAAY,OAA0B,EAAE,IAAU;QAChD,qBAAqB;QACrB,IAAI,CAAC,IAAI;YAAE,MAAM,IAAI,SAAS,CAAC,uBAAuB,CAAC,CAAA;QACvD,oBAAoB;QACpB,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC,IAAI,CAAC,aAAgC,CAAA;QAC5D,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;QACzB,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,IAAI,CAAC,MAAM,CAAA;QAC3B,IAAI,CAAC,GAAG,GAAG,CAAC,CAAC,IAAI,CAAC,GAAG,CAAA;QACrB,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC,IAAI,CAAC,WAAW,CAAA;QACrC,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,IAAI,CAAC,KAAK,CAAA;QACzB,IAAI,CAAC,IAAI,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,CAAA;QACvB,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE;YACb,IAAI,CAAC,GAAG,GAAG,EAAE,CAAA;SACd;aAAM,IAAI,IAAI,CAAC,GAAG,YAAY,GAAG,IAAI,IAAI,CAAC,GAAG,CAAC,UAAU,CAAC,SAAS,CAAC,EAAE;YACpE,IAAI,CAAC,GAAG,GAAG,IAAA,mBAAa,EAAC,IAAI,CAAC,GAAG,CAAC,CAAA;SACnC;QACD,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,IAAI,EAAE,CAAA;QACzB,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,CAAA;QACrB,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC,IAAI,CAAC,aAAa,CAAA;QACzC,IAAI,CAAC,OAAO,GAAG,CAAC,CAAC,IAAI,CAAC,OAAO,CAAA;QAC7B,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,IAAI,CAAC,KAAK,CAAA;QACzB,IAAI,CAAC,QAAQ,GAAG,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAA;QAC/B,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAA;QAE7B,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC,IAAI,CAAC,UAAU,CAAA;QACnC,IAAI,CAAC,SAAS,GAAG,CAAC,CAAC,IAAI,CAAC,SAAS,CAAA;QACjC,IAAI,CAAC,QAAQ;YACX,OAAO,IAAI,CAAC,QAAQ,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAA;QAC9D,IAAI,CAAC,IAAI,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,CAAA;QACvB,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;QAEzB,IAAI,IAAI,CAAC,aAAa,IAAI,IAAI,CAAC,QAAQ,KAAK,SAAS,EAAE;YACrD,MAAM,IAAI,KAAK,CAAC,4CAA4C,CAAC,CAAA;SAC9D;QAED,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;YAC/B,OAAO,GAAG,CAAC,OAAO,CAAC,CAAA;SACpB;QAED,IAAI,CAAC,oBAAoB;YACvB,CAAC,CAAC,IAAI,CAAC,oBAAoB;gBAC1B,IAAoB,CAAC,kBAAkB,KAAK,KAAK,CAAA;QAEpD,IAAI,IAAI,CAAC,oBAAoB,EAAE;YAC7B,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAA;SAClD;QAED,IAAI,IAAI,CAAC,SAAS,EAAE;YAClB,IAAI,IAAI,CAAC,UAAU,EAAE;gBACnB,MAAM,IAAI,SAAS,CAAC,iCAAiC,CAAC,CAAA;aACvD;YACD,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAA;SAChE;QAED,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;QAEtB,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,IAAI,eAAe,CAAA;QAChD,IAAI,CAAC,IAAI,GAAG,EAAE,GAAG,IAAI,EAAE,QAAQ,EAAE,IAAI,CAAC,QAAQ,EAAE,CAAA;QAChD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;YACzB,IACE,IAAI,CAAC,MAAM,KAAK,SAAS;gBACzB,IAAI,CAAC,MAAM,KAAK,IAAI,CAAC,MAAM,CAAC,MAAM,EAClC;gBACA,MAA
M,IAAI,KAAK,CAAC,kDAAkD,CAAC,CAAA;aACpE;SACF;aAAM;YACL,MAAM,MAAM,GACV,IAAI,CAAC,QAAQ,KAAK,OAAO;gBACvB,CAAC,CAAC,6BAAe;gBACjB,CAAC,CAAC,IAAI,CAAC,QAAQ,KAAK,QAAQ;oBAC5B,CAAC,CAAC,8BAAgB;oBAClB,CAAC,CAAC,IAAI,CAAC,QAAQ;wBACf,CAAC,CAAC,6BAAe;wBACjB,CAAC,CAAC,wBAAU,CAAA;YAChB,IAAI,CAAC,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE;gBACjC,MAAM,EAAE,IAAI,CAAC,MAAM;gBACnB,EAAE,EAAE,IAAI,CAAC,EAAE;aACZ,CAAC,CAAA;SACH;QACD,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,CAAA;QAEhC,8DAA8D;QAC9D,0DAA0D;QAC1D,6DAA6D;QAC7D,kCAAkC;QAClC,MAAM,eAAe,GACnB,IAAI,CAAC,QAAQ,KAAK,QAAQ,IAAI,IAAI,CAAC,QAAQ,KAAK,OAAO,CAAA;QAEzD,MAAM,GAAG,GAAqB;YAC5B,mCAAmC;YACnC,GAAG,IAAI;YACP,GAAG,EAAE,IAAI,CAAC,GAAG;YACb,SAAS,EAAE,IAAI,CAAC,SAAS;YACzB,OAAO,EAAE,IAAI,CAAC,OAAO;YACrB,MAAM,EAAE,IAAI,CAAC,MAAM;YACnB,eAAe;YACf,SAAS,EAAE,IAAI;YACf,KAAK,EAAE,IAAI,CAAC,KAAK;YACjB,QAAQ,EAAE,IAAI;YACd,iBAAiB,EAAE,CAAC;YACpB,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,oBAAoB,EAAE,IAAI,CAAC,oBAAoB;YAC/C,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK;SACzB,CAAA;QAED,MAAM,GAAG,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,qBAAS,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAA;QACxD,MAAM,CAAC,QAAQ,EAAE,SAAS,CAAC,GAAG,GAAG,CAAC,MAAM,CACtC,CAAC,GAA0B,EAAE,CAAC,EAAE,EAAE;YAChC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAA;YACrB,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,SAAS,CAAC,CAAA;YAC3B,OAAO,GAAG,CAAA;QACZ,CAAC,EACD,CAAC,EAAE,EAAE,EAAE,CAAC,CACT,CAAA;QACD,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,CAAC,EAAE,EAAE;YACtC,OAAO,IAAI,oBAAO,CAAC,GAAG,EAAE,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAA;QACzD,CAAC,CAAC,CAAA;IACJ,CAAC;IAMD,KAAK,CAAC,IAAI;QACR,kEAAkE;QAClE,iEAAiE;QACjE,uEAAuE;QACvE,sCAAsC;QACtC,OAAO;YACL,GAAG,CAAC,MAAM,IAAI,sBAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;gBACvD,GAAG,IAAI,CAAC,IAAI;gBACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;oBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;oBACzC,CAAC,CAAC,QAAQ;gBACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;gBACvB,MAAM,EAAE,IAAI,CAAC,MAAM;aACpB,CAAC,CAAC,IAAI,EAAE,CAAC;SACX,CAAA;IACH,CAAC;IAMD,QAAQ;QACN,OAAO;YACL,GAAG,IAAI,sBAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;gBAChD,GAAG,IAAI,CAAC,IAAI;gBACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;oBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;oBACzC,CAAC,CAAC,QAAQ;gBACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;gBACvB,MAAM,EAAE,IAAI,CAAC,MAAM;aACpB,CAAC,CAAC,QAAQ,EAAE;SACd,CAAA;IACH,CAAC;IAMD,MAAM;QACJ,OAAO,IAAI,sBAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;YACpD,GAAG,IAAI,CAAC,IAAI;YACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;gBACzC,CAAC,CAAC,QAAQ;YACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,MAAM,EAAE,IAAI,CAAC,MAAM;SACpB,CAAC,CAAC,MAAM,EAAE,CAAA;IACb,CAAC;IAMD,UAAU;QACR,OAAO,IAAI,sBAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;YACpD,GAAG,IAAI,CAAC,IAAI;YACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;gBACzC,CAAC,CAAC,QAAQ;YACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,MAAM,EAAE,IAAI,CAAC,MAAM;SACpB,CAAC,CAAC,UAAU,EAAE,CAAA;IACjB,CAAC;IAED;;;OAGG;IACH,WAAW;QACT,OAAO,IAAI,CAAC,UAAU,EAAE,CAAC,MAAM,CAAC,QAAQ,CAAC,EAAE,CAAA;IAC7C,CAAC;IACD,CAAC,MAAM,CAAC,QAAQ,CAAC;QACf,OAAO,IAAI,CAAC,WAAW,EAAE,CAAA;IAC3B,CAAC;IAED;;;OAGG;IACH,OAAO;QACL,OAAO,IAAI,CAAC,MAAM,EAAE,CAAC,MAAM,CAAC,aAAa,CAAC,EAAE,CAAA;IAC9C,CAAC;IACD,CAAC,MAAM,CAAC,aAAa,CAAC;QACpB,OAAO,IAAI,CAAC,OAAO,EAAE,CAAA;IACvB,CAAC;CACF;AArQD,oBAqQC","sourcesContent":["import { 
Minimatch, MinimatchOptions } from 'minimatch'\nimport { Minipass } from 'minipass'\nimport {\n FSOption,\n Path,\n PathScurry,\n PathScurryDarwin,\n PathScurryPosix,\n PathScurryWin32,\n} from 'path-scurry'\nimport { fileURLToPath } from 'url'\nimport { IgnoreLike } from './ignore.js'\nimport { Pattern } from './pattern.js'\nimport { GlobStream, GlobWalker } from './walker.js'\n\nexport type MatchSet = Minimatch['set']\nexport type GlobParts = Exclude\n\n// if no process global, just call it linux.\n// so we default to case-sensitive, / separators\nconst defaultPlatform: NodeJS.Platform =\n typeof process === 'object' &&\n process &&\n typeof process.platform === 'string'\n ? process.platform\n : 'linux'\n\n/**\n * A `GlobOptions` object may be provided to any of the exported methods, and\n * must be provided to the `Glob` constructor.\n *\n * All options are optional, boolean, and false by default, unless otherwise\n * noted.\n *\n * All resolved options are added to the Glob object as properties.\n *\n * If you are running many `glob` operations, you can pass a Glob object as the\n * `options` argument to a subsequent operation to share the previously loaded\n * cache.\n */\nexport interface GlobOptions {\n /**\n * Set to `true` to always receive absolute paths for\n * matched files. Set to `false` to always return relative paths.\n *\n * When this option is not set, absolute paths are returned for patterns\n * that are absolute, and otherwise paths are returned that are relative\n * to the `cwd` setting.\n *\n * This does _not_ make an extra system call to get\n * the realpath, it only does string path resolution.\n *\n * Conflicts with {@link withFileTypes}\n */\n absolute?: boolean\n\n /**\n * Set to false to enable {@link windowsPathsNoEscape}\n *\n * @deprecated\n */\n allowWindowsEscape?: boolean\n\n /**\n * The current working directory in which to search. Defaults to\n * `process.cwd()`.\n *\n * May be eiher a string path or a `file://` URL object or string.\n */\n cwd?: string | URL\n\n /**\n * Include `.dot` files in normal matches and `globstar`\n * matches. Note that an explicit dot in a portion of the pattern\n * will always match dot files.\n */\n dot?: boolean\n\n /**\n * Prepend all relative path strings with `./` (or `.\\` on Windows).\n *\n * Without this option, returned relative paths are \"bare\", so instead of\n * returning `'./foo/bar'`, they are returned as `'foo/bar'`.\n *\n * Relative patterns starting with `'../'` are not prepended with `./`, even\n * if this option is set.\n */\n dotRelative?: boolean\n\n /**\n * Follow symlinked directories when expanding `**`\n * patterns. This can result in a lot of duplicate references in\n * the presence of cyclic links, and make performance quite bad.\n *\n * By default, a `**` in a pattern will follow 1 symbolic link if\n * it is not the first item in the pattern, or none if it is the\n * first item in the pattern, following the same behavior as Bash.\n */\n follow?: boolean\n\n /**\n * string or string[], or an object with `ignore` and `ignoreChildren`\n * methods.\n *\n * If a string or string[] is provided, then this is treated as a glob\n * pattern or array of glob patterns to exclude from matches. 
To ignore all\n * children within a directory, as well as the entry itself, append `'/**'`\n * to the ignore pattern.\n *\n * **Note** `ignore` patterns are _always_ in `dot:true` mode, regardless of\n * any other settings.\n *\n * If an object is provided that has `ignored(path)` and/or\n * `childrenIgnored(path)` methods, then these methods will be called to\n * determine whether any Path is a match or if its children should be\n * traversed, respectively.\n */\n ignore?: string | string[] | IgnoreLike\n\n /**\n * Treat brace expansion like `{a,b}` as a \"magic\" pattern. Has no\n * effect if {@link nobrace} is set.\n *\n * Only has effect on the {@link hasMagic} function.\n */\n magicalBraces?: boolean\n\n /**\n * Add a `/` character to directory matches. Note that this requires\n * additional stat calls in some cases.\n */\n mark?: boolean\n\n /**\n * Perform a basename-only match if the pattern does not contain any slash\n * characters. That is, `*.js` would be treated as equivalent to\n * `**\\/*.js`, matching all js files in all directories.\n */\n matchBase?: boolean\n\n /**\n * Limit the directory traversal to a given depth below the cwd.\n * Note that this does NOT prevent traversal to sibling folders,\n * root patterns, and so on. It only limits the maximum folder depth\n * that the walk will descend, relative to the cwd.\n */\n maxDepth?: number\n\n /**\n * Do not expand `{a,b}` and `{1..3}` brace sets.\n */\n nobrace?: boolean\n\n /**\n * Perform a case-insensitive match. This defaults to `true` on macOS and\n * Windows systems, and `false` on all others.\n *\n * **Note** `nocase` should only be explicitly set when it is\n * known that the filesystem's case sensitivity differs from the\n * platform default. If set `true` on case-sensitive file\n * systems, or `false` on case-insensitive file systems, then the\n * walk may return more or less results than expected.\n */\n nocase?: boolean\n\n /**\n * Do not match directories, only files. (Note: to match\n * _only_ directories, put a `/` at the end of the pattern.)\n */\n nodir?: boolean\n\n /**\n * Do not match \"extglob\" patterns such as `+(a|b)`.\n */\n noext?: boolean\n\n /**\n * Do not match `**` against multiple filenames. (Ie, treat it as a normal\n * `*` instead.)\n *\n * Conflicts with {@link matchBase}\n */\n noglobstar?: boolean\n\n /**\n * Defaults to value of `process.platform` if available, or `'linux'` if\n * not. Setting `platform:'win32'` on non-Windows systems may cause strange\n * behavior.\n */\n platform?: NodeJS.Platform\n\n /**\n * Set to true to call `fs.realpath` on all of the\n * results. In the case of an entry that cannot be resolved, the\n * entry is omitted. This incurs a slight performance penalty, of\n * course, because of the added system calls.\n */\n realpath?: boolean\n\n /**\n *\n * A string path resolved against the `cwd` option, which\n * is used as the starting point for absolute patterns that start\n * with `/`, (but not drive letters or UNC paths on Windows).\n *\n * Note that this _doesn't_ necessarily limit the walk to the\n * `root` directory, and doesn't affect the cwd starting point for\n * non-absolute patterns. A pattern containing `..` will still be\n * able to traverse out of the root directory, if it is not an\n * actual root directory on the filesystem, and any non-absolute\n * patterns will be matched in the `cwd`. For example, the\n * pattern `/../*` with `{root:'/some/path'}` will return all\n * files in `/some`, not all files in `/some/path`. 
The pattern\n * `*` with `{root:'/some/path'}` will return all the entries in\n * the cwd, not the entries in `/some/path`.\n *\n * To start absolute and non-absolute patterns in the same\n * path, you can use `{root:''}`. However, be aware that on\n * Windows systems, a pattern like `x:/*` or `//host/share/*` will\n * _always_ start in the `x:/` or `//host/share` directory,\n * regardless of the `root` setting.\n */\n root?: string\n\n /**\n * A [PathScurry](http://npm.im/path-scurry) object used\n * to traverse the file system. If the `nocase` option is set\n * explicitly, then any provided `scurry` object must match this\n * setting.\n */\n scurry?: PathScurry\n\n /**\n * Call `lstat()` on all entries, whether required or not to determine\n * if it's a valid match. When used with {@link withFileTypes}, this means\n * that matches will include data such as modified time, permissions, and\n * so on. Note that this will incur a performance cost due to the added\n * system calls.\n */\n stat?: boolean\n\n /**\n * An AbortSignal which will cancel the Glob walk when\n * triggered.\n */\n signal?: AbortSignal\n\n /**\n * Use `\\\\` as a path separator _only_, and\n * _never_ as an escape character. If set, all `\\\\` characters are\n * replaced with `/` in the pattern.\n *\n * Note that this makes it **impossible** to match against paths\n * containing literal glob pattern characters, but allows matching\n * with patterns constructed using `path.join()` and\n * `path.resolve()` on Windows platforms, mimicking the (buggy!)\n * behavior of Glob v7 and before on Windows. Please use with\n * caution, and be mindful of [the caveat below about Windows\n * paths](#windows). (For legacy reasons, this is also set if\n * `allowWindowsEscape` is set to the exact value `false`.)\n */\n windowsPathsNoEscape?: boolean\n\n /**\n * Return [PathScurry](http://npm.im/path-scurry)\n * `Path` objects instead of strings. These are similar to a\n * NodeJS `Dirent` object, but with additional methods and\n * properties.\n *\n * Conflicts with {@link absolute}\n */\n withFileTypes?: boolean\n\n /**\n * An fs implementation to override some or all of the defaults. See\n * http://npm.im/path-scurry for details about what can be overridden.\n */\n fs?: FSOption\n\n /**\n * Just passed along to Minimatch. Note that this makes all pattern\n * matching operations slower and *extremely* noisy.\n */\n debug?: boolean\n\n /**\n * Return `/` delimited paths, even on Windows.\n *\n * On posix systems, this has no effect. But, on Windows, it means that\n * paths will be `/` delimited, and absolute paths will be their full\n * resolved UNC forms, eg instead of `'C:\\\\foo\\\\bar'`, it would return\n * `'//?/C:/foo/bar'`\n */\n posix?: boolean\n}\n\nexport type GlobOptionsWithFileTypesTrue = GlobOptions & {\n withFileTypes: true\n // string options not relevant if returning Path objects.\n absolute?: undefined\n mark?: undefined\n posix?: undefined\n}\n\nexport type GlobOptionsWithFileTypesFalse = GlobOptions & {\n withFileTypes?: false\n}\n\nexport type GlobOptionsWithFileTypesUnset = GlobOptions & {\n withFileTypes?: undefined\n}\n\nexport type Result = Opts extends GlobOptionsWithFileTypesTrue\n ? Path\n : Opts extends GlobOptionsWithFileTypesFalse\n ? string\n : Opts extends GlobOptionsWithFileTypesUnset\n ? string\n : string | Path\nexport type Results = Result[]\n\nexport type FileTypes = Opts extends GlobOptionsWithFileTypesTrue\n ? true\n : Opts extends GlobOptionsWithFileTypesFalse\n ? 
false\n : Opts extends GlobOptionsWithFileTypesUnset\n ? false\n : boolean\n\n/**\n * An object that can perform glob pattern traversals.\n */\nexport class Glob implements GlobOptions {\n absolute?: boolean\n cwd: string\n root?: string\n dot: boolean\n dotRelative: boolean\n follow: boolean\n ignore?: string | string[] | IgnoreLike\n magicalBraces: boolean\n mark?: boolean\n matchBase: boolean\n maxDepth: number\n nobrace: boolean\n nocase: boolean\n nodir: boolean\n noext: boolean\n noglobstar: boolean\n pattern: string[]\n platform: NodeJS.Platform\n realpath: boolean\n scurry: PathScurry\n stat: boolean\n signal?: AbortSignal\n windowsPathsNoEscape: boolean\n withFileTypes: FileTypes\n\n /**\n * The options provided to the constructor.\n */\n opts: Opts\n\n /**\n * An array of parsed immutable {@link Pattern} objects.\n */\n patterns: Pattern[]\n\n /**\n * All options are stored as properties on the `Glob` object.\n *\n * See {@link GlobOptions} for full options descriptions.\n *\n * Note that a previous `Glob` object can be passed as the\n * `GlobOptions` to another `Glob` instantiation to re-use settings\n * and caches with a new pattern.\n *\n * Traversal functions can be called multiple times to run the walk\n * again.\n */\n constructor(pattern: string | string[], opts: Opts) {\n /* c8 ignore start */\n if (!opts) throw new TypeError('glob options required')\n /* c8 ignore stop */\n this.withFileTypes = !!opts.withFileTypes as FileTypes\n this.signal = opts.signal\n this.follow = !!opts.follow\n this.dot = !!opts.dot\n this.dotRelative = !!opts.dotRelative\n this.nodir = !!opts.nodir\n this.mark = !!opts.mark\n if (!opts.cwd) {\n this.cwd = ''\n } else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) {\n opts.cwd = fileURLToPath(opts.cwd)\n }\n this.cwd = opts.cwd || ''\n this.root = opts.root\n this.magicalBraces = !!opts.magicalBraces\n this.nobrace = !!opts.nobrace\n this.noext = !!opts.noext\n this.realpath = !!opts.realpath\n this.absolute = opts.absolute\n\n this.noglobstar = !!opts.noglobstar\n this.matchBase = !!opts.matchBase\n this.maxDepth =\n typeof opts.maxDepth === 'number' ? opts.maxDepth : Infinity\n this.stat = !!opts.stat\n this.ignore = opts.ignore\n\n if (this.withFileTypes && this.absolute !== undefined) {\n throw new Error('cannot set absolute and withFileTypes:true')\n }\n\n if (typeof pattern === 'string') {\n pattern = [pattern]\n }\n\n this.windowsPathsNoEscape =\n !!opts.windowsPathsNoEscape ||\n (opts as GlobOptions).allowWindowsEscape === false\n\n if (this.windowsPathsNoEscape) {\n pattern = pattern.map(p => p.replace(/\\\\/g, '/'))\n }\n\n if (this.matchBase) {\n if (opts.noglobstar) {\n throw new TypeError('base matching requires globstar')\n }\n pattern = pattern.map(p => (p.includes('/') ? p : `./**/${p}`))\n }\n\n this.pattern = pattern\n\n this.platform = opts.platform || defaultPlatform\n this.opts = { ...opts, platform: this.platform }\n if (opts.scurry) {\n this.scurry = opts.scurry\n if (\n opts.nocase !== undefined &&\n opts.nocase !== opts.scurry.nocase\n ) {\n throw new Error('nocase option contradicts provided scurry option')\n }\n } else {\n const Scurry =\n opts.platform === 'win32'\n ? PathScurryWin32\n : opts.platform === 'darwin'\n ? PathScurryDarwin\n : opts.platform\n ? 
PathScurryPosix\n : PathScurry\n this.scurry = new Scurry(this.cwd, {\n nocase: opts.nocase,\n fs: opts.fs,\n })\n }\n this.nocase = this.scurry.nocase\n\n // If you do nocase:true on a case-sensitive file system, then\n // we need to use regexps instead of strings for non-magic\n // path portions, because statting `aBc` won't return results\n // for the file `AbC` for example.\n const nocaseMagicOnly =\n this.platform === 'darwin' || this.platform === 'win32'\n\n const mmo: MinimatchOptions = {\n // default nocase based on platform\n ...opts,\n dot: this.dot,\n matchBase: this.matchBase,\n nobrace: this.nobrace,\n nocase: this.nocase,\n nocaseMagicOnly,\n nocomment: true,\n noext: this.noext,\n nonegate: true,\n optimizationLevel: 2,\n platform: this.platform,\n windowsPathsNoEscape: this.windowsPathsNoEscape,\n debug: !!this.opts.debug,\n }\n\n const mms = this.pattern.map(p => new Minimatch(p, mmo))\n const [matchSet, globParts] = mms.reduce(\n (set: [MatchSet, GlobParts], m) => {\n set[0].push(...m.set)\n set[1].push(...m.globParts)\n return set\n },\n [[], []]\n )\n this.patterns = matchSet.map((set, i) => {\n return new Pattern(set, globParts[i], 0, this.platform)\n })\n }\n\n /**\n * Returns a Promise that resolves to the results array.\n */\n async walk(): Promise>\n async walk(): Promise<(string | Path)[]> {\n // Walkers always return array of Path objects, so we just have to\n // coerce them into the right shape. It will have already called\n // realpath() if the option was set to do so, so we know that's cached.\n // start out knowing the cwd, at least\n return [\n ...(await new GlobWalker(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).walk()),\n ]\n }\n\n /**\n * synchronous {@link Glob.walk}\n */\n walkSync(): Results\n walkSync(): (string | Path)[] {\n return [\n ...new GlobWalker(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).walkSync(),\n ]\n }\n\n /**\n * Stream results asynchronously.\n */\n stream(): Minipass, Result>\n stream(): Minipass {\n return new GlobStream(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).stream()\n }\n\n /**\n * Stream results synchronously.\n */\n streamSync(): Minipass, Result>\n streamSync(): Minipass {\n return new GlobStream(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).streamSync()\n }\n\n /**\n * Default sync iteration function. Returns a Generator that\n * iterates over the results.\n */\n iterateSync(): Generator, void, void> {\n return this.streamSync()[Symbol.iterator]()\n }\n [Symbol.iterator]() {\n return this.iterateSync()\n }\n\n /**\n * Default async iteration function. 
Returns an AsyncGenerator that\n * iterates over the results.\n */\n iterate(): AsyncGenerator, void, void> {\n return this.stream()[Symbol.asyncIterator]()\n }\n [Symbol.asyncIterator]() {\n return this.iterate()\n }\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/glob/dist/mjs/glob.d.ts.map b/deps/npm/node_modules/glob/dist/mjs/glob.d.ts.map index b06e4633443c87..d45258ac24a580 100644 --- a/deps/npm/node_modules/glob/dist/mjs/glob.d.ts.map +++ b/deps/npm/node_modules/glob/dist/mjs/glob.d.ts.map @@ -1 +1 @@ -{"version":3,"file":"glob.d.ts","sourceRoot":"","sources":["../../src/glob.ts"],"names":[],"mappings":";AAAA,OAAO,EAAE,SAAS,EAAoB,MAAM,WAAW,CAAA;AACvD,OAAO,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAA;AACnC,OAAO,EACL,QAAQ,EACR,IAAI,EACJ,UAAU,EAIX,MAAM,aAAa,CAAA;AAEpB,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AACxC,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAA;AAGtC,MAAM,MAAM,QAAQ,GAAG,SAAS,CAAC,KAAK,CAAC,CAAA;AACvC,MAAM,MAAM,SAAS,GAAG,OAAO,CAAC,SAAS,CAAC,WAAW,CAAC,EAAE,SAAS,CAAC,CAAA;AAWlE;;;;;;;;;;;;GAYG;AACH,MAAM,WAAW,WAAW;IAC1B;;;;;;;;;;;;OAYG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAA;IAElB;;;;OAIG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAA;IAE5B;;;;;OAKG;IACH,GAAG,CAAC,EAAE,MAAM,GAAG,GAAG,CAAA;IAElB;;;;OAIG;IACH,GAAG,CAAC,EAAE,OAAO,CAAA;IAEb;;;;;;;;OAQG;IACH,WAAW,CAAC,EAAE,OAAO,CAAA;IAErB;;;;;;;;OAQG;IACH,MAAM,CAAC,EAAE,OAAO,CAAA;IAEhB;;;;;;;;;;;;;;;;OAgBG;IACH,MAAM,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,UAAU,CAAA;IAEvC;;;;;OAKG;IACH,aAAa,CAAC,EAAE,OAAO,CAAA;IAEvB;;;OAGG;IACH,IAAI,CAAC,EAAE,OAAO,CAAA;IAEd;;;;OAIG;IACH,SAAS,CAAC,EAAE,OAAO,CAAA;IAEnB;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAA;IAEjB;;OAEG;IACH,OAAO,CAAC,EAAE,OAAO,CAAA;IAEjB;;;;;;;;;OASG;IACH,MAAM,CAAC,EAAE,OAAO,CAAA;IAEhB;;;OAGG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;OAEG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;;;;OAKG;IACH,UAAU,CAAC,EAAE,OAAO,CAAA;IAEpB;;;;OAIG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC,QAAQ,CAAA;IAE1B;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAA;IAElB;;;;;;;;;;;;;;;;;;;;;;OAsBG;IACH,IAAI,CAAC,EAAE,MAAM,CAAA;IAEb;;;;;OAKG;IACH,MAAM,CAAC,EAAE,UAAU,CAAA;IAEnB;;;;;;OAMG;IACH,IAAI,CAAC,EAAE,OAAO,CAAA;IAEd;;;OAGG;IACH,MAAM,CAAC,EAAE,WAAW,CAAA;IAEpB;;;;;;;;;;;;;OAaG;IACH,oBAAoB,CAAC,EAAE,OAAO,CAAA;IAE9B;;;;;;;OAOG;IACH,aAAa,CAAC,EAAE,OAAO,CAAA;IAEvB;;;OAGG;IACH,EAAE,CAAC,EAAE,QAAQ,CAAA;IAEb;;;OAGG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;;;;;;OAOG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;CAChB;AAED,MAAM,MAAM,4BAA4B,GAAG,WAAW,GAAG;IACvD,aAAa,EAAE,IAAI,CAAA;IAEnB,QAAQ,CAAC,EAAE,SAAS,CAAA;IACpB,IAAI,CAAC,EAAE,SAAS,CAAA;IAChB,KAAK,CAAC,EAAE,SAAS,CAAA;CAClB,CAAA;AAED,MAAM,MAAM,6BAA6B,GAAG,WAAW,GAAG;IACxD,aAAa,CAAC,EAAE,KAAK,CAAA;CACtB,CAAA;AAED,MAAM,MAAM,6BAA6B,GAAG,WAAW,GAAG;IACxD,aAAa,CAAC,EAAE,SAAS,CAAA;CAC1B,CAAA;AAED,MAAM,MAAM,MAAM,CAAC,IAAI,IAAI,IAAI,SAAS,4BAA4B,GAChE,IAAI,GACJ,IAAI,SAAS,6BAA6B,GAC1C,MAAM,GACN,IAAI,SAAS,6BAA6B,GAC1C,MAAM,GACN,MAAM,GAAG,IAAI,CAAA;AACjB,MAAM,MAAM,OAAO,CAAC,IAAI,IAAI,MAAM,CAAC,IAAI,CAAC,EAAE,CAAA;AAE1C,MAAM,MAAM,SAAS,CAAC,IAAI,IAAI,IAAI,SAAS,4BAA4B,GACnE,IAAI,GACJ,IAAI,SAAS,6BAA6B,GAC1C,KAAK,GACL,IAAI,SAAS,6BAA6B,GAC1C,KAAK,GACL,OAAO,CAAA;AAEX;;GAEG;AACH,qBAAa,IAAI,CAAC,IAAI,SAAS,WAAW,CAAE,YAAW,WAAW;IAChE,QAAQ,CAAC,EAAE,OAAO,CAAA;IAClB,GAAG,EAAE,MAAM,CAAA;IACX,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,GAAG,EAAE,OAAO,CAAA;IACZ,WAAW,EAAE,OAAO,CAAA;IACpB,MAAM,EAAE,OAAO,CAAA;IACf,MAAM,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,UAAU,CAAA;IACvC,aAAa,EAAE,OAAO,CAAA;IACtB,IAAI,CAAC,EAAE,OAAO,CAAA;IACd,SAAS,EAAE,OAAO,CAAA;IAClB,QAAQ,EAAE,MAAM,CAAA;IAChB,OAAO,EAAE,OAAO,CAAA;IAChB,MAAM,EAAE,OAAO,CAAA;IACf,KAAK,EAAE,OAAO,CAAA;IACd,KAAK,EAAE,OAAO,CAAA;IACd,UAAU,EAAE,OAAO,CAAA;IACnB,OAAO,E
AAE,MAAM,EAAE,CAAA;IACjB,QAAQ,EAAE,MAAM,CAAC,QAAQ,CAAA;IACzB,QAAQ,EAAE,OAAO,CAAA;IACjB,MAAM,EAAE,UAAU,CAAA;IAClB,IAAI,EAAE,OAAO,CAAA;IACb,MAAM,CAAC,EAAE,WAAW,CAAA;IACpB,oBAAoB,EAAE,OAAO,CAAA;IAC7B,aAAa,EAAE,SAAS,CAAC,IAAI,CAAC,CAAA;IAE9B;;OAEG;IACH,IAAI,EAAE,IAAI,CAAA;IAEV;;OAEG;IACH,QAAQ,EAAE,OAAO,EAAE,CAAA;IAEnB;;;;;;;;;;;OAWG;gBACS,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI;IAqHlD;;OAEG;IACG,IAAI,IAAI,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;IAmBpC;;OAEG;IACH,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;IAezB;;OAEG;IACH,MAAM,IAAI,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC;IAa9C;;OAEG;IACH,UAAU,IAAI,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC;IAalD;;;OAGG;IACH,WAAW,IAAI,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,IAAI,CAAC;IAGlD,CAAC,MAAM,CAAC,QAAQ,CAAC;IAIjB;;;OAGG;IACH,OAAO,IAAI,cAAc,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,IAAI,CAAC;IAGnD,CAAC,MAAM,CAAC,aAAa,CAAC;CAGvB"} \ No newline at end of file +{"version":3,"file":"glob.d.ts","sourceRoot":"","sources":["../../src/glob.ts"],"names":[],"mappings":";AAAA,OAAO,EAAE,SAAS,EAAoB,MAAM,WAAW,CAAA;AACvD,OAAO,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAA;AACnC,OAAO,EACL,QAAQ,EACR,IAAI,EACJ,UAAU,EAIX,MAAM,aAAa,CAAA;AAEpB,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AACxC,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAA;AAGtC,MAAM,MAAM,QAAQ,GAAG,SAAS,CAAC,KAAK,CAAC,CAAA;AACvC,MAAM,MAAM,SAAS,GAAG,OAAO,CAAC,SAAS,CAAC,WAAW,CAAC,EAAE,SAAS,CAAC,CAAA;AAWlE;;;;;;;;;;;;GAYG;AACH,MAAM,WAAW,WAAW;IAC1B;;;;;;;;;;;;OAYG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAA;IAElB;;;;OAIG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAA;IAE5B;;;;;OAKG;IACH,GAAG,CAAC,EAAE,MAAM,GAAG,GAAG,CAAA;IAElB;;;;OAIG;IACH,GAAG,CAAC,EAAE,OAAO,CAAA;IAEb;;;;;;;;OAQG;IACH,WAAW,CAAC,EAAE,OAAO,CAAA;IAErB;;;;;;;;OAQG;IACH,MAAM,CAAC,EAAE,OAAO,CAAA;IAEhB;;;;;;;;;;;;;;;;OAgBG;IACH,MAAM,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,UAAU,CAAA;IAEvC;;;;;OAKG;IACH,aAAa,CAAC,EAAE,OAAO,CAAA;IAEvB;;;OAGG;IACH,IAAI,CAAC,EAAE,OAAO,CAAA;IAEd;;;;OAIG;IACH,SAAS,CAAC,EAAE,OAAO,CAAA;IAEnB;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAA;IAEjB;;OAEG;IACH,OAAO,CAAC,EAAE,OAAO,CAAA;IAEjB;;;;;;;;;OASG;IACH,MAAM,CAAC,EAAE,OAAO,CAAA;IAEhB;;;OAGG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;OAEG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;;;;OAKG;IACH,UAAU,CAAC,EAAE,OAAO,CAAA;IAEpB;;;;OAIG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC,QAAQ,CAAA;IAE1B;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAA;IAElB;;;;;;;;;;;;;;;;;;;;;;OAsBG;IACH,IAAI,CAAC,EAAE,MAAM,CAAA;IAEb;;;;;OAKG;IACH,MAAM,CAAC,EAAE,UAAU,CAAA;IAEnB;;;;;;OAMG;IACH,IAAI,CAAC,EAAE,OAAO,CAAA;IAEd;;;OAGG;IACH,MAAM,CAAC,EAAE,WAAW,CAAA;IAEpB;;;;;;;;;;;;;OAaG;IACH,oBAAoB,CAAC,EAAE,OAAO,CAAA;IAE9B;;;;;;;OAOG;IACH,aAAa,CAAC,EAAE,OAAO,CAAA;IAEvB;;;OAGG;IACH,EAAE,CAAC,EAAE,QAAQ,CAAA;IAEb;;;OAGG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;;;;;;OAOG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;CAChB;AAED,MAAM,MAAM,4BAA4B,GAAG,WAAW,GAAG;IACvD,aAAa,EAAE,IAAI,CAAA;IAEnB,QAAQ,CAAC,EAAE,SAAS,CAAA;IACpB,IAAI,CAAC,EAAE,SAAS,CAAA;IAChB,KAAK,CAAC,EAAE,SAAS,CAAA;CAClB,CAAA;AAED,MAAM,MAAM,6BAA6B,GAAG,WAAW,GAAG;IACxD,aAAa,CAAC,EAAE,KAAK,CAAA;CACtB,CAAA;AAED,MAAM,MAAM,6BAA6B,GAAG,WAAW,GAAG;IACxD,aAAa,CAAC,EAAE,SAAS,CAAA;CAC1B,CAAA;AAED,MAAM,MAAM,MAAM,CAAC,IAAI,IAAI,IAAI,SAAS,4BAA4B,GAChE,IAAI,GACJ,IAAI,SAAS,6BAA6B,GAC1C,MAAM,GACN,IAAI,SAAS,6BAA6B,GAC1C,MAAM,GACN,MAAM,GAAG,IAAI,CAAA;AACjB,MAAM,MAAM,OAAO,CAAC,IAAI,IAAI,MAAM,CAAC,IAAI,CAAC,EAAE,CAAA;AAE1C,MAAM,MAAM,SAAS,CAAC,IAAI,IAAI,IAAI,SAAS,4BAA4B,GACnE,IAAI,GACJ,IAAI,SAAS,6BAA6B,GAC1C,KAAK,GACL,IAAI,SAAS,6BAA6B,GAC1C,KAAK,GACL,OAAO,CAAA;AAEX;;GAEG;AACH,qBAAa,IAAI,CAAC,IAAI,SAAS,WAAW,CAAE,YAAW,WAAW;IAChE,QAAQ,CAAC,EAAE,OAAO,CAAA;IAClB,GAAG,EAAE,MAAM,CAAA;IACX,IAAI
,CAAC,EAAE,MAAM,CAAA;IACb,GAAG,EAAE,OAAO,CAAA;IACZ,WAAW,EAAE,OAAO,CAAA;IACpB,MAAM,EAAE,OAAO,CAAA;IACf,MAAM,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,UAAU,CAAA;IACvC,aAAa,EAAE,OAAO,CAAA;IACtB,IAAI,CAAC,EAAE,OAAO,CAAA;IACd,SAAS,EAAE,OAAO,CAAA;IAClB,QAAQ,EAAE,MAAM,CAAA;IAChB,OAAO,EAAE,OAAO,CAAA;IAChB,MAAM,EAAE,OAAO,CAAA;IACf,KAAK,EAAE,OAAO,CAAA;IACd,KAAK,EAAE,OAAO,CAAA;IACd,UAAU,EAAE,OAAO,CAAA;IACnB,OAAO,EAAE,MAAM,EAAE,CAAA;IACjB,QAAQ,EAAE,MAAM,CAAC,QAAQ,CAAA;IACzB,QAAQ,EAAE,OAAO,CAAA;IACjB,MAAM,EAAE,UAAU,CAAA;IAClB,IAAI,EAAE,OAAO,CAAA;IACb,MAAM,CAAC,EAAE,WAAW,CAAA;IACpB,oBAAoB,EAAE,OAAO,CAAA;IAC7B,aAAa,EAAE,SAAS,CAAC,IAAI,CAAC,CAAA;IAE9B;;OAEG;IACH,IAAI,EAAE,IAAI,CAAA;IAEV;;OAEG;IACH,QAAQ,EAAE,OAAO,EAAE,CAAA;IAEnB;;;;;;;;;;;OAWG;gBACS,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI;IAwHlD;;OAEG;IACG,IAAI,IAAI,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;IAmBpC;;OAEG;IACH,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;IAezB;;OAEG;IACH,MAAM,IAAI,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC;IAa9C;;OAEG;IACH,UAAU,IAAI,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC;IAalD;;;OAGG;IACH,WAAW,IAAI,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,IAAI,CAAC;IAGlD,CAAC,MAAM,CAAC,QAAQ,CAAC;IAIjB;;;OAGG;IACH,OAAO,IAAI,cAAc,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,IAAI,CAAC;IAGnD,CAAC,MAAM,CAAC,aAAa,CAAC;CAGvB"} \ No newline at end of file diff --git a/deps/npm/node_modules/glob/dist/mjs/glob.js b/deps/npm/node_modules/glob/dist/mjs/glob.js index f158065746e586..8ff26154427be9 100644 --- a/deps/npm/node_modules/glob/dist/mjs/glob.js +++ b/deps/npm/node_modules/glob/dist/mjs/glob.js @@ -59,6 +59,10 @@ export class Glob { * again. */ constructor(pattern, opts) { + /* c8 ignore start */ + if (!opts) + throw new TypeError('glob options required'); + /* c8 ignore stop */ this.withFileTypes = !!opts.withFileTypes; this.signal = opts.signal; this.follow = !!opts.follow; diff --git a/deps/npm/node_modules/glob/dist/mjs/glob.js.map b/deps/npm/node_modules/glob/dist/mjs/glob.js.map index 93eb61df16f5ca..94558c1d2c66a4 100644 --- a/deps/npm/node_modules/glob/dist/mjs/glob.js.map +++ b/deps/npm/node_modules/glob/dist/mjs/glob.js.map @@ -1 +1 @@ 
-{"version":3,"file":"glob.js","sourceRoot":"","sources":["../../src/glob.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAoB,MAAM,WAAW,CAAA;AAEvD,OAAO,EAGL,UAAU,EACV,gBAAgB,EAChB,eAAe,EACf,eAAe,GAChB,MAAM,aAAa,CAAA;AACpB,OAAO,EAAE,aAAa,EAAE,MAAM,KAAK,CAAA;AAEnC,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAA;AACtC,OAAO,EAAE,UAAU,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AAKpD,4CAA4C;AAC5C,gDAAgD;AAChD,MAAM,eAAe,GACnB,OAAO,OAAO,KAAK,QAAQ;IAC3B,OAAO;IACP,OAAO,OAAO,CAAC,QAAQ,KAAK,QAAQ;IAClC,CAAC,CAAC,OAAO,CAAC,QAAQ;IAClB,CAAC,CAAC,OAAO,CAAA;AAgTb;;GAEG;AACH,MAAM,OAAO,IAAI;IACf,QAAQ,CAAU;IAClB,GAAG,CAAQ;IACX,IAAI,CAAS;IACb,GAAG,CAAS;IACZ,WAAW,CAAS;IACpB,MAAM,CAAS;IACf,MAAM,CAAiC;IACvC,aAAa,CAAS;IACtB,IAAI,CAAU;IACd,SAAS,CAAS;IAClB,QAAQ,CAAQ;IAChB,OAAO,CAAS;IAChB,MAAM,CAAS;IACf,KAAK,CAAS;IACd,KAAK,CAAS;IACd,UAAU,CAAS;IACnB,OAAO,CAAU;IACjB,QAAQ,CAAiB;IACzB,QAAQ,CAAS;IACjB,MAAM,CAAY;IAClB,IAAI,CAAS;IACb,MAAM,CAAc;IACpB,oBAAoB,CAAS;IAC7B,aAAa,CAAiB;IAE9B;;OAEG;IACH,IAAI,CAAM;IAEV;;OAEG;IACH,QAAQ,CAAW;IAEnB;;;;;;;;;;;OAWG;IACH,YAAY,OAA0B,EAAE,IAAU;QAChD,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC,IAAI,CAAC,aAAgC,CAAA;QAC5D,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;QACzB,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,IAAI,CAAC,MAAM,CAAA;QAC3B,IAAI,CAAC,GAAG,GAAG,CAAC,CAAC,IAAI,CAAC,GAAG,CAAA;QACrB,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC,IAAI,CAAC,WAAW,CAAA;QACrC,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,IAAI,CAAC,KAAK,CAAA;QACzB,IAAI,CAAC,IAAI,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,CAAA;QACvB,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE;YACb,IAAI,CAAC,GAAG,GAAG,EAAE,CAAA;SACd;aAAM,IAAI,IAAI,CAAC,GAAG,YAAY,GAAG,IAAI,IAAI,CAAC,GAAG,CAAC,UAAU,CAAC,SAAS,CAAC,EAAE;YACpE,IAAI,CAAC,GAAG,GAAG,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;SACnC;QACD,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,IAAI,EAAE,CAAA;QACzB,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,CAAA;QACrB,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC,IAAI,CAAC,aAAa,CAAA;QACzC,IAAI,CAAC,OAAO,GAAG,CAAC,CAAC,IAAI,CAAC,OAAO,CAAA;QAC7B,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,IAAI,CAAC,KAAK,CAAA;QACzB,IAAI,CAAC,QAAQ,GAAG,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAA;QAC/B,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAA;QAE7B,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC,IAAI,CAAC,UAAU,CAAA;QACnC,IAAI,CAAC,SAAS,GAAG,CAAC,CAAC,IAAI,CAAC,SAAS,CAAA;QACjC,IAAI,CAAC,QAAQ;YACX,OAAO,IAAI,CAAC,QAAQ,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAA;QAC9D,IAAI,CAAC,IAAI,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,CAAA;QACvB,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;QAEzB,IAAI,IAAI,CAAC,aAAa,IAAI,IAAI,CAAC,QAAQ,KAAK,SAAS,EAAE;YACrD,MAAM,IAAI,KAAK,CAAC,4CAA4C,CAAC,CAAA;SAC9D;QAED,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;YAC/B,OAAO,GAAG,CAAC,OAAO,CAAC,CAAA;SACpB;QAED,IAAI,CAAC,oBAAoB;YACvB,CAAC,CAAC,IAAI,CAAC,oBAAoB;gBAC1B,IAAoB,CAAC,kBAAkB,KAAK,KAAK,CAAA;QAEpD,IAAI,IAAI,CAAC,oBAAoB,EAAE;YAC7B,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAA;SAClD;QAED,IAAI,IAAI,CAAC,SAAS,EAAE;YAClB,IAAI,IAAI,CAAC,UAAU,EAAE;gBACnB,MAAM,IAAI,SAAS,CAAC,iCAAiC,CAAC,CAAA;aACvD;YACD,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAA;SAChE;QAED,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;QAEtB,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,IAAI,eAAe,CAAA;QAChD,IAAI,CAAC,IAAI,GAAG,EAAE,GAAG,IAAI,EAAE,QAAQ,EAAE,IAAI,CAAC,QAAQ,EAAE,CAAA;QAChD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;YACzB,IACE,IAAI,CAAC,MAAM,KAAK,SAAS;gBACzB,IAAI,CAAC,MAAM,KAAK,IAAI,CAAC,MAAM,CAAC,MAAM,EAClC;gBACA,MAAM,IAAI,KAAK,CAAC,kDAAkD,CAAC,CAAA;aACpE;SACF;aAAM;YACL,MAAM,MAAM,GACV,IAAI,CAAC,QAAQ,KAAK,OAAO;gBACvB,CAAC,CAAC,eAAe;gBACjB,CAAC,CAAC,IAAI,CAAC,QAAQ,KAAK,QAAQ;oBAC5B
,CAAC,CAAC,gBAAgB;oBAClB,CAAC,CAAC,IAAI,CAAC,QAAQ;wBACf,CAAC,CAAC,eAAe;wBACjB,CAAC,CAAC,UAAU,CAAA;YAChB,IAAI,CAAC,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE;gBACjC,MAAM,EAAE,IAAI,CAAC,MAAM;gBACnB,EAAE,EAAE,IAAI,CAAC,EAAE;aACZ,CAAC,CAAA;SACH;QACD,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,CAAA;QAEhC,8DAA8D;QAC9D,0DAA0D;QAC1D,6DAA6D;QAC7D,kCAAkC;QAClC,MAAM,eAAe,GACnB,IAAI,CAAC,QAAQ,KAAK,QAAQ,IAAI,IAAI,CAAC,QAAQ,KAAK,OAAO,CAAA;QAEzD,MAAM,GAAG,GAAqB;YAC5B,mCAAmC;YACnC,GAAG,IAAI;YACP,GAAG,EAAE,IAAI,CAAC,GAAG;YACb,SAAS,EAAE,IAAI,CAAC,SAAS;YACzB,OAAO,EAAE,IAAI,CAAC,OAAO;YACrB,MAAM,EAAE,IAAI,CAAC,MAAM;YACnB,eAAe;YACf,SAAS,EAAE,IAAI;YACf,KAAK,EAAE,IAAI,CAAC,KAAK;YACjB,QAAQ,EAAE,IAAI;YACd,iBAAiB,EAAE,CAAC;YACpB,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,oBAAoB,EAAE,IAAI,CAAC,oBAAoB;YAC/C,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK;SACzB,CAAA;QAED,MAAM,GAAG,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,SAAS,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAA;QACxD,MAAM,CAAC,QAAQ,EAAE,SAAS,CAAC,GAAG,GAAG,CAAC,MAAM,CACtC,CAAC,GAA0B,EAAE,CAAC,EAAE,EAAE;YAChC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAA;YACrB,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,SAAS,CAAC,CAAA;YAC3B,OAAO,GAAG,CAAA;QACZ,CAAC,EACD,CAAC,EAAE,EAAE,EAAE,CAAC,CACT,CAAA;QACD,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,CAAC,EAAE,EAAE;YACtC,OAAO,IAAI,OAAO,CAAC,GAAG,EAAE,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAA;QACzD,CAAC,CAAC,CAAA;IACJ,CAAC;IAMD,KAAK,CAAC,IAAI;QACR,kEAAkE;QAClE,iEAAiE;QACjE,uEAAuE;QACvE,sCAAsC;QACtC,OAAO;YACL,GAAG,CAAC,MAAM,IAAI,UAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;gBACvD,GAAG,IAAI,CAAC,IAAI;gBACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;oBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;oBACzC,CAAC,CAAC,QAAQ;gBACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;gBACvB,MAAM,EAAE,IAAI,CAAC,MAAM;aACpB,CAAC,CAAC,IAAI,EAAE,CAAC;SACX,CAAA;IACH,CAAC;IAMD,QAAQ;QACN,OAAO;YACL,GAAG,IAAI,UAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;gBAChD,GAAG,IAAI,CAAC,IAAI;gBACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;oBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;oBACzC,CAAC,CAAC,QAAQ;gBACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;gBACvB,MAAM,EAAE,IAAI,CAAC,MAAM;aACpB,CAAC,CAAC,QAAQ,EAAE;SACd,CAAA;IACH,CAAC;IAMD,MAAM;QACJ,OAAO,IAAI,UAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;YACpD,GAAG,IAAI,CAAC,IAAI;YACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;gBACzC,CAAC,CAAC,QAAQ;YACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,MAAM,EAAE,IAAI,CAAC,MAAM;SACpB,CAAC,CAAC,MAAM,EAAE,CAAA;IACb,CAAC;IAMD,UAAU;QACR,OAAO,IAAI,UAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;YACpD,GAAG,IAAI,CAAC,IAAI;YACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;gBACzC,CAAC,CAAC,QAAQ;YACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,MAAM,EAAE,IAAI,CAAC,MAAM;SACpB,CAAC,CAAC,UAAU,EAAE,CAAA;IACjB,CAAC;IAED;;;OAGG;IACH,WAAW;QACT,OAAO,IAAI,CAAC,UAAU,EAAE,CAAC,MAAM,CAAC,QAAQ,CAAC,EAAE,CAAA;IAC7C,CAAC;IACD,CAAC,MAAM,CAAC,QAAQ,CAAC;QACf,OAAO,IAAI,CAAC,WAAW,EAAE,CAAA;IAC3B,CAAC;IAED;;;OAGG;IACH,OAAO;QACL,OAAO,IAAI,CAAC,MAAM,EAAE,CAAC,MAAM,CAAC,aAAa,CAAC,EAAE,CAAA;IAC9C,CAAC;IACD,CAAC,MAAM,CAAC,aAAa,CAAC;QACpB,OAAO,IAAI,CAAC,OAAO,EAAE,CAAA;IACvB,CAAC;CACF","sourcesContent":["import { Minimatch, MinimatchOptions } from 'minimatch'\nimport { Minipass } from 'minipass'\nimport {\n FSOption,\n Path,\n PathScurry,\n PathScurryDarwin,\n PathScurryPosix,\n 
PathScurryWin32,\n} from 'path-scurry'\nimport { fileURLToPath } from 'url'\nimport { IgnoreLike } from './ignore.js'\nimport { Pattern } from './pattern.js'\nimport { GlobStream, GlobWalker } from './walker.js'\n\nexport type MatchSet = Minimatch['set']\nexport type GlobParts = Exclude\n\n// if no process global, just call it linux.\n// so we default to case-sensitive, / separators\nconst defaultPlatform: NodeJS.Platform =\n typeof process === 'object' &&\n process &&\n typeof process.platform === 'string'\n ? process.platform\n : 'linux'\n\n/**\n * A `GlobOptions` object may be provided to any of the exported methods, and\n * must be provided to the `Glob` constructor.\n *\n * All options are optional, boolean, and false by default, unless otherwise\n * noted.\n *\n * All resolved options are added to the Glob object as properties.\n *\n * If you are running many `glob` operations, you can pass a Glob object as the\n * `options` argument to a subsequent operation to share the previously loaded\n * cache.\n */\nexport interface GlobOptions {\n /**\n * Set to `true` to always receive absolute paths for\n * matched files. Set to `false` to always return relative paths.\n *\n * When this option is not set, absolute paths are returned for patterns\n * that are absolute, and otherwise paths are returned that are relative\n * to the `cwd` setting.\n *\n * This does _not_ make an extra system call to get\n * the realpath, it only does string path resolution.\n *\n * Conflicts with {@link withFileTypes}\n */\n absolute?: boolean\n\n /**\n * Set to false to enable {@link windowsPathsNoEscape}\n *\n * @deprecated\n */\n allowWindowsEscape?: boolean\n\n /**\n * The current working directory in which to search. Defaults to\n * `process.cwd()`.\n *\n * May be eiher a string path or a `file://` URL object or string.\n */\n cwd?: string | URL\n\n /**\n * Include `.dot` files in normal matches and `globstar`\n * matches. Note that an explicit dot in a portion of the pattern\n * will always match dot files.\n */\n dot?: boolean\n\n /**\n * Prepend all relative path strings with `./` (or `.\\` on Windows).\n *\n * Without this option, returned relative paths are \"bare\", so instead of\n * returning `'./foo/bar'`, they are returned as `'foo/bar'`.\n *\n * Relative patterns starting with `'../'` are not prepended with `./`, even\n * if this option is set.\n */\n dotRelative?: boolean\n\n /**\n * Follow symlinked directories when expanding `**`\n * patterns. This can result in a lot of duplicate references in\n * the presence of cyclic links, and make performance quite bad.\n *\n * By default, a `**` in a pattern will follow 1 symbolic link if\n * it is not the first item in the pattern, or none if it is the\n * first item in the pattern, following the same behavior as Bash.\n */\n follow?: boolean\n\n /**\n * string or string[], or an object with `ignore` and `ignoreChildren`\n * methods.\n *\n * If a string or string[] is provided, then this is treated as a glob\n * pattern or array of glob patterns to exclude from matches. 
To ignore all\n * children within a directory, as well as the entry itself, append `'/**'`\n * to the ignore pattern.\n *\n * **Note** `ignore` patterns are _always_ in `dot:true` mode, regardless of\n * any other settings.\n *\n * If an object is provided that has `ignored(path)` and/or\n * `childrenIgnored(path)` methods, then these methods will be called to\n * determine whether any Path is a match or if its children should be\n * traversed, respectively.\n */\n ignore?: string | string[] | IgnoreLike\n\n /**\n * Treat brace expansion like `{a,b}` as a \"magic\" pattern. Has no\n * effect if {@link nobrace} is set.\n *\n * Only has effect on the {@link hasMagic} function.\n */\n magicalBraces?: boolean\n\n /**\n * Add a `/` character to directory matches. Note that this requires\n * additional stat calls in some cases.\n */\n mark?: boolean\n\n /**\n * Perform a basename-only match if the pattern does not contain any slash\n * characters. That is, `*.js` would be treated as equivalent to\n * `**\\/*.js`, matching all js files in all directories.\n */\n matchBase?: boolean\n\n /**\n * Limit the directory traversal to a given depth below the cwd.\n * Note that this does NOT prevent traversal to sibling folders,\n * root patterns, and so on. It only limits the maximum folder depth\n * that the walk will descend, relative to the cwd.\n */\n maxDepth?: number\n\n /**\n * Do not expand `{a,b}` and `{1..3}` brace sets.\n */\n nobrace?: boolean\n\n /**\n * Perform a case-insensitive match. This defaults to `true` on macOS and\n * Windows systems, and `false` on all others.\n *\n * **Note** `nocase` should only be explicitly set when it is\n * known that the filesystem's case sensitivity differs from the\n * platform default. If set `true` on case-sensitive file\n * systems, or `false` on case-insensitive file systems, then the\n * walk may return more or less results than expected.\n */\n nocase?: boolean\n\n /**\n * Do not match directories, only files. (Note: to match\n * _only_ directories, put a `/` at the end of the pattern.)\n */\n nodir?: boolean\n\n /**\n * Do not match \"extglob\" patterns such as `+(a|b)`.\n */\n noext?: boolean\n\n /**\n * Do not match `**` against multiple filenames. (Ie, treat it as a normal\n * `*` instead.)\n *\n * Conflicts with {@link matchBase}\n */\n noglobstar?: boolean\n\n /**\n * Defaults to value of `process.platform` if available, or `'linux'` if\n * not. Setting `platform:'win32'` on non-Windows systems may cause strange\n * behavior.\n */\n platform?: NodeJS.Platform\n\n /**\n * Set to true to call `fs.realpath` on all of the\n * results. In the case of an entry that cannot be resolved, the\n * entry is omitted. This incurs a slight performance penalty, of\n * course, because of the added system calls.\n */\n realpath?: boolean\n\n /**\n *\n * A string path resolved against the `cwd` option, which\n * is used as the starting point for absolute patterns that start\n * with `/`, (but not drive letters or UNC paths on Windows).\n *\n * Note that this _doesn't_ necessarily limit the walk to the\n * `root` directory, and doesn't affect the cwd starting point for\n * non-absolute patterns. A pattern containing `..` will still be\n * able to traverse out of the root directory, if it is not an\n * actual root directory on the filesystem, and any non-absolute\n * patterns will be matched in the `cwd`. For example, the\n * pattern `/../*` with `{root:'/some/path'}` will return all\n * files in `/some`, not all files in `/some/path`. 
The pattern\n * `*` with `{root:'/some/path'}` will return all the entries in\n * the cwd, not the entries in `/some/path`.\n *\n * To start absolute and non-absolute patterns in the same\n * path, you can use `{root:''}`. However, be aware that on\n * Windows systems, a pattern like `x:/*` or `//host/share/*` will\n * _always_ start in the `x:/` or `//host/share` directory,\n * regardless of the `root` setting.\n */\n root?: string\n\n /**\n * A [PathScurry](http://npm.im/path-scurry) object used\n * to traverse the file system. If the `nocase` option is set\n * explicitly, then any provided `scurry` object must match this\n * setting.\n */\n scurry?: PathScurry\n\n /**\n * Call `lstat()` on all entries, whether required or not to determine\n * if it's a valid match. When used with {@link withFileTypes}, this means\n * that matches will include data such as modified time, permissions, and\n * so on. Note that this will incur a performance cost due to the added\n * system calls.\n */\n stat?: boolean\n\n /**\n * An AbortSignal which will cancel the Glob walk when\n * triggered.\n */\n signal?: AbortSignal\n\n /**\n * Use `\\\\` as a path separator _only_, and\n * _never_ as an escape character. If set, all `\\\\` characters are\n * replaced with `/` in the pattern.\n *\n * Note that this makes it **impossible** to match against paths\n * containing literal glob pattern characters, but allows matching\n * with patterns constructed using `path.join()` and\n * `path.resolve()` on Windows platforms, mimicking the (buggy!)\n * behavior of Glob v7 and before on Windows. Please use with\n * caution, and be mindful of [the caveat below about Windows\n * paths](#windows). (For legacy reasons, this is also set if\n * `allowWindowsEscape` is set to the exact value `false`.)\n */\n windowsPathsNoEscape?: boolean\n\n /**\n * Return [PathScurry](http://npm.im/path-scurry)\n * `Path` objects instead of strings. These are similar to a\n * NodeJS `Dirent` object, but with additional methods and\n * properties.\n *\n * Conflicts with {@link absolute}\n */\n withFileTypes?: boolean\n\n /**\n * An fs implementation to override some or all of the defaults. See\n * http://npm.im/path-scurry for details about what can be overridden.\n */\n fs?: FSOption\n\n /**\n * Just passed along to Minimatch. Note that this makes all pattern\n * matching operations slower and *extremely* noisy.\n */\n debug?: boolean\n\n /**\n * Return `/` delimited paths, even on Windows.\n *\n * On posix systems, this has no effect. But, on Windows, it means that\n * paths will be `/` delimited, and absolute paths will be their full\n * resolved UNC forms, eg instead of `'C:\\\\foo\\\\bar'`, it would return\n * `'//?/C:/foo/bar'`\n */\n posix?: boolean\n}\n\nexport type GlobOptionsWithFileTypesTrue = GlobOptions & {\n withFileTypes: true\n // string options not relevant if returning Path objects.\n absolute?: undefined\n mark?: undefined\n posix?: undefined\n}\n\nexport type GlobOptionsWithFileTypesFalse = GlobOptions & {\n withFileTypes?: false\n}\n\nexport type GlobOptionsWithFileTypesUnset = GlobOptions & {\n withFileTypes?: undefined\n}\n\nexport type Result = Opts extends GlobOptionsWithFileTypesTrue\n ? Path\n : Opts extends GlobOptionsWithFileTypesFalse\n ? string\n : Opts extends GlobOptionsWithFileTypesUnset\n ? string\n : string | Path\nexport type Results = Result[]\n\nexport type FileTypes = Opts extends GlobOptionsWithFileTypesTrue\n ? true\n : Opts extends GlobOptionsWithFileTypesFalse\n ? 
false\n : Opts extends GlobOptionsWithFileTypesUnset\n ? false\n : boolean\n\n/**\n * An object that can perform glob pattern traversals.\n */\nexport class Glob implements GlobOptions {\n absolute?: boolean\n cwd: string\n root?: string\n dot: boolean\n dotRelative: boolean\n follow: boolean\n ignore?: string | string[] | IgnoreLike\n magicalBraces: boolean\n mark?: boolean\n matchBase: boolean\n maxDepth: number\n nobrace: boolean\n nocase: boolean\n nodir: boolean\n noext: boolean\n noglobstar: boolean\n pattern: string[]\n platform: NodeJS.Platform\n realpath: boolean\n scurry: PathScurry\n stat: boolean\n signal?: AbortSignal\n windowsPathsNoEscape: boolean\n withFileTypes: FileTypes\n\n /**\n * The options provided to the constructor.\n */\n opts: Opts\n\n /**\n * An array of parsed immutable {@link Pattern} objects.\n */\n patterns: Pattern[]\n\n /**\n * All options are stored as properties on the `Glob` object.\n *\n * See {@link GlobOptions} for full options descriptions.\n *\n * Note that a previous `Glob` object can be passed as the\n * `GlobOptions` to another `Glob` instantiation to re-use settings\n * and caches with a new pattern.\n *\n * Traversal functions can be called multiple times to run the walk\n * again.\n */\n constructor(pattern: string | string[], opts: Opts) {\n this.withFileTypes = !!opts.withFileTypes as FileTypes\n this.signal = opts.signal\n this.follow = !!opts.follow\n this.dot = !!opts.dot\n this.dotRelative = !!opts.dotRelative\n this.nodir = !!opts.nodir\n this.mark = !!opts.mark\n if (!opts.cwd) {\n this.cwd = ''\n } else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) {\n opts.cwd = fileURLToPath(opts.cwd)\n }\n this.cwd = opts.cwd || ''\n this.root = opts.root\n this.magicalBraces = !!opts.magicalBraces\n this.nobrace = !!opts.nobrace\n this.noext = !!opts.noext\n this.realpath = !!opts.realpath\n this.absolute = opts.absolute\n\n this.noglobstar = !!opts.noglobstar\n this.matchBase = !!opts.matchBase\n this.maxDepth =\n typeof opts.maxDepth === 'number' ? opts.maxDepth : Infinity\n this.stat = !!opts.stat\n this.ignore = opts.ignore\n\n if (this.withFileTypes && this.absolute !== undefined) {\n throw new Error('cannot set absolute and withFileTypes:true')\n }\n\n if (typeof pattern === 'string') {\n pattern = [pattern]\n }\n\n this.windowsPathsNoEscape =\n !!opts.windowsPathsNoEscape ||\n (opts as GlobOptions).allowWindowsEscape === false\n\n if (this.windowsPathsNoEscape) {\n pattern = pattern.map(p => p.replace(/\\\\/g, '/'))\n }\n\n if (this.matchBase) {\n if (opts.noglobstar) {\n throw new TypeError('base matching requires globstar')\n }\n pattern = pattern.map(p => (p.includes('/') ? p : `./**/${p}`))\n }\n\n this.pattern = pattern\n\n this.platform = opts.platform || defaultPlatform\n this.opts = { ...opts, platform: this.platform }\n if (opts.scurry) {\n this.scurry = opts.scurry\n if (\n opts.nocase !== undefined &&\n opts.nocase !== opts.scurry.nocase\n ) {\n throw new Error('nocase option contradicts provided scurry option')\n }\n } else {\n const Scurry =\n opts.platform === 'win32'\n ? PathScurryWin32\n : opts.platform === 'darwin'\n ? PathScurryDarwin\n : opts.platform\n ? 
PathScurryPosix\n : PathScurry\n this.scurry = new Scurry(this.cwd, {\n nocase: opts.nocase,\n fs: opts.fs,\n })\n }\n this.nocase = this.scurry.nocase\n\n // If you do nocase:true on a case-sensitive file system, then\n // we need to use regexps instead of strings for non-magic\n // path portions, because statting `aBc` won't return results\n // for the file `AbC` for example.\n const nocaseMagicOnly =\n this.platform === 'darwin' || this.platform === 'win32'\n\n const mmo: MinimatchOptions = {\n // default nocase based on platform\n ...opts,\n dot: this.dot,\n matchBase: this.matchBase,\n nobrace: this.nobrace,\n nocase: this.nocase,\n nocaseMagicOnly,\n nocomment: true,\n noext: this.noext,\n nonegate: true,\n optimizationLevel: 2,\n platform: this.platform,\n windowsPathsNoEscape: this.windowsPathsNoEscape,\n debug: !!this.opts.debug,\n }\n\n const mms = this.pattern.map(p => new Minimatch(p, mmo))\n const [matchSet, globParts] = mms.reduce(\n (set: [MatchSet, GlobParts], m) => {\n set[0].push(...m.set)\n set[1].push(...m.globParts)\n return set\n },\n [[], []]\n )\n this.patterns = matchSet.map((set, i) => {\n return new Pattern(set, globParts[i], 0, this.platform)\n })\n }\n\n /**\n * Returns a Promise that resolves to the results array.\n */\n async walk(): Promise>\n async walk(): Promise<(string | Path)[]> {\n // Walkers always return array of Path objects, so we just have to\n // coerce them into the right shape. It will have already called\n // realpath() if the option was set to do so, so we know that's cached.\n // start out knowing the cwd, at least\n return [\n ...(await new GlobWalker(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).walk()),\n ]\n }\n\n /**\n * synchronous {@link Glob.walk}\n */\n walkSync(): Results\n walkSync(): (string | Path)[] {\n return [\n ...new GlobWalker(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).walkSync(),\n ]\n }\n\n /**\n * Stream results asynchronously.\n */\n stream(): Minipass, Result>\n stream(): Minipass {\n return new GlobStream(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).stream()\n }\n\n /**\n * Stream results synchronously.\n */\n streamSync(): Minipass, Result>\n streamSync(): Minipass {\n return new GlobStream(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).streamSync()\n }\n\n /**\n * Default sync iteration function. Returns a Generator that\n * iterates over the results.\n */\n iterateSync(): Generator, void, void> {\n return this.streamSync()[Symbol.iterator]()\n }\n [Symbol.iterator]() {\n return this.iterateSync()\n }\n\n /**\n * Default async iteration function. 
Returns an AsyncGenerator that\n * iterates over the results.\n */\n iterate(): AsyncGenerator, void, void> {\n return this.stream()[Symbol.asyncIterator]()\n }\n [Symbol.asyncIterator]() {\n return this.iterate()\n }\n}\n"]} \ No newline at end of file +{"version":3,"file":"glob.js","sourceRoot":"","sources":["../../src/glob.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAoB,MAAM,WAAW,CAAA;AAEvD,OAAO,EAGL,UAAU,EACV,gBAAgB,EAChB,eAAe,EACf,eAAe,GAChB,MAAM,aAAa,CAAA;AACpB,OAAO,EAAE,aAAa,EAAE,MAAM,KAAK,CAAA;AAEnC,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAA;AACtC,OAAO,EAAE,UAAU,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AAKpD,4CAA4C;AAC5C,gDAAgD;AAChD,MAAM,eAAe,GACnB,OAAO,OAAO,KAAK,QAAQ;IAC3B,OAAO;IACP,OAAO,OAAO,CAAC,QAAQ,KAAK,QAAQ;IAClC,CAAC,CAAC,OAAO,CAAC,QAAQ;IAClB,CAAC,CAAC,OAAO,CAAA;AAgTb;;GAEG;AACH,MAAM,OAAO,IAAI;IACf,QAAQ,CAAU;IAClB,GAAG,CAAQ;IACX,IAAI,CAAS;IACb,GAAG,CAAS;IACZ,WAAW,CAAS;IACpB,MAAM,CAAS;IACf,MAAM,CAAiC;IACvC,aAAa,CAAS;IACtB,IAAI,CAAU;IACd,SAAS,CAAS;IAClB,QAAQ,CAAQ;IAChB,OAAO,CAAS;IAChB,MAAM,CAAS;IACf,KAAK,CAAS;IACd,KAAK,CAAS;IACd,UAAU,CAAS;IACnB,OAAO,CAAU;IACjB,QAAQ,CAAiB;IACzB,QAAQ,CAAS;IACjB,MAAM,CAAY;IAClB,IAAI,CAAS;IACb,MAAM,CAAc;IACpB,oBAAoB,CAAS;IAC7B,aAAa,CAAiB;IAE9B;;OAEG;IACH,IAAI,CAAM;IAEV;;OAEG;IACH,QAAQ,CAAW;IAEnB;;;;;;;;;;;OAWG;IACH,YAAY,OAA0B,EAAE,IAAU;QAChD,qBAAqB;QACrB,IAAI,CAAC,IAAI;YAAE,MAAM,IAAI,SAAS,CAAC,uBAAuB,CAAC,CAAA;QACvD,oBAAoB;QACpB,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC,IAAI,CAAC,aAAgC,CAAA;QAC5D,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;QACzB,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,IAAI,CAAC,MAAM,CAAA;QAC3B,IAAI,CAAC,GAAG,GAAG,CAAC,CAAC,IAAI,CAAC,GAAG,CAAA;QACrB,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC,IAAI,CAAC,WAAW,CAAA;QACrC,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,IAAI,CAAC,KAAK,CAAA;QACzB,IAAI,CAAC,IAAI,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,CAAA;QACvB,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE;YACb,IAAI,CAAC,GAAG,GAAG,EAAE,CAAA;SACd;aAAM,IAAI,IAAI,CAAC,GAAG,YAAY,GAAG,IAAI,IAAI,CAAC,GAAG,CAAC,UAAU,CAAC,SAAS,CAAC,EAAE;YACpE,IAAI,CAAC,GAAG,GAAG,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;SACnC;QACD,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,IAAI,EAAE,CAAA;QACzB,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,CAAA;QACrB,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC,IAAI,CAAC,aAAa,CAAA;QACzC,IAAI,CAAC,OAAO,GAAG,CAAC,CAAC,IAAI,CAAC,OAAO,CAAA;QAC7B,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,IAAI,CAAC,KAAK,CAAA;QACzB,IAAI,CAAC,QAAQ,GAAG,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAA;QAC/B,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAA;QAE7B,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC,IAAI,CAAC,UAAU,CAAA;QACnC,IAAI,CAAC,SAAS,GAAG,CAAC,CAAC,IAAI,CAAC,SAAS,CAAA;QACjC,IAAI,CAAC,QAAQ;YACX,OAAO,IAAI,CAAC,QAAQ,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAA;QAC9D,IAAI,CAAC,IAAI,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,CAAA;QACvB,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;QAEzB,IAAI,IAAI,CAAC,aAAa,IAAI,IAAI,CAAC,QAAQ,KAAK,SAAS,EAAE;YACrD,MAAM,IAAI,KAAK,CAAC,4CAA4C,CAAC,CAAA;SAC9D;QAED,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;YAC/B,OAAO,GAAG,CAAC,OAAO,CAAC,CAAA;SACpB;QAED,IAAI,CAAC,oBAAoB;YACvB,CAAC,CAAC,IAAI,CAAC,oBAAoB;gBAC1B,IAAoB,CAAC,kBAAkB,KAAK,KAAK,CAAA;QAEpD,IAAI,IAAI,CAAC,oBAAoB,EAAE;YAC7B,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAA;SAClD;QAED,IAAI,IAAI,CAAC,SAAS,EAAE;YAClB,IAAI,IAAI,CAAC,UAAU,EAAE;gBACnB,MAAM,IAAI,SAAS,CAAC,iCAAiC,CAAC,CAAA;aACvD;YACD,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAA;SAChE;QAED,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;QAEtB,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,IAAI,eAAe,CAAA;QAChD,IAAI,CAAC,IAAI,GAAG,EAAE,GAAG,IAAI,EAAE,QAAQ,EAAE,IAAI,CAAC,QAAQ,EAAE,CAAA;Q
AChD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;YACzB,IACE,IAAI,CAAC,MAAM,KAAK,SAAS;gBACzB,IAAI,CAAC,MAAM,KAAK,IAAI,CAAC,MAAM,CAAC,MAAM,EAClC;gBACA,MAAM,IAAI,KAAK,CAAC,kDAAkD,CAAC,CAAA;aACpE;SACF;aAAM;YACL,MAAM,MAAM,GACV,IAAI,CAAC,QAAQ,KAAK,OAAO;gBACvB,CAAC,CAAC,eAAe;gBACjB,CAAC,CAAC,IAAI,CAAC,QAAQ,KAAK,QAAQ;oBAC5B,CAAC,CAAC,gBAAgB;oBAClB,CAAC,CAAC,IAAI,CAAC,QAAQ;wBACf,CAAC,CAAC,eAAe;wBACjB,CAAC,CAAC,UAAU,CAAA;YAChB,IAAI,CAAC,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE;gBACjC,MAAM,EAAE,IAAI,CAAC,MAAM;gBACnB,EAAE,EAAE,IAAI,CAAC,EAAE;aACZ,CAAC,CAAA;SACH;QACD,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,CAAA;QAEhC,8DAA8D;QAC9D,0DAA0D;QAC1D,6DAA6D;QAC7D,kCAAkC;QAClC,MAAM,eAAe,GACnB,IAAI,CAAC,QAAQ,KAAK,QAAQ,IAAI,IAAI,CAAC,QAAQ,KAAK,OAAO,CAAA;QAEzD,MAAM,GAAG,GAAqB;YAC5B,mCAAmC;YACnC,GAAG,IAAI;YACP,GAAG,EAAE,IAAI,CAAC,GAAG;YACb,SAAS,EAAE,IAAI,CAAC,SAAS;YACzB,OAAO,EAAE,IAAI,CAAC,OAAO;YACrB,MAAM,EAAE,IAAI,CAAC,MAAM;YACnB,eAAe;YACf,SAAS,EAAE,IAAI;YACf,KAAK,EAAE,IAAI,CAAC,KAAK;YACjB,QAAQ,EAAE,IAAI;YACd,iBAAiB,EAAE,CAAC;YACpB,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,oBAAoB,EAAE,IAAI,CAAC,oBAAoB;YAC/C,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK;SACzB,CAAA;QAED,MAAM,GAAG,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,SAAS,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAA;QACxD,MAAM,CAAC,QAAQ,EAAE,SAAS,CAAC,GAAG,GAAG,CAAC,MAAM,CACtC,CAAC,GAA0B,EAAE,CAAC,EAAE,EAAE;YAChC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAA;YACrB,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,SAAS,CAAC,CAAA;YAC3B,OAAO,GAAG,CAAA;QACZ,CAAC,EACD,CAAC,EAAE,EAAE,EAAE,CAAC,CACT,CAAA;QACD,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,CAAC,EAAE,EAAE;YACtC,OAAO,IAAI,OAAO,CAAC,GAAG,EAAE,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAA;QACzD,CAAC,CAAC,CAAA;IACJ,CAAC;IAMD,KAAK,CAAC,IAAI;QACR,kEAAkE;QAClE,iEAAiE;QACjE,uEAAuE;QACvE,sCAAsC;QACtC,OAAO;YACL,GAAG,CAAC,MAAM,IAAI,UAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;gBACvD,GAAG,IAAI,CAAC,IAAI;gBACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;oBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;oBACzC,CAAC,CAAC,QAAQ;gBACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;gBACvB,MAAM,EAAE,IAAI,CAAC,MAAM;aACpB,CAAC,CAAC,IAAI,EAAE,CAAC;SACX,CAAA;IACH,CAAC;IAMD,QAAQ;QACN,OAAO;YACL,GAAG,IAAI,UAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;gBAChD,GAAG,IAAI,CAAC,IAAI;gBACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;oBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;oBACzC,CAAC,CAAC,QAAQ;gBACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;gBACvB,MAAM,EAAE,IAAI,CAAC,MAAM;aACpB,CAAC,CAAC,QAAQ,EAAE;SACd,CAAA;IACH,CAAC;IAMD,MAAM;QACJ,OAAO,IAAI,UAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;YACpD,GAAG,IAAI,CAAC,IAAI;YACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;gBACzC,CAAC,CAAC,QAAQ;YACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,MAAM,EAAE,IAAI,CAAC,MAAM;SACpB,CAAC,CAAC,MAAM,EAAE,CAAA;IACb,CAAC;IAMD,UAAU;QACR,OAAO,IAAI,UAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;YACpD,GAAG,IAAI,CAAC,IAAI;YACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;gBACzC,CAAC,CAAC,QAAQ;YACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,MAAM,EAAE,IAAI,CAAC,MAAM;SACpB,CAAC,CAAC,UAAU,EAAE,CAAA;IACjB,CAAC;IAED;;;OAGG;IACH,WAAW;QACT,OAAO,IAAI,CAAC,UAAU,EAAE,CAAC,MAAM,CAAC,QAAQ,CAAC,EAAE,CAAA;IAC7C,CAAC;IACD,CAAC,MAAM,CAAC,QAAQ,CAAC;QACf,OAAO,IAAI,CAAC,WAAW,EAAE,CAAA;IAC3B,CAAC;IAED;;;OAGG;IACH,OAAO;QACL,OAAO,IAAI,CAAC,MAAM,EAAE,CA
AC,MAAM,CAAC,aAAa,CAAC,EAAE,CAAA;IAC9C,CAAC;IACD,CAAC,MAAM,CAAC,aAAa,CAAC;QACpB,OAAO,IAAI,CAAC,OAAO,EAAE,CAAA;IACvB,CAAC;CACF","sourcesContent":["import { Minimatch, MinimatchOptions } from 'minimatch'\nimport { Minipass } from 'minipass'\nimport {\n FSOption,\n Path,\n PathScurry,\n PathScurryDarwin,\n PathScurryPosix,\n PathScurryWin32,\n} from 'path-scurry'\nimport { fileURLToPath } from 'url'\nimport { IgnoreLike } from './ignore.js'\nimport { Pattern } from './pattern.js'\nimport { GlobStream, GlobWalker } from './walker.js'\n\nexport type MatchSet = Minimatch['set']\nexport type GlobParts = Exclude\n\n// if no process global, just call it linux.\n// so we default to case-sensitive, / separators\nconst defaultPlatform: NodeJS.Platform =\n typeof process === 'object' &&\n process &&\n typeof process.platform === 'string'\n ? process.platform\n : 'linux'\n\n/**\n * A `GlobOptions` object may be provided to any of the exported methods, and\n * must be provided to the `Glob` constructor.\n *\n * All options are optional, boolean, and false by default, unless otherwise\n * noted.\n *\n * All resolved options are added to the Glob object as properties.\n *\n * If you are running many `glob` operations, you can pass a Glob object as the\n * `options` argument to a subsequent operation to share the previously loaded\n * cache.\n */\nexport interface GlobOptions {\n /**\n * Set to `true` to always receive absolute paths for\n * matched files. Set to `false` to always return relative paths.\n *\n * When this option is not set, absolute paths are returned for patterns\n * that are absolute, and otherwise paths are returned that are relative\n * to the `cwd` setting.\n *\n * This does _not_ make an extra system call to get\n * the realpath, it only does string path resolution.\n *\n * Conflicts with {@link withFileTypes}\n */\n absolute?: boolean\n\n /**\n * Set to false to enable {@link windowsPathsNoEscape}\n *\n * @deprecated\n */\n allowWindowsEscape?: boolean\n\n /**\n * The current working directory in which to search. Defaults to\n * `process.cwd()`.\n *\n * May be eiher a string path or a `file://` URL object or string.\n */\n cwd?: string | URL\n\n /**\n * Include `.dot` files in normal matches and `globstar`\n * matches. Note that an explicit dot in a portion of the pattern\n * will always match dot files.\n */\n dot?: boolean\n\n /**\n * Prepend all relative path strings with `./` (or `.\\` on Windows).\n *\n * Without this option, returned relative paths are \"bare\", so instead of\n * returning `'./foo/bar'`, they are returned as `'foo/bar'`.\n *\n * Relative patterns starting with `'../'` are not prepended with `./`, even\n * if this option is set.\n */\n dotRelative?: boolean\n\n /**\n * Follow symlinked directories when expanding `**`\n * patterns. This can result in a lot of duplicate references in\n * the presence of cyclic links, and make performance quite bad.\n *\n * By default, a `**` in a pattern will follow 1 symbolic link if\n * it is not the first item in the pattern, or none if it is the\n * first item in the pattern, following the same behavior as Bash.\n */\n follow?: boolean\n\n /**\n * string or string[], or an object with `ignore` and `ignoreChildren`\n * methods.\n *\n * If a string or string[] is provided, then this is treated as a glob\n * pattern or array of glob patterns to exclude from matches. 
To ignore all\n * children within a directory, as well as the entry itself, append `'/**'`\n * to the ignore pattern.\n *\n * **Note** `ignore` patterns are _always_ in `dot:true` mode, regardless of\n * any other settings.\n *\n * If an object is provided that has `ignored(path)` and/or\n * `childrenIgnored(path)` methods, then these methods will be called to\n * determine whether any Path is a match or if its children should be\n * traversed, respectively.\n */\n ignore?: string | string[] | IgnoreLike\n\n /**\n * Treat brace expansion like `{a,b}` as a \"magic\" pattern. Has no\n * effect if {@link nobrace} is set.\n *\n * Only has effect on the {@link hasMagic} function.\n */\n magicalBraces?: boolean\n\n /**\n * Add a `/` character to directory matches. Note that this requires\n * additional stat calls in some cases.\n */\n mark?: boolean\n\n /**\n * Perform a basename-only match if the pattern does not contain any slash\n * characters. That is, `*.js` would be treated as equivalent to\n * `**\\/*.js`, matching all js files in all directories.\n */\n matchBase?: boolean\n\n /**\n * Limit the directory traversal to a given depth below the cwd.\n * Note that this does NOT prevent traversal to sibling folders,\n * root patterns, and so on. It only limits the maximum folder depth\n * that the walk will descend, relative to the cwd.\n */\n maxDepth?: number\n\n /**\n * Do not expand `{a,b}` and `{1..3}` brace sets.\n */\n nobrace?: boolean\n\n /**\n * Perform a case-insensitive match. This defaults to `true` on macOS and\n * Windows systems, and `false` on all others.\n *\n * **Note** `nocase` should only be explicitly set when it is\n * known that the filesystem's case sensitivity differs from the\n * platform default. If set `true` on case-sensitive file\n * systems, or `false` on case-insensitive file systems, then the\n * walk may return more or less results than expected.\n */\n nocase?: boolean\n\n /**\n * Do not match directories, only files. (Note: to match\n * _only_ directories, put a `/` at the end of the pattern.)\n */\n nodir?: boolean\n\n /**\n * Do not match \"extglob\" patterns such as `+(a|b)`.\n */\n noext?: boolean\n\n /**\n * Do not match `**` against multiple filenames. (Ie, treat it as a normal\n * `*` instead.)\n *\n * Conflicts with {@link matchBase}\n */\n noglobstar?: boolean\n\n /**\n * Defaults to value of `process.platform` if available, or `'linux'` if\n * not. Setting `platform:'win32'` on non-Windows systems may cause strange\n * behavior.\n */\n platform?: NodeJS.Platform\n\n /**\n * Set to true to call `fs.realpath` on all of the\n * results. In the case of an entry that cannot be resolved, the\n * entry is omitted. This incurs a slight performance penalty, of\n * course, because of the added system calls.\n */\n realpath?: boolean\n\n /**\n *\n * A string path resolved against the `cwd` option, which\n * is used as the starting point for absolute patterns that start\n * with `/`, (but not drive letters or UNC paths on Windows).\n *\n * Note that this _doesn't_ necessarily limit the walk to the\n * `root` directory, and doesn't affect the cwd starting point for\n * non-absolute patterns. A pattern containing `..` will still be\n * able to traverse out of the root directory, if it is not an\n * actual root directory on the filesystem, and any non-absolute\n * patterns will be matched in the `cwd`. For example, the\n * pattern `/../*` with `{root:'/some/path'}` will return all\n * files in `/some`, not all files in `/some/path`. 
The pattern\n * `*` with `{root:'/some/path'}` will return all the entries in\n * the cwd, not the entries in `/some/path`.\n *\n * To start absolute and non-absolute patterns in the same\n * path, you can use `{root:''}`. However, be aware that on\n * Windows systems, a pattern like `x:/*` or `//host/share/*` will\n * _always_ start in the `x:/` or `//host/share` directory,\n * regardless of the `root` setting.\n */\n root?: string\n\n /**\n * A [PathScurry](http://npm.im/path-scurry) object used\n * to traverse the file system. If the `nocase` option is set\n * explicitly, then any provided `scurry` object must match this\n * setting.\n */\n scurry?: PathScurry\n\n /**\n * Call `lstat()` on all entries, whether required or not to determine\n * if it's a valid match. When used with {@link withFileTypes}, this means\n * that matches will include data such as modified time, permissions, and\n * so on. Note that this will incur a performance cost due to the added\n * system calls.\n */\n stat?: boolean\n\n /**\n * An AbortSignal which will cancel the Glob walk when\n * triggered.\n */\n signal?: AbortSignal\n\n /**\n * Use `\\\\` as a path separator _only_, and\n * _never_ as an escape character. If set, all `\\\\` characters are\n * replaced with `/` in the pattern.\n *\n * Note that this makes it **impossible** to match against paths\n * containing literal glob pattern characters, but allows matching\n * with patterns constructed using `path.join()` and\n * `path.resolve()` on Windows platforms, mimicking the (buggy!)\n * behavior of Glob v7 and before on Windows. Please use with\n * caution, and be mindful of [the caveat below about Windows\n * paths](#windows). (For legacy reasons, this is also set if\n * `allowWindowsEscape` is set to the exact value `false`.)\n */\n windowsPathsNoEscape?: boolean\n\n /**\n * Return [PathScurry](http://npm.im/path-scurry)\n * `Path` objects instead of strings. These are similar to a\n * NodeJS `Dirent` object, but with additional methods and\n * properties.\n *\n * Conflicts with {@link absolute}\n */\n withFileTypes?: boolean\n\n /**\n * An fs implementation to override some or all of the defaults. See\n * http://npm.im/path-scurry for details about what can be overridden.\n */\n fs?: FSOption\n\n /**\n * Just passed along to Minimatch. Note that this makes all pattern\n * matching operations slower and *extremely* noisy.\n */\n debug?: boolean\n\n /**\n * Return `/` delimited paths, even on Windows.\n *\n * On posix systems, this has no effect. But, on Windows, it means that\n * paths will be `/` delimited, and absolute paths will be their full\n * resolved UNC forms, eg instead of `'C:\\\\foo\\\\bar'`, it would return\n * `'//?/C:/foo/bar'`\n */\n posix?: boolean\n}\n\nexport type GlobOptionsWithFileTypesTrue = GlobOptions & {\n withFileTypes: true\n // string options not relevant if returning Path objects.\n absolute?: undefined\n mark?: undefined\n posix?: undefined\n}\n\nexport type GlobOptionsWithFileTypesFalse = GlobOptions & {\n withFileTypes?: false\n}\n\nexport type GlobOptionsWithFileTypesUnset = GlobOptions & {\n withFileTypes?: undefined\n}\n\nexport type Result = Opts extends GlobOptionsWithFileTypesTrue\n ? Path\n : Opts extends GlobOptionsWithFileTypesFalse\n ? string\n : Opts extends GlobOptionsWithFileTypesUnset\n ? string\n : string | Path\nexport type Results = Result[]\n\nexport type FileTypes = Opts extends GlobOptionsWithFileTypesTrue\n ? true\n : Opts extends GlobOptionsWithFileTypesFalse\n ? 
false\n : Opts extends GlobOptionsWithFileTypesUnset\n ? false\n : boolean\n\n/**\n * An object that can perform glob pattern traversals.\n */\nexport class Glob implements GlobOptions {\n absolute?: boolean\n cwd: string\n root?: string\n dot: boolean\n dotRelative: boolean\n follow: boolean\n ignore?: string | string[] | IgnoreLike\n magicalBraces: boolean\n mark?: boolean\n matchBase: boolean\n maxDepth: number\n nobrace: boolean\n nocase: boolean\n nodir: boolean\n noext: boolean\n noglobstar: boolean\n pattern: string[]\n platform: NodeJS.Platform\n realpath: boolean\n scurry: PathScurry\n stat: boolean\n signal?: AbortSignal\n windowsPathsNoEscape: boolean\n withFileTypes: FileTypes\n\n /**\n * The options provided to the constructor.\n */\n opts: Opts\n\n /**\n * An array of parsed immutable {@link Pattern} objects.\n */\n patterns: Pattern[]\n\n /**\n * All options are stored as properties on the `Glob` object.\n *\n * See {@link GlobOptions} for full options descriptions.\n *\n * Note that a previous `Glob` object can be passed as the\n * `GlobOptions` to another `Glob` instantiation to re-use settings\n * and caches with a new pattern.\n *\n * Traversal functions can be called multiple times to run the walk\n * again.\n */\n constructor(pattern: string | string[], opts: Opts) {\n /* c8 ignore start */\n if (!opts) throw new TypeError('glob options required')\n /* c8 ignore stop */\n this.withFileTypes = !!opts.withFileTypes as FileTypes\n this.signal = opts.signal\n this.follow = !!opts.follow\n this.dot = !!opts.dot\n this.dotRelative = !!opts.dotRelative\n this.nodir = !!opts.nodir\n this.mark = !!opts.mark\n if (!opts.cwd) {\n this.cwd = ''\n } else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) {\n opts.cwd = fileURLToPath(opts.cwd)\n }\n this.cwd = opts.cwd || ''\n this.root = opts.root\n this.magicalBraces = !!opts.magicalBraces\n this.nobrace = !!opts.nobrace\n this.noext = !!opts.noext\n this.realpath = !!opts.realpath\n this.absolute = opts.absolute\n\n this.noglobstar = !!opts.noglobstar\n this.matchBase = !!opts.matchBase\n this.maxDepth =\n typeof opts.maxDepth === 'number' ? opts.maxDepth : Infinity\n this.stat = !!opts.stat\n this.ignore = opts.ignore\n\n if (this.withFileTypes && this.absolute !== undefined) {\n throw new Error('cannot set absolute and withFileTypes:true')\n }\n\n if (typeof pattern === 'string') {\n pattern = [pattern]\n }\n\n this.windowsPathsNoEscape =\n !!opts.windowsPathsNoEscape ||\n (opts as GlobOptions).allowWindowsEscape === false\n\n if (this.windowsPathsNoEscape) {\n pattern = pattern.map(p => p.replace(/\\\\/g, '/'))\n }\n\n if (this.matchBase) {\n if (opts.noglobstar) {\n throw new TypeError('base matching requires globstar')\n }\n pattern = pattern.map(p => (p.includes('/') ? p : `./**/${p}`))\n }\n\n this.pattern = pattern\n\n this.platform = opts.platform || defaultPlatform\n this.opts = { ...opts, platform: this.platform }\n if (opts.scurry) {\n this.scurry = opts.scurry\n if (\n opts.nocase !== undefined &&\n opts.nocase !== opts.scurry.nocase\n ) {\n throw new Error('nocase option contradicts provided scurry option')\n }\n } else {\n const Scurry =\n opts.platform === 'win32'\n ? PathScurryWin32\n : opts.platform === 'darwin'\n ? PathScurryDarwin\n : opts.platform\n ? 
PathScurryPosix\n : PathScurry\n this.scurry = new Scurry(this.cwd, {\n nocase: opts.nocase,\n fs: opts.fs,\n })\n }\n this.nocase = this.scurry.nocase\n\n // If you do nocase:true on a case-sensitive file system, then\n // we need to use regexps instead of strings for non-magic\n // path portions, because statting `aBc` won't return results\n // for the file `AbC` for example.\n const nocaseMagicOnly =\n this.platform === 'darwin' || this.platform === 'win32'\n\n const mmo: MinimatchOptions = {\n // default nocase based on platform\n ...opts,\n dot: this.dot,\n matchBase: this.matchBase,\n nobrace: this.nobrace,\n nocase: this.nocase,\n nocaseMagicOnly,\n nocomment: true,\n noext: this.noext,\n nonegate: true,\n optimizationLevel: 2,\n platform: this.platform,\n windowsPathsNoEscape: this.windowsPathsNoEscape,\n debug: !!this.opts.debug,\n }\n\n const mms = this.pattern.map(p => new Minimatch(p, mmo))\n const [matchSet, globParts] = mms.reduce(\n (set: [MatchSet, GlobParts], m) => {\n set[0].push(...m.set)\n set[1].push(...m.globParts)\n return set\n },\n [[], []]\n )\n this.patterns = matchSet.map((set, i) => {\n return new Pattern(set, globParts[i], 0, this.platform)\n })\n }\n\n /**\n * Returns a Promise that resolves to the results array.\n */\n async walk(): Promise>\n async walk(): Promise<(string | Path)[]> {\n // Walkers always return array of Path objects, so we just have to\n // coerce them into the right shape. It will have already called\n // realpath() if the option was set to do so, so we know that's cached.\n // start out knowing the cwd, at least\n return [\n ...(await new GlobWalker(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).walk()),\n ]\n }\n\n /**\n * synchronous {@link Glob.walk}\n */\n walkSync(): Results\n walkSync(): (string | Path)[] {\n return [\n ...new GlobWalker(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).walkSync(),\n ]\n }\n\n /**\n * Stream results asynchronously.\n */\n stream(): Minipass, Result>\n stream(): Minipass {\n return new GlobStream(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).stream()\n }\n\n /**\n * Stream results synchronously.\n */\n streamSync(): Minipass, Result>\n streamSync(): Minipass {\n return new GlobStream(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).streamSync()\n }\n\n /**\n * Default sync iteration function. Returns a Generator that\n * iterates over the results.\n */\n iterateSync(): Generator, void, void> {\n return this.streamSync()[Symbol.iterator]()\n }\n [Symbol.iterator]() {\n return this.iterateSync()\n }\n\n /**\n * Default async iteration function. 
Returns an AsyncGenerator that\n * iterates over the results.\n */\n iterate(): AsyncGenerator, void, void> {\n return this.stream()[Symbol.asyncIterator]()\n }\n [Symbol.asyncIterator]() {\n return this.iterate()\n }\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/glob/dist/mjs/package.json b/deps/npm/node_modules/glob/dist/mjs/package.json index ac4c42f81fbd84..5cc80943d565b7 100644 --- a/deps/npm/node_modules/glob/dist/mjs/package.json +++ b/deps/npm/node_modules/glob/dist/mjs/package.json @@ -1,4 +1,4 @@ { - "version": "10.2.7", + "version": "10.3.3", "type": "module" } diff --git a/deps/npm/node_modules/glob/package.json b/deps/npm/node_modules/glob/package.json index ba9732c0f6de59..2d25985d2bbb5d 100644 --- a/deps/npm/node_modules/glob/package.json +++ b/deps/npm/node_modules/glob/package.json @@ -2,7 +2,7 @@ "author": "Isaac Z. Schlueter (https://blog.izs.me/)", "name": "glob", "description": "the most correct and second fastest glob implementation in JavaScript", - "version": "10.2.7", + "version": "10.3.3", "bin": "./dist/cjs/src/bin.js", "repository": { "type": "git", @@ -62,11 +62,11 @@ "foreground-child": "^3.1.0", "jackspeak": "^2.0.3", "minimatch": "^9.0.1", - "minipass": "^5.0.0 || ^6.0.2", - "path-scurry": "^1.7.0" + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0", + "path-scurry": "^1.10.1" }, "devDependencies": { - "@types/node": "^20.2.1", + "@types/node": "^20.3.2", "@types/tap": "^15.0.7", "c8": "^7.12.0", "memfs": "^3.4.13", diff --git a/deps/npm/node_modules/hosted-git-info/lib/index.js b/deps/npm/node_modules/hosted-git-info/lib/index.js index a7339c217e9a33..0c9d0b08c866b5 100644 --- a/deps/npm/node_modules/hosted-git-info/lib/index.js +++ b/deps/npm/node_modules/hosted-git-info/lib/index.js @@ -1,11 +1,11 @@ 'use strict' -const LRU = require('lru-cache') +const { LRUCache } = require('lru-cache') const hosts = require('./hosts.js') const fromUrl = require('./from-url.js') const parseUrl = require('./parse-url.js') -const cache = new LRU({ max: 1000 }) +const cache = new LRUCache({ max: 1000 }) class GitHost { constructor (type, user, auth, project, committish, defaultRepresentation, opts = {}) { diff --git a/deps/npm/node_modules/hosted-git-info/package.json b/deps/npm/node_modules/hosted-git-info/package.json index 612259948afe73..262a6c20fcf00b 100644 --- a/deps/npm/node_modules/hosted-git-info/package.json +++ b/deps/npm/node_modules/hosted-git-info/package.json @@ -1,6 +1,6 @@ { "name": "hosted-git-info", - "version": "6.1.1", + "version": "7.0.0", "description": "Provides metadata and conversions from repository urls for GitHub, Bitbucket and GitLab", "main": "./lib/index.js", "repository": { @@ -30,11 +30,11 @@ "template-oss-apply": "template-oss-apply --force" }, "dependencies": { - "lru-cache": "^7.5.1" + "lru-cache": "^10.0.1" }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.7.1", + "@npmcli/template-oss": "4.18.0", "tap": "^16.0.1" }, "files": [ @@ -42,7 +42,7 @@ "lib/" ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" }, "tap": { "color": 1, @@ -54,6 +54,13 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.7.1" + "version": "4.18.0", + "publish": "true", + "ciVersions": [ + "16.14.0", + "16.x", + "18.0.0", + "18.x" + ] } } diff --git a/deps/npm/node_modules/init-package-json/package.json b/deps/npm/node_modules/init-package-json/package.json index e2cb1fe25ebba7..a164169a74df3c 100644 --- a/deps/npm/node_modules/init-package-json/package.json +++ b/deps/npm/node_modules/init-package-json/package.json @@ -1,6 +1,6 @@ { "name": "init-package-json", - "version": "5.0.0", + "version": "6.0.0", "main": "lib/init-package-json.js", "scripts": { "test": "tap", @@ -19,22 +19,22 @@ "license": "ISC", "description": "A node module to get your node module started", "dependencies": { - "npm-package-arg": "^10.0.0", + "npm-package-arg": "^11.0.0", "promzard": "^1.0.0", "read": "^2.0.0", - "read-package-json": "^6.0.0", + "read-package-json": "^7.0.0", "semver": "^7.3.5", "validate-npm-package-license": "^3.0.4", "validate-npm-package-name": "^5.0.0" }, "devDependencies": { - "@npmcli/config": "^6.0.0", + "@npmcli/config": "^7.0.0", "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.11.3", + "@npmcli/template-oss": "4.18.0", "tap": "^16.0.1" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" }, "tap": { "statements": 95, @@ -63,6 +63,13 @@ ], "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.11.3" + "version": "4.18.0", + "publish": true, + "ciVersions": [ + "16.14.0", + "16.x", + "18.0.0", + "18.x" + ] } } diff --git a/deps/npm/node_modules/libnpmaccess/package.json b/deps/npm/node_modules/libnpmaccess/package.json index 713cf8c264c986..9185e364a37600 100644 --- a/deps/npm/node_modules/libnpmaccess/package.json +++ b/deps/npm/node_modules/libnpmaccess/package.json @@ -1,6 +1,6 @@ { "name": "libnpmaccess", - "version": "7.0.2", + "version": "8.0.0", "description": "programmatic library for `npm access` commands", "author": "GitHub Inc.", "license": "ISC", @@ -18,8 +18,8 @@ "@npmcli/eslint-config": "^4.0.0", "@npmcli/mock-registry": "^1.0.0", "@npmcli/template-oss": "4.18.0", - "nock": "^13.3.0", - "tap": "^16.3.4" + "nock": "^13.3.3", + "tap": "^16.3.8" }, "repository": { "type": "git", @@ -29,11 +29,11 @@ "bugs": "https://github.com/npm/libnpmaccess/issues", "homepage": "https://npmjs.com/package/libnpmaccess", "dependencies": { - "npm-package-arg": "^10.1.0", - "npm-registry-fetch": "^14.0.3" + "npm-package-arg": "^11.0.0", + "npm-registry-fetch": "^16.0.0" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" }, "files": [ "bin/", @@ -42,7 +42,13 @@ "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", "version": "4.18.0", - "content": "../../scripts/template-oss/index.js" + "content": "../../scripts/template-oss/index.js", + "ciVersions": [ + "16.14.0", + "16.x", + "18.0.0", + "18.x" + ] }, "tap": { "nyc-arg": [ diff --git a/deps/npm/node_modules/libnpmdiff/package.json b/deps/npm/node_modules/libnpmdiff/package.json index ce6eb3531b32ed..8fdf332e748377 100644 --- a/deps/npm/node_modules/libnpmdiff/package.json +++ b/deps/npm/node_modules/libnpmdiff/package.json @@ -1,6 +1,6 @@ { "name": "libnpmdiff", - "version": "5.0.19", + "version": "6.0.0", "description": "The registry diff", "repository": { "type": "git", @@ -13,7 +13,7 @@ "lib/" ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" }, "keywords": [ "npm", @@ -43,23 +43,29 @@ "devDependencies": { "@npmcli/eslint-config": "^4.0.0", "@npmcli/template-oss": "4.18.0", - "tap": "^16.3.4" + "tap": "^16.3.8" }, "dependencies": { - "@npmcli/arborist": "^6.3.0", + "@npmcli/arborist": "^7.0.0", "@npmcli/disparity-colors": "^3.0.0", "@npmcli/installed-package-contents": "^2.0.2", "binary-extensions": "^2.2.0", "diff": "^5.1.0", "minimatch": "^9.0.0", - "npm-package-arg": "^10.1.0", - "pacote": "^15.0.8", + "npm-package-arg": "^11.0.0", + "pacote": "^17.0.4", "tar": "^6.1.13" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", "version": "4.18.0", - "content": "../../scripts/template-oss/index.js" + "content": "../../scripts/template-oss/index.js", + "ciVersions": [ + "16.14.0", + "16.x", + "18.0.0", + "18.x" + ] }, "tap": { "nyc-arg": [ diff --git a/deps/npm/node_modules/libnpmexec/package.json b/deps/npm/node_modules/libnpmexec/package.json index 9b86b81a998ef7..dd515abf654d8d 100644 --- a/deps/npm/node_modules/libnpmexec/package.json +++ b/deps/npm/node_modules/libnpmexec/package.json @@ -1,13 +1,13 @@ { "name": "libnpmexec", - "version": "6.0.3", + "version": "7.0.0", "files": [ "bin/", "lib/" ], "main": "lib/index.js", "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" }, "description": "npm exec (npx) programmatic API", "repository": { @@ -56,15 +56,15 @@ "chalk": "^5.2.0", "just-extend": "^6.2.0", "just-safe-set": "^4.2.1", - "tap": "^16.3.4" + "tap": "^16.3.8" }, "dependencies": { - "@npmcli/arborist": "^6.3.0", - "@npmcli/run-script": "^6.0.0", + "@npmcli/arborist": "^7.0.0", + "@npmcli/run-script": "^7.0.1", "ci-info": "^3.7.1", - "npm-package-arg": "^10.1.0", + "npm-package-arg": "^11.0.0", "npmlog": "^7.0.1", - "pacote": "^15.0.8", + "pacote": "^17.0.4", "proc-log": "^3.0.0", "read": "^2.0.0", "read-package-json-fast": "^3.0.2", @@ -74,6 +74,12 @@ "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", "version": "4.18.0", - "content": "../../scripts/template-oss/index.js" + "content": "../../scripts/template-oss/index.js", + "ciVersions": [ + "16.14.0", + "16.x", + "18.0.0", + "18.x" + ] } } diff --git a/deps/npm/node_modules/libnpmfund/package.json b/deps/npm/node_modules/libnpmfund/package.json index 0c863c2f92203a..21bd395d63ead7 100644 --- a/deps/npm/node_modules/libnpmfund/package.json +++ b/deps/npm/node_modules/libnpmfund/package.json @@ -1,6 +1,6 @@ { "name": "libnpmfund", - "version": "4.0.19", + "version": "4.1.0", "main": "lib/index.js", "files": [ "bin/", @@ -42,10 +42,10 @@ "devDependencies": { "@npmcli/eslint-config": "^4.0.0", "@npmcli/template-oss": "4.18.0", - "tap": "^16.3.4" + "tap": "^16.3.8" }, "dependencies": { - "@npmcli/arborist": "^6.3.0" + "@npmcli/arborist": "^7.0.0" }, "engines": { "node": "^14.17.0 || ^16.13.0 || >=18.0.0" diff --git a/deps/npm/node_modules/libnpmhook/package.json b/deps/npm/node_modules/libnpmhook/package.json index 05b34dda75c416..a45f37652a804b 100644 --- a/deps/npm/node_modules/libnpmhook/package.json +++ b/deps/npm/node_modules/libnpmhook/package.json @@ -1,6 +1,6 @@ { "name": "libnpmhook", - "version": "9.0.3", + "version": "10.0.0", "description": "programmatic API for managing npm registry hooks", "main": "lib/index.js", "files": [ @@ -31,21 +31,27 @@ "license": "ISC", "dependencies": { "aproba": "^2.0.0", - "npm-registry-fetch": "^14.0.3" + "npm-registry-fetch": "^16.0.0" }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", "@npmcli/template-oss": "4.18.0", - "nock": "^13.3.0", - "tap": "^16.3.4" + "nock": "^13.3.3", + "tap": "^16.3.8" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", "version": "4.18.0", - "content": "../../scripts/template-oss/index.js" + "content": "../../scripts/template-oss/index.js", + "ciVersions": [ + "16.14.0", + "16.x", + "18.0.0", + "18.x" + ] }, "tap": { "nyc-arg": [ diff --git a/deps/npm/node_modules/libnpmorg/package.json b/deps/npm/node_modules/libnpmorg/package.json index 675d03b5b2437a..f1964bca7eeb9b 100644 --- a/deps/npm/node_modules/libnpmorg/package.json +++ b/deps/npm/node_modules/libnpmorg/package.json @@ -1,6 +1,6 @@ { "name": "libnpmorg", - "version": "5.0.4", + "version": "6.0.0", "description": "Programmatic api for `npm org` commands", "author": "GitHub Inc.", "main": "lib/index.js", @@ -29,9 +29,9 @@ "devDependencies": { "@npmcli/eslint-config": "^4.0.0", "@npmcli/template-oss": "4.18.0", - "minipass": "^5.0.0", - "nock": "^13.3.0", - "tap": "^16.3.4" + "minipass": "^7.0.3", + "nock": "^13.3.3", + "tap": "^16.3.8" }, "repository": { "type": "git", @@ -42,15 +42,21 @@ "homepage": "https://npmjs.com/package/libnpmorg", "dependencies": { "aproba": "^2.0.0", - "npm-registry-fetch": "^14.0.3" + "npm-registry-fetch": "^16.0.0" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", "version": "4.18.0", - "content": "../../scripts/template-oss/index.js" + "content": "../../scripts/template-oss/index.js", + "ciVersions": [ + "16.14.0", + "16.x", + "18.0.0", + "18.x" + ] }, "tap": { "nyc-arg": [ diff --git a/deps/npm/node_modules/libnpmpack/package.json b/deps/npm/node_modules/libnpmpack/package.json index d8861c337c4d99..1f81dfa48b8a8a 100644 --- a/deps/npm/node_modules/libnpmpack/package.json +++ b/deps/npm/node_modules/libnpmpack/package.json @@ -1,6 +1,6 @@ { "name": "libnpmpack", - "version": "5.0.19", + "version": "6.0.0", "description": "Programmatic API for the bits behind npm pack", "author": "GitHub Inc.", "main": "lib/index.js", @@ -24,9 +24,9 @@ "devDependencies": { "@npmcli/eslint-config": "^4.0.0", "@npmcli/template-oss": "4.18.0", - "nock": "^13.3.0", + "nock": "^13.3.3", "spawk": "^1.7.1", - "tap": "^16.3.4" + "tap": "^16.3.8" }, "repository": { "type": "git", @@ -36,18 +36,24 @@ "bugs": "https://github.com/npm/libnpmpack/issues", "homepage": "https://npmjs.com/package/libnpmpack", "dependencies": { - "@npmcli/arborist": "^6.3.0", - "@npmcli/run-script": "^6.0.0", - "npm-package-arg": "^10.1.0", - "pacote": "^15.0.8" + "@npmcli/arborist": "^7.0.0", + "@npmcli/run-script": "^7.0.1", + "npm-package-arg": "^11.0.0", + "pacote": "^17.0.4" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", "version": "4.18.0", - "content": "../../scripts/template-oss/index.js" + "content": "../../scripts/template-oss/index.js", + "ciVersions": [ + "16.14.0", + "16.x", + "18.0.0", + "18.x" + ] }, "tap": { "nyc-arg": [ diff --git a/deps/npm/node_modules/libnpmpublish/lib/provenance.js b/deps/npm/node_modules/libnpmpublish/lib/provenance.js index 398db1b4cd4671..45fe963d5f36f2 100644 --- a/deps/npm/node_modules/libnpmpublish/lib/provenance.js +++ b/deps/npm/node_modules/libnpmpublish/lib/provenance.js @@ -1,4 +1,4 @@ -const { sigstore } = require('sigstore') +const sigstore = require('sigstore') const { readFile } = require('fs/promises') const ci = require('ci-info') const { env } = process diff --git a/deps/npm/node_modules/libnpmpublish/lib/publish.js b/deps/npm/node_modules/libnpmpublish/lib/publish.js index 554eb9bec46f8c..b0ef782a166c66 100644 --- a/deps/npm/node_modules/libnpmpublish/lib/publish.js +++ b/deps/npm/node_modules/libnpmpublish/lib/publish.js @@ -50,42 +50,16 @@ Remove the 'private' field from the package.json to publish it.`), opts ) - try { - const res = await npmFetch(spec.escapedName, { - ...opts, - method: 'PUT', - body: metadata, - ignoreBody: true, - }) - if (transparencyLogUrl) { - res.transparencyLogUrl = transparencyLogUrl - } - return res - } catch (err) { - if (err.code !== 'E409') { - throw err - } - // if E409, we attempt exactly ONE retry, to protect us - // against malicious activity like trying to publish - // a bunch of new versions of a package at the same time - // and/or spamming the registry - const current = await npmFetch.json(spec.escapedName, { - ...opts, - query: { write: true }, - }) - const newMetadata = patchMetadata(current, metadata) - const res = await npmFetch(spec.escapedName, { - ...opts, - method: 'PUT', - body: newMetadata, - ignoreBody: true, - }) - /* istanbul ignore next */ - if (transparencyLogUrl) { - res.transparencyLogUrl = transparencyLogUrl - } - return res + const res = await npmFetch(spec.escapedName, { + ...opts, 
+ method: 'PUT', + body: metadata, + ignoreBody: true, + }) + if (transparencyLogUrl) { + res.transparencyLogUrl = transparencyLogUrl } + return res } const patchManifest = (_manifest, opts) => { @@ -195,51 +169,6 @@ const buildMetadata = async (registry, manifest, tarballData, spec, opts) => { } } -const patchMetadata = (current, newData) => { - const curVers = Object.keys(current.versions || {}) - .map(v => semver.clean(v, true)) - .concat(Object.keys(current.time || {}) - .map(v => semver.valid(v, true) && semver.clean(v, true)) - .filter(v => v)) - - const newVersion = Object.keys(newData.versions)[0] - - if (curVers.indexOf(newVersion) !== -1) { - const { name: pkgid, version } = newData - throw Object.assign( - new Error( - `Cannot publish ${pkgid}@${version} over existing version.` - ), { - code: 'EPUBLISHCONFLICT', - pkgid, - version, - }) - } - - current.versions = current.versions || {} - current.versions[newVersion] = newData.versions[newVersion] - for (const i in newData) { - switch (i) { - // objects that copy over the new stuffs - case 'dist-tags': - case 'versions': - case '_attachments': - for (const j in newData[i]) { - current[i] = current[i] || {} - current[i][j] = newData[i][j] - } - break - - // copy - default: - current[i] = newData[i] - break - } - } - - return current -} - // Check that all the prereqs are met for provenance generation const ensureProvenanceGeneration = async (registry, spec, opts) => { if (ciInfo.GITHUB_ACTIONS) { diff --git a/deps/npm/node_modules/libnpmpublish/package.json b/deps/npm/node_modules/libnpmpublish/package.json index 7c7533a82c735f..3dcaf98e84782d 100644 --- a/deps/npm/node_modules/libnpmpublish/package.json +++ b/deps/npm/node_modules/libnpmpublish/package.json @@ -1,6 +1,6 @@ { "name": "libnpmpublish", - "version": "7.5.0", + "version": "9.0.0", "description": "Programmatic API for the bits behind npm publish and unpublish", "author": "GitHub Inc.", "main": "lib/index.js", @@ -27,9 +27,8 @@ "@npmcli/mock-globals": "^1.0.0", "@npmcli/mock-registry": "^1.0.0", "@npmcli/template-oss": "4.18.0", - "lodash.clonedeep": "^4.5.0", - "nock": "^13.3.0", - "tap": "^16.3.4" + "nock": "^13.3.3", + "tap": "^16.3.8" }, "repository": { "type": "git", @@ -40,21 +39,27 @@ "homepage": "https://npmjs.com/package/libnpmpublish", "dependencies": { "ci-info": "^3.6.1", - "normalize-package-data": "^5.0.0", - "npm-package-arg": "^10.1.0", - "npm-registry-fetch": "^14.0.3", + "normalize-package-data": "^6.0.0", + "npm-package-arg": "^11.0.0", + "npm-registry-fetch": "^16.0.0", "proc-log": "^3.0.0", "semver": "^7.3.7", - "sigstore": "^1.4.0", - "ssri": "^10.0.1" + "sigstore": "^2.1.0", + "ssri": "^10.0.5" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", "version": "4.18.0", - "content": "../../scripts/template-oss/index.js" + "content": "../../scripts/template-oss/index.js", + "ciVersions": [ + "16.14.0", + "16.x", + "18.0.0", + "18.x" + ] }, "tap": { "nyc-arg": [ diff --git a/deps/npm/node_modules/libnpmsearch/package.json b/deps/npm/node_modules/libnpmsearch/package.json index 32cb1f21b64221..9bd45de5f62cdb 100644 --- a/deps/npm/node_modules/libnpmsearch/package.json +++ b/deps/npm/node_modules/libnpmsearch/package.json @@ -1,6 +1,6 @@ { "name": "libnpmsearch", - "version": "6.0.2", + "version": "7.0.0", "description": "Programmatic API for searching in npm and compatible registries.", "author": "GitHub Inc.", "main": "lib/index.js", @@ -27,8 +27,8 @@ "devDependencies": { "@npmcli/eslint-config": "^4.0.0", "@npmcli/template-oss": "4.18.0", - "nock": "^13.3.0", - "tap": "^16.3.4" + "nock": "^13.3.3", + "tap": "^16.3.8" }, "repository": { "type": "git", @@ -38,15 +38,21 @@ "bugs": "https://github.com/npm/libnpmsearch/issues", "homepage": "https://npmjs.com/package/libnpmsearch", "dependencies": { - "npm-registry-fetch": "^14.0.3" + "npm-registry-fetch": "^16.0.0" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", "version": "4.18.0", - "content": "../../scripts/template-oss/index.js" + "content": "../../scripts/template-oss/index.js", + "ciVersions": [ + "16.14.0", + "16.x", + "18.0.0", + "18.x" + ] }, "tap": { "nyc-arg": [ diff --git a/deps/npm/node_modules/libnpmteam/package.json b/deps/npm/node_modules/libnpmteam/package.json index 33a77095fe8489..ca153ac301bf45 100644 --- a/deps/npm/node_modules/libnpmteam/package.json +++ b/deps/npm/node_modules/libnpmteam/package.json @@ -1,7 +1,7 @@ { "name": "libnpmteam", "description": "npm Team management APIs", - "version": "5.0.3", + "version": "6.0.0", "author": "GitHub Inc.", "license": "ISC", "main": "lib/index.js", @@ -17,8 +17,8 @@ "devDependencies": { "@npmcli/eslint-config": "^4.0.0", "@npmcli/template-oss": "4.18.0", - "nock": "^13.3.0", - "tap": "^16.3.4" + "nock": "^13.3.3", + "tap": "^16.3.8" }, "repository": { "type": "git", @@ -32,15 +32,21 @@ "homepage": "https://npmjs.com/package/libnpmteam", "dependencies": { "aproba": "^2.0.0", - "npm-registry-fetch": "^14.0.3" + "npm-registry-fetch": "^16.0.0" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", "version": "4.18.0", - "content": "../../scripts/template-oss/index.js" + "content": "../../scripts/template-oss/index.js", + "ciVersions": [ + "16.14.0", + "16.x", + "18.0.0", + "18.x" + ] }, "tap": { "nyc-arg": [ diff --git a/deps/npm/node_modules/libnpmversion/package.json b/deps/npm/node_modules/libnpmversion/package.json index 469f9c2bc00d67..6f3a5bd0b5155a 100644 --- a/deps/npm/node_modules/libnpmversion/package.json +++ b/deps/npm/node_modules/libnpmversion/package.json @@ -1,6 +1,6 @@ { "name": "libnpmversion", - "version": "4.0.2", + "version": "5.0.0", "main": "lib/index.js", "files": [ "bin/", @@ -34,21 +34,27 @@ "@npmcli/eslint-config": "^4.0.0", "@npmcli/template-oss": "4.18.0", "require-inject": "^1.4.4", - "tap": "^16.3.4" + "tap": "^16.3.8" }, "dependencies": { - "@npmcli/git": "^4.0.1", - "@npmcli/run-script": "^6.0.0", + "@npmcli/git": "^5.0.3", + "@npmcli/run-script": "^7.0.1", "json-parse-even-better-errors": "^3.0.0", "proc-log": "^3.0.0", "semver": "^7.3.7" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", "version": "4.18.0", - "content": "../../scripts/template-oss/index.js" + "content": "../../scripts/template-oss/index.js", + "ciVersions": [ + "16.14.0", + "16.x", + "18.0.0", + "18.x" + ] } } diff --git a/deps/npm/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/index.js b/deps/npm/node_modules/lru-cache/dist/cjs/index.js similarity index 98% rename from deps/npm/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/index.js rename to deps/npm/node_modules/lru-cache/dist/cjs/index.js index e6c4f909292b3f..1d1f23a55ec4b4 100644 --- a/deps/npm/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/index.js +++ b/deps/npm/node_modules/lru-cache/dist/cjs/index.js @@ -837,6 +837,15 @@ class LRUCache { if (v !== oldVal) { if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) { oldVal.__abortController.abort(new Error('replaced')); + const { __staleWhileFetching: s } = oldVal; + if (s !== undefined && !noDisposeOnSet) { + if (this.#hasDispose) { + this.#dispose?.(s, k, 'set'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([s, k, 'set']); + } + } } else if (!noDisposeOnSet) { if (this.#hasDispose) { @@ -1090,7 +1099,7 @@ class LRUCache { const pcall = (res, rej) => { const fmp = this.#fetchMethod?.(k, v, fetchOpts); if (fmp && fmp instanceof Promise) { - fmp.then(v => res(v), rej); + fmp.then(v => res(v === undefined ? undefined : v), rej); } // ignored, we go until we finish, regardless. // defer check until we are actually aborting, @@ -1098,7 +1107,7 @@ class LRUCache { ac.signal.addEventListener('abort', () => { if (!options.ignoreFetchAbort || options.allowStaleOnFetchAbort) { - res(); + res(undefined); // when it eventually resolves, update the cache. 
if (options.allowStaleOnFetchAbort) { res = v => cb(v, true); diff --git a/deps/npm/node_modules/lru-cache/dist/cjs/index.min.js b/deps/npm/node_modules/lru-cache/dist/cjs/index.min.js new file mode 100644 index 00000000000000..8d34a03041d25e --- /dev/null +++ b/deps/npm/node_modules/lru-cache/dist/cjs/index.min.js @@ -0,0 +1,2 @@ +"use strict";var x=(o,t,e)=>{if(!t.has(o))throw TypeError("Cannot "+e)};var j=(o,t,e)=>(x(o,t,"read from private field"),e?e.call(o):t.get(o)),I=(o,t,e)=>{if(t.has(o))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(o):t.set(o,e)},D=(o,t,e,i)=>(x(o,t,"write to private field"),i?i.call(o,e):t.set(o,e),e);Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var v=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,N=new Set,L=typeof process=="object"&&process?process:{},P=(o,t,e,i)=>{typeof L.emitWarning=="function"?L.emitWarning(o,t,e,i):console.error(`[${e}] ${t}: ${o}`)},W=globalThis.AbortController,M=globalThis.AbortSignal;if(typeof W>"u"){M=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new M;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let o=L.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{o&&(o=!1,P("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). 
You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=o=>!N.has(o),Y=Symbol("type"),m=o=>o&&o===Math.floor(o)&&o>0&&isFinite(o),H=o=>m(o)?o<=Math.pow(2,8)?Uint8Array:o<=Math.pow(2,16)?Uint16Array:o<=Math.pow(2,32)?Uint32Array:o<=Number.MAX_SAFE_INTEGER?z:null:null,z=class extends Array{constructor(t){super(t),this.fill(0)}},E,T=class{heap;length;static create(t){let e=H(t);if(!e)return[];D(T,E,!0);let i=new T(t,e);return D(T,E,!1),i}constructor(t,e){if(!j(T,E))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},R=T;E=new WeakMap,I(R,E,!1);var C=class{#d;#f;#g;#p;#C;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#s;#w;#n;#i;#t;#l;#c;#o;#h;#S;#r;#m;#F;#_;#b;#T;#a;static unsafeExposeInternals(t){return{starts:t.#F,ttls:t.#_,sizes:t.#m,keyMap:t.#n,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#S,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#D(e,i,s,n),moveToTail:e=>t.#v(e),indexes:e=>t.#y(e),rindexes:e=>t.#A(e),isStale:e=>t.#u(e)}}get max(){return this.#d}get maxSize(){return this.#f}get calculatedSize(){return this.#w}get size(){return this.#s}get fetchMethod(){return this.#C}get dispose(){return this.#g}get disposeAfter(){return this.#p}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:a,allowStale:r,dispose:g,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,noDeleteOnFetchRejection:l,noDeleteOnStaleGet:w,allowStaleOnFetchRejection:y,allowStaleOnFetchAbort:p,ignoreFetchAbort:_}=t;if(e!==0&&!m(e))throw new TypeError("max option must be a nonnegative integer");let O=e?H(e):Array;if(!O)throw new Error("invalid max value: "+e);if(this.#d=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#C=S,this.#T=!!S,this.#n=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new O(e),this.#c=new O(e),this.#o=0,this.#h=0,this.#S=R.create(e),this.#s=0,this.#w=0,typeof g=="function"&&(this.#g=g),typeof b=="function"?(this.#p=b,this.#r=[]):(this.#p=void 0,this.#r=void 0),this.#b=!!this.#g,this.#a=!!this.#p,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!l,this.allowStaleOnFetchRejection=!!y,this.allowStaleOnFetchAbort=!!p,this.ignoreFetchAbort=!!_,this.maxEntrySize!==0){if(this.#f!==0&&!m(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!m(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#I()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!w,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!a,this.ttlResolution=m(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!m(this.ttl))throw new TypeError("ttl must be a positive integer if 
specified");this.#L()}if(this.#d===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#d&&!this.#f){let A="LRU_CACHE_UNBOUNDED";V(A)&&(N.add(A),P("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",A,C))}}getRemainingTTL(t){return this.#n.has(t)?1/0:0}#L(){let t=new z(this.#d),e=new z(this.#d);this.#_=t,this.#F=e,this.#U=(n,h,a=v.now())=>{if(e[n]=h!==0?a:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#u(n)&&this.delete(this.#i[n])},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?v.now():0},this.#O=(n,h)=>{if(t[h]){let a=t[h],r=e[h];n.ttl=a,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=a-g}};let i=0,s=()=>{let n=v.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#n.get(n);if(h===void 0)return 0;let a=t[h],r=e[h];if(a===0||r===0)return 1/0;let g=(i||s())-r;return a-g},this.#u=n=>t[n]!==0&&e[n]!==0&&(i||s())-e[n]>t[n]}#z=()=>{};#O=()=>{};#U=()=>{};#u=()=>!1;#I(){let t=new z(this.#d);this.#w=0,this.#m=t,this.#E=e=>{this.#w-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!m(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!m(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#R=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#w>n;)this.#W(!0)}this.#w+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#w)}}#E=t=>{};#R=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#y({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#h;!(!this.#x(e)||((t||!this.#u(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#A({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#o;!(!this.#x(e)||((t||!this.#u(e))&&(yield e),e===this.#h));)e=this.#l[e]}#x(t){return t!==void 0&&this.#n.get(this.#i[t])===t}*entries(){for(let t of this.#y())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#y()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#y())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}find(t,e={}){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#A({allowStale:!0}))this.#u(e)&&(this.delete(this.#i[e]),t=!0);return t}dump(){let t=[];for(let e of this.#y({allowStale:!0})){let 
i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#_&&this.#F){h.ttl=this.#_[e];let a=v.now()-this.#F[e];h.start=Math.floor(Date.now()-a)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=v.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:a=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,a);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.delete(t),this;let f=this.#s===0?void 0:this.#n.get(t);if(f===void 0)f=this.#s===0?this.#h:this.#S.length!==0?this.#S.pop():this.#s===this.#d?this.#W(!1):this.#s,this.#i[f]=t,this.#t[f]=e,this.#n.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#s++,this.#R(f,b,r),r&&(r.set="add"),g=!1;else{this.#v(f);let u=this.#t[f];if(e!==u){if(this.#T&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 0&&!h&&(this.#b&&this.#g?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#b&&this.#g?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#E(f),this.#R(f,b,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else r&&(r.set="update")}if(s!==0&&!this.#_&&this.#L(),this.#_&&(g||this.#U(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#p?.(...c)}return this}pop(){try{for(;this.#s;){let t=this.#t[this.#o];if(this.#W(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}}#W(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#T&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#E(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#S.push(e)),this.#s===1?(this.#o=this.#h=0,this.#S.length=0):this.#o=this.#l[e],this.#n.delete(i),this.#s--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#n.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#u(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#n.get(t);if(s!==void 0&&(i||!this.#u(s))){let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}}#D(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:a}=i;a?.addEventListener("abort",()=>h.abort(a.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!S)return f(h.signal.reason);let y=c;return this.#t[e]===c&&(d===void 
0?y.__staleWhileFetching?this.#t[e]=y.__staleWhileFetching:this.delete(t):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},b=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,l=S&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,y=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!y||p.__staleWhileFetching===void 0?this.delete(t):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let l=this.#C?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,b),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#n.get(t)):this.#t[e]=F,F}#e(t){if(!this.#T)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:a=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:l,signal:w}=e;if(!this.#T)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let y={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:a,size:r,sizeCalculation:g,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:l,signal:w},p=this.#n.get(t);if(p===void 0){l&&(l.fetch="miss");let _=this.#D(t,p,y,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let G=i&&_.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",G&&(l.returnedStale=!0)),G?_.__staleWhileFetching:_.__returned=_}let O=this.#u(p);if(!S&&!O)return l&&(l.fetch="hit"),this.#v(p),s&&this.#z(p),l&&this.#O(l,p),_;let A=this.#D(t,p,y,d),U=A.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=O?"stale":"refresh",U&&O&&(l.returnedStale=!0)),U?A.__staleWhileFetching:A.__returned=A}}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,a=this.#n.get(t);if(a!==void 0){let r=this.#t[a],g=this.#e(r);return h&&this.#O(h,a),this.#u(a)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.delete(t),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#v(a),s&&this.#z(a),r))}else h&&(h.get="miss")}#j(t,e){this.#c[e]=t,this.#l[t]=e}#v(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#j(this.#c[t],this.#l[t]),this.#j(this.#h,t),this.#h=t)}delete(t){let e=!1;if(this.#s!==0){let i=this.#n.get(t);if(i!==void 0)if(e=!0,this.#s===1)this.clear();else{this.#E(i);let s=this.#t[i];this.#e(s)?s.__abortController.abort(new 
Error("deleted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,t,"delete"),this.#a&&this.#r?.push([s,t,"delete"])),this.#n.delete(t),this.#i[i]=void 0,this.#t[i]=void 0,i===this.#h?this.#h=this.#c[i]:i===this.#o?this.#o=this.#l[i]:(this.#l[this.#c[i]]=this.#l[i],this.#c[this.#l[i]]=this.#c[i]),this.#s--,this.#S.push(i)}}if(this.#a&&this.#r?.length){let i=this.#r,s;for(;s=i?.shift();)this.#p?.(...s)}return e}clear(){for(let t of this.#A({allowStale:!0})){let e=this.#t[t];if(this.#e(e))e.__abortController.abort(new Error("deleted"));else{let i=this.#i[t];this.#b&&this.#g?.(e,i,"delete"),this.#a&&this.#r?.push([e,i,"delete"])}}if(this.#n.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#_&&this.#F&&(this.#_.fill(0),this.#F.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#S.length=0,this.#w=0,this.#s=0,this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}};exports.LRUCache=C; +//# sourceMappingURL=index.min.js.map diff --git a/deps/npm/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/package.json b/deps/npm/node_modules/lru-cache/dist/cjs/package.json similarity index 100% rename from deps/npm/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/package.json rename to deps/npm/node_modules/lru-cache/dist/cjs/package.json diff --git a/deps/npm/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/index.js b/deps/npm/node_modules/lru-cache/dist/mjs/index.js similarity index 98% rename from deps/npm/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/index.js rename to deps/npm/node_modules/lru-cache/dist/mjs/index.js index 1d8a36931a45a8..79025471782531 100644 --- a/deps/npm/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/index.js +++ b/deps/npm/node_modules/lru-cache/dist/mjs/index.js @@ -834,6 +834,15 @@ export class LRUCache { if (v !== oldVal) { if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) { oldVal.__abortController.abort(new Error('replaced')); + const { __staleWhileFetching: s } = oldVal; + if (s !== undefined && !noDisposeOnSet) { + if (this.#hasDispose) { + this.#dispose?.(s, k, 'set'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([s, k, 'set']); + } + } } else if (!noDisposeOnSet) { if (this.#hasDispose) { @@ -1087,7 +1096,7 @@ export class LRUCache { const pcall = (res, rej) => { const fmp = this.#fetchMethod?.(k, v, fetchOpts); if (fmp && fmp instanceof Promise) { - fmp.then(v => res(v), rej); + fmp.then(v => res(v === undefined ? undefined : v), rej); } // ignored, we go until we finish, regardless. // defer check until we are actually aborting, @@ -1095,7 +1104,7 @@ export class LRUCache { ac.signal.addEventListener('abort', () => { if (!options.ignoreFetchAbort || options.allowStaleOnFetchAbort) { - res(); + res(undefined); // when it eventually resolves, update the cache. 
if (options.allowStaleOnFetchAbort) { res = v => cb(v, true); diff --git a/deps/npm/node_modules/lru-cache/dist/mjs/index.min.js b/deps/npm/node_modules/lru-cache/dist/mjs/index.min.js new file mode 100644 index 00000000000000..5a16b3940d6df9 --- /dev/null +++ b/deps/npm/node_modules/lru-cache/dist/mjs/index.min.js @@ -0,0 +1,2 @@ +var U=(o,t,e)=>{if(!t.has(o))throw TypeError("Cannot "+e)};var I=(o,t,e)=>(U(o,t,"read from private field"),e?e.call(o):t.get(o)),j=(o,t,e)=>{if(t.has(o))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(o):t.set(o,e)},D=(o,t,e,i)=>(U(o,t,"write to private field"),i?i.call(o,e):t.set(o,e),e);var v=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,M=new Set,L=typeof process=="object"&&process?process:{},P=(o,t,e,i)=>{typeof L.emitWarning=="function"?L.emitWarning(o,t,e,i):console.error(`[${e}] ${t}: ${o}`)},W=globalThis.AbortController,N=globalThis.AbortSignal;if(typeof W>"u"){N=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new N;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let o=L.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{o&&(o=!1,P("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=o=>!M.has(o),Y=Symbol("type"),m=o=>o&&o===Math.floor(o)&&o>0&&isFinite(o),H=o=>m(o)?o<=Math.pow(2,8)?Uint8Array:o<=Math.pow(2,16)?Uint16Array:o<=Math.pow(2,32)?Uint32Array:o<=Number.MAX_SAFE_INTEGER?z:null:null,z=class extends Array{constructor(t){super(t),this.fill(0)}},E,T=class{heap;length;static create(t){let e=H(t);if(!e)return[];D(T,E,!0);let i=new T(t,e);return D(T,E,!1),i}constructor(t,e){if(!I(T,E))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},C=T;E=new WeakMap,j(C,E,!1);var R=class{#d;#f;#g;#p;#C;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#s;#w;#n;#i;#t;#l;#c;#o;#h;#S;#r;#m;#F;#_;#b;#T;#a;static unsafeExposeInternals(t){return{starts:t.#F,ttls:t.#_,sizes:t.#m,keyMap:t.#n,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#S,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#D(e,i,s,n),moveToTail:e=>t.#v(e),indexes:e=>t.#y(e),rindexes:e=>t.#A(e),isStale:e=>t.#u(e)}}get max(){return this.#d}get maxSize(){return this.#f}get calculatedSize(){return this.#w}get size(){return this.#s}get fetchMethod(){return this.#C}get dispose(){return this.#g}get disposeAfter(){return 
this.#p}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:a,allowStale:r,dispose:g,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,noDeleteOnFetchRejection:l,noDeleteOnStaleGet:w,allowStaleOnFetchRejection:y,allowStaleOnFetchAbort:p,ignoreFetchAbort:_}=t;if(e!==0&&!m(e))throw new TypeError("max option must be a nonnegative integer");let O=e?H(e):Array;if(!O)throw new Error("invalid max value: "+e);if(this.#d=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#C=S,this.#T=!!S,this.#n=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new O(e),this.#c=new O(e),this.#o=0,this.#h=0,this.#S=C.create(e),this.#s=0,this.#w=0,typeof g=="function"&&(this.#g=g),typeof b=="function"?(this.#p=b,this.#r=[]):(this.#p=void 0,this.#r=void 0),this.#b=!!this.#g,this.#a=!!this.#p,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!l,this.allowStaleOnFetchRejection=!!y,this.allowStaleOnFetchAbort=!!p,this.ignoreFetchAbort=!!_,this.maxEntrySize!==0){if(this.#f!==0&&!m(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!m(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#j()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!w,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!a,this.ttlResolution=m(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!m(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#L()}if(this.#d===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#d&&!this.#f){let A="LRU_CACHE_UNBOUNDED";V(A)&&(M.add(A),P("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",A,R))}}getRemainingTTL(t){return this.#n.has(t)?1/0:0}#L(){let t=new z(this.#d),e=new z(this.#d);this.#_=t,this.#F=e,this.#x=(n,h,a=v.now())=>{if(e[n]=h!==0?a:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#u(n)&&this.delete(this.#i[n])},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?v.now():0},this.#O=(n,h)=>{if(t[h]){let a=t[h],r=e[h];n.ttl=a,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=a-g}};let i=0,s=()=>{let n=v.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#n.get(n);if(h===void 0)return 0;let a=t[h],r=e[h];if(a===0||r===0)return 1/0;let g=(i||s())-r;return a-g},this.#u=n=>t[n]!==0&&e[n]!==0&&(i||s())-e[n]>t[n]}#z=()=>{};#O=()=>{};#x=()=>{};#u=()=>!1;#j(){let t=new z(this.#d);this.#w=0,this.#m=t,this.#E=e=>{this.#w-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!m(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!m(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#W=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#w>n;)this.#R(!0)}this.#w+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#w)}}#E=t=>{};#W=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#y({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#h;!(!this.#U(e)||((t||!this.#u(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#A({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#o;!(!this.#U(e)||((t||!this.#u(e))&&(yield e),e===this.#h));)e=this.#l[e]}#U(t){return t!==void 0&&this.#n.get(this.#i[t])===t}*entries(){for(let t of this.#y())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#y()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#y())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}find(t,e={}){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#A({allowStale:!0}))this.#u(e)&&(this.delete(this.#i[e]),t=!0);return t}dump(){let t=[];for(let e of this.#y({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#_&&this.#F){h.ttl=this.#_[e];let a=v.now()-this.#F[e];h.start=Math.floor(Date.now()-a)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=v.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:a=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,a);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.delete(t),this;let f=this.#s===0?void 0:this.#n.get(t);if(f===void 0)f=this.#s===0?this.#h:this.#S.length!==0?this.#S.pop():this.#s===this.#d?this.#R(!1):this.#s,this.#i[f]=t,this.#t[f]=e,this.#n.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#s++,this.#W(f,b,r),r&&(r.set="add"),g=!1;else{this.#v(f);let u=this.#t[f];if(e!==u){if(this.#T&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 0&&!h&&(this.#b&&this.#g?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#b&&this.#g?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#E(f),this.#W(f,b,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else 
r&&(r.set="update")}if(s!==0&&!this.#_&&this.#L(),this.#_&&(g||this.#x(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#p?.(...c)}return this}pop(){try{for(;this.#s;){let t=this.#t[this.#o];if(this.#R(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}}#R(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#T&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#E(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#S.push(e)),this.#s===1?(this.#o=this.#h=0,this.#S.length=0):this.#o=this.#l[e],this.#n.delete(i),this.#s--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#n.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#u(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#n.get(t);if(s!==void 0&&(i||!this.#u(s))){let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}}#D(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:a}=i;a?.addEventListener("abort",()=>h.abort(a.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!S)return f(h.signal.reason);let y=c;return this.#t[e]===c&&(d===void 0?y.__staleWhileFetching?this.#t[e]=y.__staleWhileFetching:this.delete(t):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},b=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,l=S&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,y=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!y||p.__staleWhileFetching===void 0?this.delete(t):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let l=this.#C?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,b),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#n.get(t)):this.#t[e]=F,F}#e(t){if(!this.#T)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:a=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:l,signal:w}=e;if(!this.#T)return 
l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let y={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:a,size:r,sizeCalculation:g,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:l,signal:w},p=this.#n.get(t);if(p===void 0){l&&(l.fetch="miss");let _=this.#D(t,p,y,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let G=i&&_.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",G&&(l.returnedStale=!0)),G?_.__staleWhileFetching:_.__returned=_}let O=this.#u(p);if(!S&&!O)return l&&(l.fetch="hit"),this.#v(p),s&&this.#z(p),l&&this.#O(l,p),_;let A=this.#D(t,p,y,d),x=A.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=O?"stale":"refresh",x&&O&&(l.returnedStale=!0)),x?A.__staleWhileFetching:A.__returned=A}}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,a=this.#n.get(t);if(a!==void 0){let r=this.#t[a],g=this.#e(r);return h&&this.#O(h,a),this.#u(a)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.delete(t),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#v(a),s&&this.#z(a),r))}else h&&(h.get="miss")}#I(t,e){this.#c[e]=t,this.#l[t]=e}#v(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#I(this.#c[t],this.#l[t]),this.#I(this.#h,t),this.#h=t)}delete(t){let e=!1;if(this.#s!==0){let i=this.#n.get(t);if(i!==void 0)if(e=!0,this.#s===1)this.clear();else{this.#E(i);let s=this.#t[i];this.#e(s)?s.__abortController.abort(new Error("deleted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,t,"delete"),this.#a&&this.#r?.push([s,t,"delete"])),this.#n.delete(t),this.#i[i]=void 0,this.#t[i]=void 0,i===this.#h?this.#h=this.#c[i]:i===this.#o?this.#o=this.#l[i]:(this.#l[this.#c[i]]=this.#l[i],this.#c[this.#l[i]]=this.#c[i]),this.#s--,this.#S.push(i)}}if(this.#a&&this.#r?.length){let i=this.#r,s;for(;s=i?.shift();)this.#p?.(...s)}return e}clear(){for(let t of this.#A({allowStale:!0})){let e=this.#t[t];if(this.#e(e))e.__abortController.abort(new Error("deleted"));else{let i=this.#i[t];this.#b&&this.#g?.(e,i,"delete"),this.#a&&this.#r?.push([e,i,"delete"])}}if(this.#n.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#_&&this.#F&&(this.#_.fill(0),this.#F.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#S.length=0,this.#w=0,this.#s=0,this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}};export{R as LRUCache}; +//# sourceMappingURL=index.min.js.map diff --git a/deps/npm/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/package.json b/deps/npm/node_modules/lru-cache/dist/mjs/package.json similarity index 100% rename from deps/npm/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/package.json rename to deps/npm/node_modules/lru-cache/dist/mjs/package.json diff --git a/deps/npm/node_modules/lru-cache/package.json b/deps/npm/node_modules/lru-cache/package.json index 9684991727e7a2..bae4a04839d1f7 100644 --- a/deps/npm/node_modules/lru-cache/package.json +++ b/deps/npm/node_modules/lru-cache/package.json @@ -1,7 +1,7 @@ { "name": "lru-cache", "description": "A cache object that deletes the least-recently-used items.", - "version": "7.18.3", + "version": "10.0.1", "author": "Isaac Z. 
Schlueter ", "keywords": [ "mru", @@ -11,60 +11,74 @@ "sideEffects": false, "scripts": { "build": "npm run prepare", + "preprepare": "rm -rf dist", + "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json", + "postprepare": "bash fixup.sh", "pretest": "npm run prepare", "presnap": "npm run prepare", - "prepare": "node ./scripts/transpile-to-esm.js", - "size": "size-limit", - "test": "tap", - "snap": "tap", + "test": "c8 tap", + "snap": "c8 tap", "preversion": "npm test", "postversion": "npm publish", "prepublishOnly": "git push origin --follow-tags", "format": "prettier --write .", - "typedoc": "typedoc ./index.d.ts" + "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts", + "benchmark-results-typedoc": "bash scripts/benchmark-results-typedoc.sh", + "prebenchmark": "npm run prepare", + "benchmark": "make -C benchmark", + "preprofile": "npm run prepare", + "profile": "make -C benchmark profile" }, - "type": "commonjs", - "main": "./index.js", - "module": "./index.mjs", - "types": "./index.d.ts", + "main": "./dist/cjs/index.js", + "module": "./dist/mjs/index.js", "exports": { - ".": { + "./min": { "import": { - "types": "./index.d.ts", - "default": "./index.mjs" + "types": "./dist/mjs/index.d.ts", + "default": "./dist/mjs/index.min.js" }, "require": { - "types": "./index.d.ts", - "default": "./index.js" + "types": "./dist/cjs/index.d.ts", + "default": "./dist/cjs/index.min.js" } }, - "./package.json": "./package.json" + ".": { + "import": { + "types": "./dist/mjs/index.d.ts", + "default": "./dist/mjs/index.js" + }, + "require": { + "types": "./dist/cjs/index.d.ts", + "default": "./dist/cjs/index.js" + } + } }, "repository": "git://github.com/isaacs/node-lru-cache.git", "devDependencies": { "@size-limit/preset-small-lib": "^7.0.8", - "@types/node": "^17.0.31", + "@types/node": "^20.2.5", "@types/tap": "^15.0.6", "benchmark": "^2.1.4", "c8": "^7.11.2", "clock-mock": "^1.0.6", + "esbuild": "^0.17.11", "eslint-config-prettier": "^8.5.0", + "marked": "^4.2.12", + "mkdirp": "^2.1.5", "prettier": "^2.6.2", "size-limit": "^7.0.8", "tap": "^16.3.4", - "ts-node": "^10.7.0", + "ts-node": "^10.9.1", "tslib": "^2.4.0", - "typedoc": "^0.23.24", - "typescript": "^4.6.4" + "typedoc": "^0.24.6", + "typescript": "^5.0.4" }, "license": "ISC", "files": [ - "index.js", - "index.mjs", - "index.d.ts" + "dist" ], "engines": { - "node": ">=12" + "node": "14 || >=16.14" }, "prettier": { "semi": false, @@ -78,19 +92,17 @@ "endOfLine": "lf" }, "tap": { - "nyc-arg": [ - "--include=index.js" - ], + "coverage": false, "node-arg": [ "--expose-gc", - "--require", + "-r", "ts-node/register" ], "ts": false }, "size-limit": [ { - "path": "./index.js" + "path": "./dist/mjs/index.js" } ] } diff --git a/deps/npm/node_modules/make-fetch-happen/lib/remote.js b/deps/npm/node_modules/make-fetch-happen/lib/remote.js index bdbcc79cad908d..2aef9f8f969b00 100644 --- a/deps/npm/node_modules/make-fetch-happen/lib/remote.js +++ b/deps/npm/node_modules/make-fetch-happen/lib/remote.js @@ -4,7 +4,7 @@ const promiseRetry = require('promise-retry') const ssri = require('ssri') const CachingMinipassPipeline = require('./pipeline.js') -const getAgent = require('./agent.js') +const { getAgent } = require('@npmcli/agent') const pkg = require('../package.json') const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})` @@ -14,9 +14,15 @@ const RETRY_ERRORS = [ 'ECONNREFUSED', // remote host refused to open connection 'EADDRINUSE', // failed to bind to a local port (proxy?) 
'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW - 'ERR_SOCKET_TIMEOUT', // same as above, but this one comes from agentkeepalive + // from @npmcli/agent + 'ECONNECTIONTIMEOUT', + 'EIDLETIMEOUT', + 'ERESPONSETIMEOUT', + 'ETRANSFERTIMEOUT', // Known codes we do NOT retry on: // ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline) + // EINVALIDPROXY // invalid protocol from @npmcli/agent + // EINVALIDRESPONSE // invalid status code from @npmcli/agent ] const RETRY_TYPES = [ diff --git a/deps/npm/node_modules/make-fetch-happen/package.json b/deps/npm/node_modules/make-fetch-happen/package.json index fd415dc9966faa..a874ace6d1d472 100644 --- a/deps/npm/node_modules/make-fetch-happen/package.json +++ b/deps/npm/node_modules/make-fetch-happen/package.json @@ -1,6 +1,6 @@ { "name": "make-fetch-happen", - "version": "11.1.1", + "version": "13.0.0", "description": "Opinionated, caching, retrying fetch client", "main": "lib/index.js", "files": [ @@ -33,32 +33,28 @@ "author": "GitHub Inc.", "license": "ISC", "dependencies": { - "agentkeepalive": "^4.2.1", - "cacache": "^17.0.0", + "@npmcli/agent": "^2.0.0", + "cacache": "^18.0.0", "http-cache-semantics": "^4.1.1", - "http-proxy-agent": "^5.0.0", - "https-proxy-agent": "^5.0.0", "is-lambda": "^1.0.1", - "lru-cache": "^7.7.1", - "minipass": "^5.0.0", + "minipass": "^7.0.2", "minipass-fetch": "^3.0.0", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", "negotiator": "^0.6.3", "promise-retry": "^2.0.1", - "socks-proxy-agent": "^7.0.0", "ssri": "^10.0.0" }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.14.1", + "@npmcli/template-oss": "4.18.0", "nock": "^13.2.4", "safe-buffer": "^5.2.1", "standard-version": "^9.3.2", "tap": "^16.0.0" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" }, "tap": { "color": 1, @@ -72,7 +68,13 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.14.1", + "ciVersions": [ + "16.14.0", + "16.x", + "18.0.0", + "18.x" + ], + "version": "4.18.0", "publish": "true" } } diff --git a/deps/npm/node_modules/minipass-fetch/package.json b/deps/npm/node_modules/minipass-fetch/package.json index 78024317d8be4d..581275ba27d4ff 100644 --- a/deps/npm/node_modules/minipass-fetch/package.json +++ b/deps/npm/node_modules/minipass-fetch/package.json @@ -1,6 +1,6 @@ { "name": "minipass-fetch", - "version": "3.0.3", + "version": "3.0.4", "description": "An implementation of window.fetch in Node.js using Minipass streams", "license": "MIT", "main": "lib/index.js", @@ -24,7 +24,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.14.1", + "@npmcli/template-oss": "4.18.0", "@ungap/url-search-params": "^0.2.2", "abort-controller": "^3.0.0", "abortcontroller-polyfill": "~1.7.3", @@ -36,7 +36,7 @@ "tap": "^16.0.0" }, "dependencies": { - "minipass": "^5.0.0", + "minipass": "^7.0.3", "minipass-sized": "^1.0.3", "minizlib": "^2.1.2" }, @@ -63,7 +63,7 @@ "author": "GitHub Inc.", "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.14.1", + "version": "4.18.0", "publish": "true" } } diff --git a/deps/npm/node_modules/minipass/dist/cjs/index.js b/deps/npm/node_modules/minipass/dist/cjs/index.js new file mode 100644 index 00000000000000..b6cdae8eb514b8 --- /dev/null +++ b/deps/npm/node_modules/minipass/dist/cjs/index.js @@ -0,0 +1,1028 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Minipass = exports.isWritable = exports.isReadable = exports.isStream = void 0; +const proc = typeof process === 'object' && process + ? process + : { + stdout: null, + stderr: null, + }; +const events_1 = require("events"); +const stream_1 = __importDefault(require("stream")); +const string_decoder_1 = require("string_decoder"); +/** + * Return true if the argument is a Minipass stream, Node stream, or something + * else that Minipass can interact with. + */ +const isStream = (s) => !!s && + typeof s === 'object' && + (s instanceof Minipass || + s instanceof stream_1.default || + (0, exports.isReadable)(s) || + (0, exports.isWritable)(s)); +exports.isStream = isStream; +/** + * Return true if the argument is a valid {@link Minipass.Readable} + */ +const isReadable = (s) => !!s && + typeof s === 'object' && + s instanceof events_1.EventEmitter && + typeof s.pipe === 'function' && + // node core Writable streams have a pipe() method, but it throws + s.pipe !== stream_1.default.Writable.prototype.pipe; +exports.isReadable = isReadable; +/** + * Return true if the argument is a valid {@link Minipass.Writable} + */ +const isWritable = (s) => !!s && + typeof s === 'object' && + s instanceof events_1.EventEmitter && + typeof s.write === 'function' && + typeof s.end === 'function'; +exports.isWritable = isWritable; +const EOF = Symbol('EOF'); +const MAYBE_EMIT_END = Symbol('maybeEmitEnd'); +const EMITTED_END = Symbol('emittedEnd'); +const EMITTING_END = Symbol('emittingEnd'); +const EMITTED_ERROR = Symbol('emittedError'); +const CLOSED = Symbol('closed'); +const READ = Symbol('read'); +const FLUSH = Symbol('flush'); +const FLUSHCHUNK = Symbol('flushChunk'); +const ENCODING = Symbol('encoding'); +const DECODER = Symbol('decoder'); +const FLOWING = Symbol('flowing'); +const PAUSED = Symbol('paused'); +const RESUME = Symbol('resume'); +const BUFFER = Symbol('buffer'); +const PIPES = Symbol('pipes'); +const BUFFERLENGTH = Symbol('bufferLength'); +const BUFFERPUSH = Symbol('bufferPush'); +const BUFFERSHIFT = Symbol('bufferShift'); +const OBJECTMODE = Symbol('objectMode'); +// internal event when stream is destroyed +const DESTROYED = Symbol('destroyed'); +// internal event when stream has an error +const ERROR = Symbol('error'); +const EMITDATA = Symbol('emitData'); +const EMITEND = Symbol('emitEnd'); +const EMITEND2 = Symbol('emitEnd2'); +const ASYNC = Symbol('async'); +const ABORT = Symbol('abort'); +const ABORTED = Symbol('aborted'); +const SIGNAL = Symbol('signal'); +const DATALISTENERS = Symbol('dataListeners'); +const DISCARDED = Symbol('discarded'); +const defer = (fn) => Promise.resolve().then(fn); +const nodefer = (fn) => fn(); +const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish'; +const isArrayBufferLike = (b) => b instanceof ArrayBuffer || + (!!b && + typeof b === 'object' && + b.constructor && + b.constructor.name === 'ArrayBuffer' && + b.byteLength >= 0); +const isArrayBufferView = (b) => 
!Buffer.isBuffer(b) && ArrayBuffer.isView(b); +/** + * Internal class representing a pipe to a destination stream. + * + * @internal + */ +class Pipe { + src; + dest; + opts; + ondrain; + constructor(src, dest, opts) { + this.src = src; + this.dest = dest; + this.opts = opts; + this.ondrain = () => src[RESUME](); + this.dest.on('drain', this.ondrain); + } + unpipe() { + this.dest.removeListener('drain', this.ondrain); + } + // only here for the prototype + /* c8 ignore start */ + proxyErrors(_er) { } + /* c8 ignore stop */ + end() { + this.unpipe(); + if (this.opts.end) + this.dest.end(); + } +} +/** + * Internal class representing a pipe to a destination stream where + * errors are proxied. + * + * @internal + */ +class PipeProxyErrors extends Pipe { + unpipe() { + this.src.removeListener('error', this.proxyErrors); + super.unpipe(); + } + constructor(src, dest, opts) { + super(src, dest, opts); + this.proxyErrors = er => dest.emit('error', er); + src.on('error', this.proxyErrors); + } +} +const isObjectModeOptions = (o) => !!o.objectMode; +const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer'; +/** + * Main export, the Minipass class + * + * `RType` is the type of data emitted, defaults to Buffer + * + * `WType` is the type of data to be written, if RType is buffer or string, + * then any {@link Minipass.ContiguousData} is allowed. + * + * `Events` is the set of event handler signatures that this object + * will emit, see {@link Minipass.Events} + */ +class Minipass extends events_1.EventEmitter { + [FLOWING] = false; + [PAUSED] = false; + [PIPES] = []; + [BUFFER] = []; + [OBJECTMODE]; + [ENCODING]; + [ASYNC]; + [DECODER]; + [EOF] = false; + [EMITTED_END] = false; + [EMITTING_END] = false; + [CLOSED] = false; + [EMITTED_ERROR] = null; + [BUFFERLENGTH] = 0; + [DESTROYED] = false; + [SIGNAL]; + [ABORTED] = false; + [DATALISTENERS] = 0; + [DISCARDED] = false; + /** + * true if the stream can be written + */ + writable = true; + /** + * true if the stream can be read + */ + readable = true; + /** + * If `RType` is Buffer, then options do not need to be provided. + * Otherwise, an options object must be provided to specify either + * {@link Minipass.SharedOptions.objectMode} or + * {@link Minipass.SharedOptions.encoding}, as appropriate. + */ + constructor(...args) { + const options = (args[0] || + {}); + super(); + if (options.objectMode && typeof options.encoding === 'string') { + throw new TypeError('Encoding and objectMode may not be used together'); + } + if (isObjectModeOptions(options)) { + this[OBJECTMODE] = true; + this[ENCODING] = null; + } + else if (isEncodingOptions(options)) { + this[ENCODING] = options.encoding; + this[OBJECTMODE] = false; + } + else { + this[OBJECTMODE] = false; + this[ENCODING] = null; + } + this[ASYNC] = !!options.async; + this[DECODER] = this[ENCODING] + ? 
new string_decoder_1.StringDecoder(this[ENCODING]) + : null; + //@ts-ignore - private option for debugging and testing + if (options && options.debugExposeBuffer === true) { + Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }); + } + //@ts-ignore - private option for debugging and testing + if (options && options.debugExposePipes === true) { + Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }); + } + const { signal } = options; + if (signal) { + this[SIGNAL] = signal; + if (signal.aborted) { + this[ABORT](); + } + else { + signal.addEventListener('abort', () => this[ABORT]()); + } + } + } + /** + * The amount of data stored in the buffer waiting to be read. + * + * For Buffer strings, this will be the total byte length. + * For string encoding streams, this will be the string character length, + * according to JavaScript's `string.length` logic. + * For objectMode streams, this is a count of the items waiting to be + * emitted. + */ + get bufferLength() { + return this[BUFFERLENGTH]; + } + /** + * The `BufferEncoding` currently in use, or `null` + */ + get encoding() { + return this[ENCODING]; + } + /** + * @deprecated - This is a read only property + */ + set encoding(_enc) { + throw new Error('Encoding must be set at instantiation time'); + } + /** + * @deprecated - Encoding may only be set at instantiation time + */ + setEncoding(_enc) { + throw new Error('Encoding must be set at instantiation time'); + } + /** + * True if this is an objectMode stream + */ + get objectMode() { + return this[OBJECTMODE]; + } + /** + * @deprecated - This is a read-only property + */ + set objectMode(_om) { + throw new Error('objectMode must be set at instantiation time'); + } + /** + * true if this is an async stream + */ + get ['async']() { + return this[ASYNC]; + } + /** + * Set to true to make this stream async. + * + * Once set, it cannot be unset, as this would potentially cause incorrect + * behavior. Ie, a sync stream can be made async, but an async stream + * cannot be safely made sync. + */ + set ['async'](a) { + this[ASYNC] = this[ASYNC] || !!a; + } + // drop everything and get out of the flow completely + [ABORT]() { + this[ABORTED] = true; + this.emit('abort', this[SIGNAL]?.reason); + this.destroy(this[SIGNAL]?.reason); + } + /** + * True if the stream has been aborted. + */ + get aborted() { + return this[ABORTED]; + } + /** + * No-op setter. Stream aborted status is set via the AbortSignal provided + * in the constructor options. + */ + set aborted(_) { } + write(chunk, encoding, cb) { + if (this[ABORTED]) + return false; + if (this[EOF]) + throw new Error('write after end'); + if (this[DESTROYED]) { + this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' })); + return true; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = 'utf8'; + } + if (!encoding) + encoding = 'utf8'; + const fn = this[ASYNC] ? defer : nodefer; + // convert array buffers and typed array views into buffers + // at some point in the future, we may want to do the opposite! 
+ // leave strings and buffers as-is + // anything is only allowed if in object mode, so throw + if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { + if (isArrayBufferView(chunk)) { + //@ts-ignore - sinful unsafe type changing + chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength); + } + else if (isArrayBufferLike(chunk)) { + //@ts-ignore - sinful unsafe type changing + chunk = Buffer.from(chunk); + } + else if (typeof chunk !== 'string') { + throw new Error('Non-contiguous data written to non-objectMode stream'); + } + } + // handle object mode up front, since it's simpler + // this yields better performance, fewer checks later. + if (this[OBJECTMODE]) { + // maybe impossible? + /* c8 ignore start */ + if (this[FLOWING] && this[BUFFERLENGTH] !== 0) + this[FLUSH](true); + /* c8 ignore stop */ + if (this[FLOWING]) + this.emit('data', chunk); + else + this[BUFFERPUSH](chunk); + if (this[BUFFERLENGTH] !== 0) + this.emit('readable'); + if (cb) + fn(cb); + return this[FLOWING]; + } + // at this point the chunk is a buffer or string + // don't buffer it up or send it to the decoder + if (!chunk.length) { + if (this[BUFFERLENGTH] !== 0) + this.emit('readable'); + if (cb) + fn(cb); + return this[FLOWING]; + } + // fast-path writing strings of same encoding to a stream with + // an empty buffer, skipping the buffer/decoder dance + if (typeof chunk === 'string' && + // unless it is a string already ready for us to use + !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) { + //@ts-ignore - sinful unsafe type change + chunk = Buffer.from(chunk, encoding); + } + if (Buffer.isBuffer(chunk) && this[ENCODING]) { + //@ts-ignore - sinful unsafe type change + chunk = this[DECODER].write(chunk); + } + // Note: flushing CAN potentially switch us into not-flowing mode + if (this[FLOWING] && this[BUFFERLENGTH] !== 0) + this[FLUSH](true); + if (this[FLOWING]) + this.emit('data', chunk); + else + this[BUFFERPUSH](chunk); + if (this[BUFFERLENGTH] !== 0) + this.emit('readable'); + if (cb) + fn(cb); + return this[FLOWING]; + } + /** + * Low-level explicit read method. + * + * In objectMode, the argument is ignored, and one item is returned if + * available. + * + * `n` is the number of bytes (or in the case of encoding streams, + * characters) to consume. If `n` is not provided, then the entire buffer + * is returned, or `null` is returned if no data is available. + * + * If `n` is greater that the amount of data in the internal buffer, + * then `null` is returned. + */ + read(n) { + if (this[DESTROYED]) + return null; + this[DISCARDED] = false; + if (this[BUFFERLENGTH] === 0 || + n === 0 || + (n && n > this[BUFFERLENGTH])) { + this[MAYBE_EMIT_END](); + return null; + } + if (this[OBJECTMODE]) + n = null; + if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { + // not object mode, so if we have an encoding, then RType is string + // otherwise, must be Buffer + this[BUFFER] = [ + (this[ENCODING] + ? 
this[BUFFER].join('') + : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])), + ]; + } + const ret = this[READ](n || null, this[BUFFER][0]); + this[MAYBE_EMIT_END](); + return ret; + } + [READ](n, chunk) { + if (this[OBJECTMODE]) + this[BUFFERSHIFT](); + else { + const c = chunk; + if (n === c.length || n === null) + this[BUFFERSHIFT](); + else if (typeof c === 'string') { + this[BUFFER][0] = c.slice(n); + chunk = c.slice(0, n); + this[BUFFERLENGTH] -= n; + } + else { + this[BUFFER][0] = c.subarray(n); + chunk = c.subarray(0, n); + this[BUFFERLENGTH] -= n; + } + } + this.emit('data', chunk); + if (!this[BUFFER].length && !this[EOF]) + this.emit('drain'); + return chunk; + } + end(chunk, encoding, cb) { + if (typeof chunk === 'function') { + cb = chunk; + chunk = undefined; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = 'utf8'; + } + if (chunk !== undefined) + this.write(chunk, encoding); + if (cb) + this.once('end', cb); + this[EOF] = true; + this.writable = false; + // if we haven't written anything, then go ahead and emit, + // even if we're not reading. + // we'll re-emit if a new 'end' listener is added anyway. + // This makes MP more suitable to write-only use cases. + if (this[FLOWING] || !this[PAUSED]) + this[MAYBE_EMIT_END](); + return this; + } + // don't let the internal resume be overwritten + [RESUME]() { + if (this[DESTROYED]) + return; + if (!this[DATALISTENERS] && !this[PIPES].length) { + this[DISCARDED] = true; + } + this[PAUSED] = false; + this[FLOWING] = true; + this.emit('resume'); + if (this[BUFFER].length) + this[FLUSH](); + else if (this[EOF]) + this[MAYBE_EMIT_END](); + else + this.emit('drain'); + } + /** + * Resume the stream if it is currently in a paused state + * + * If called when there are no pipe destinations or `data` event listeners, + * this will place the stream in a "discarded" state, where all data will + * be thrown away. The discarded state is removed if a pipe destination or + * data handler is added, if pause() is called, or if any synchronous or + * asynchronous iteration is started. + */ + resume() { + return this[RESUME](); + } + /** + * Pause the stream + */ + pause() { + this[FLOWING] = false; + this[PAUSED] = true; + this[DISCARDED] = false; + } + /** + * true if the stream has been forcibly destroyed + */ + get destroyed() { + return this[DESTROYED]; + } + /** + * true if the stream is currently in a flowing state, meaning that + * any writes will be immediately emitted. + */ + get flowing() { + return this[FLOWING]; + } + /** + * true if the stream is currently in a paused state + */ + get paused() { + return this[PAUSED]; + } + [BUFFERPUSH](chunk) { + if (this[OBJECTMODE]) + this[BUFFERLENGTH] += 1; + else + this[BUFFERLENGTH] += chunk.length; + this[BUFFER].push(chunk); + } + [BUFFERSHIFT]() { + if (this[OBJECTMODE]) + this[BUFFERLENGTH] -= 1; + else + this[BUFFERLENGTH] -= this[BUFFER][0].length; + return this[BUFFER].shift(); + } + [FLUSH](noDrain = false) { + do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && + this[BUFFER].length); + if (!noDrain && !this[BUFFER].length && !this[EOF]) + this.emit('drain'); + } + [FLUSHCHUNK](chunk) { + this.emit('data', chunk); + return this[FLOWING]; + } + /** + * Pipe all data emitted by this stream into the destination provided. + * + * Triggers the flow of data. 
+ */ + pipe(dest, opts) { + if (this[DESTROYED]) + return dest; + this[DISCARDED] = false; + const ended = this[EMITTED_END]; + opts = opts || {}; + if (dest === proc.stdout || dest === proc.stderr) + opts.end = false; + else + opts.end = opts.end !== false; + opts.proxyErrors = !!opts.proxyErrors; + // piping an ended stream ends immediately + if (ended) { + if (opts.end) + dest.end(); + } + else { + // "as" here just ignores the WType, which pipes don't care about, + // since they're only consuming from us, and writing to the dest + this[PIPES].push(!opts.proxyErrors + ? new Pipe(this, dest, opts) + : new PipeProxyErrors(this, dest, opts)); + if (this[ASYNC]) + defer(() => this[RESUME]()); + else + this[RESUME](); + } + return dest; + } + /** + * Fully unhook a piped destination stream. + * + * If the destination stream was the only consumer of this stream (ie, + * there are no other piped destinations or `'data'` event listeners) + * then the flow of data will stop until there is another consumer or + * {@link Minipass#resume} is explicitly called. + */ + unpipe(dest) { + const p = this[PIPES].find(p => p.dest === dest); + if (p) { + if (this[PIPES].length === 1) { + if (this[FLOWING] && this[DATALISTENERS] === 0) { + this[FLOWING] = false; + } + this[PIPES] = []; + } + else + this[PIPES].splice(this[PIPES].indexOf(p), 1); + p.unpipe(); + } + } + /** + * Alias for {@link Minipass#on} + */ + addListener(ev, handler) { + return this.on(ev, handler); + } + /** + * Mostly identical to `EventEmitter.on`, with the following + * behavior differences to prevent data loss and unnecessary hangs: + * + * - Adding a 'data' event handler will trigger the flow of data + * + * - Adding a 'readable' event handler when there is data waiting to be read + * will cause 'readable' to be emitted immediately. + * + * - Adding an 'endish' event handler ('end', 'finish', etc.) which has + * already passed will cause the event to be emitted immediately and all + * handlers removed. + * + * - Adding an 'error' event handler after an error has been emitted will + * cause the event to be re-emitted immediately with the error previously + * raised. + */ + on(ev, handler) { + const ret = super.on(ev, handler); + if (ev === 'data') { + this[DISCARDED] = false; + this[DATALISTENERS]++; + if (!this[PIPES].length && !this[FLOWING]) { + this[RESUME](); + } + } + else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) { + super.emit('readable'); + } + else if (isEndish(ev) && this[EMITTED_END]) { + super.emit(ev); + this.removeAllListeners(ev); + } + else if (ev === 'error' && this[EMITTED_ERROR]) { + const h = handler; + if (this[ASYNC]) + defer(() => h.call(this, this[EMITTED_ERROR])); + else + h.call(this, this[EMITTED_ERROR]); + } + return ret; + } + /** + * Alias for {@link Minipass#off} + */ + removeListener(ev, handler) { + return this.off(ev, handler); + } + /** + * Mostly identical to `EventEmitter.off` + * + * If a 'data' event handler is removed, and it was the last consumer + * (ie, there are no pipe destinations or other 'data' event listeners), + * then the flow of data will stop until there is another consumer or + * {@link Minipass#resume} is explicitly called. + */ + off(ev, handler) { + const ret = super.off(ev, handler); + // if we previously had listeners, and now we don't, and we don't + // have any pipes, then stop the flow, unless it's been explicitly + // put in a discarded flowing state via stream.resume(). 
+ if (ev === 'data') { + this[DATALISTENERS] = this.listeners('data').length; + if (this[DATALISTENERS] === 0 && + !this[DISCARDED] && + !this[PIPES].length) { + this[FLOWING] = false; + } + } + return ret; + } + /** + * Mostly identical to `EventEmitter.removeAllListeners` + * + * If all 'data' event handlers are removed, and they were the last consumer + * (ie, there are no pipe destinations), then the flow of data will stop + * until there is another consumer or {@link Minipass#resume} is explicitly + * called. + */ + removeAllListeners(ev) { + const ret = super.removeAllListeners(ev); + if (ev === 'data' || ev === undefined) { + this[DATALISTENERS] = 0; + if (!this[DISCARDED] && !this[PIPES].length) { + this[FLOWING] = false; + } + } + return ret; + } + /** + * true if the 'end' event has been emitted + */ + get emittedEnd() { + return this[EMITTED_END]; + } + [MAYBE_EMIT_END]() { + if (!this[EMITTING_END] && + !this[EMITTED_END] && + !this[DESTROYED] && + this[BUFFER].length === 0 && + this[EOF]) { + this[EMITTING_END] = true; + this.emit('end'); + this.emit('prefinish'); + this.emit('finish'); + if (this[CLOSED]) + this.emit('close'); + this[EMITTING_END] = false; + } + } + /** + * Mostly identical to `EventEmitter.emit`, with the following + * behavior differences to prevent data loss and unnecessary hangs: + * + * If the stream has been destroyed, and the event is something other + * than 'close' or 'error', then `false` is returned and no handlers + * are called. + * + * If the event is 'end', and has already been emitted, then the event + * is ignored. If the stream is in a paused or non-flowing state, then + * the event will be deferred until data flow resumes. If the stream is + * async, then handlers will be called on the next tick rather than + * immediately. + * + * If the event is 'close', and 'end' has not yet been emitted, then + * the event will be deferred until after 'end' is emitted. + * + * If the event is 'error', and an AbortSignal was provided for the stream, + * and there are no listeners, then the event is ignored, matching the + * behavior of node core streams in the presense of an AbortSignal. + * + * If the event is 'finish' or 'prefinish', then all listeners will be + * removed after emitting the event, to prevent double-firing. + */ + emit(ev, ...args) { + const data = args[0]; + // error and close are only events allowed after calling destroy() + if (ev !== 'error' && + ev !== 'close' && + ev !== DESTROYED && + this[DESTROYED]) { + return false; + } + else if (ev === 'data') { + return !this[OBJECTMODE] && !data + ? false + : this[ASYNC] + ? (defer(() => this[EMITDATA](data)), true) + : this[EMITDATA](data); + } + else if (ev === 'end') { + return this[EMITEND](); + } + else if (ev === 'close') { + this[CLOSED] = true; + // don't emit close before 'end' and 'finish' + if (!this[EMITTED_END] && !this[DESTROYED]) + return false; + const ret = super.emit('close'); + this.removeAllListeners('close'); + return ret; + } + else if (ev === 'error') { + this[EMITTED_ERROR] = data; + super.emit(ERROR, data); + const ret = !this[SIGNAL] || this.listeners('error').length + ? 
super.emit('error', data) + : false; + this[MAYBE_EMIT_END](); + return ret; + } + else if (ev === 'resume') { + const ret = super.emit('resume'); + this[MAYBE_EMIT_END](); + return ret; + } + else if (ev === 'finish' || ev === 'prefinish') { + const ret = super.emit(ev); + this.removeAllListeners(ev); + return ret; + } + // Some other unknown event + const ret = super.emit(ev, ...args); + this[MAYBE_EMIT_END](); + return ret; + } + [EMITDATA](data) { + for (const p of this[PIPES]) { + if (p.dest.write(data) === false) + this.pause(); + } + const ret = this[DISCARDED] ? false : super.emit('data', data); + this[MAYBE_EMIT_END](); + return ret; + } + [EMITEND]() { + if (this[EMITTED_END]) + return false; + this[EMITTED_END] = true; + this.readable = false; + return this[ASYNC] + ? (defer(() => this[EMITEND2]()), true) + : this[EMITEND2](); + } + [EMITEND2]() { + if (this[DECODER]) { + const data = this[DECODER].end(); + if (data) { + for (const p of this[PIPES]) { + p.dest.write(data); + } + if (!this[DISCARDED]) + super.emit('data', data); + } + } + for (const p of this[PIPES]) { + p.end(); + } + const ret = super.emit('end'); + this.removeAllListeners('end'); + return ret; + } + /** + * Return a Promise that resolves to an array of all emitted data once + * the stream ends. + */ + async collect() { + const buf = Object.assign([], { + dataLength: 0, + }); + if (!this[OBJECTMODE]) + buf.dataLength = 0; + // set the promise first, in case an error is raised + // by triggering the flow here. + const p = this.promise(); + this.on('data', c => { + buf.push(c); + if (!this[OBJECTMODE]) + buf.dataLength += c.length; + }); + await p; + return buf; + } + /** + * Return a Promise that resolves to the concatenation of all emitted data + * once the stream ends. + * + * Not allowed on objectMode streams. + */ + async concat() { + if (this[OBJECTMODE]) { + throw new Error('cannot concat in objectMode'); + } + const buf = await this.collect(); + return (this[ENCODING] + ? buf.join('') + : Buffer.concat(buf, buf.dataLength)); + } + /** + * Return a void Promise that resolves once the stream ends. + */ + async promise() { + return new Promise((resolve, reject) => { + this.on(DESTROYED, () => reject(new Error('stream destroyed'))); + this.on('error', er => reject(er)); + this.on('end', () => resolve()); + }); + } + /** + * Asynchronous `for await of` iteration. + * + * This will continue emitting all chunks until the stream terminates. + */ + [Symbol.asyncIterator]() { + // set this up front, in case the consumer doesn't call next() + // right away. 
+ this[DISCARDED] = false; + let stopped = false; + const stop = async () => { + this.pause(); + stopped = true; + return { value: undefined, done: true }; + }; + const next = () => { + if (stopped) + return stop(); + const res = this.read(); + if (res !== null) + return Promise.resolve({ done: false, value: res }); + if (this[EOF]) + return stop(); + let resolve; + let reject; + const onerr = (er) => { + this.off('data', ondata); + this.off('end', onend); + this.off(DESTROYED, ondestroy); + stop(); + reject(er); + }; + const ondata = (value) => { + this.off('error', onerr); + this.off('end', onend); + this.off(DESTROYED, ondestroy); + this.pause(); + resolve({ value, done: !!this[EOF] }); + }; + const onend = () => { + this.off('error', onerr); + this.off('data', ondata); + this.off(DESTROYED, ondestroy); + stop(); + resolve({ done: true, value: undefined }); + }; + const ondestroy = () => onerr(new Error('stream destroyed')); + return new Promise((res, rej) => { + reject = rej; + resolve = res; + this.once(DESTROYED, ondestroy); + this.once('error', onerr); + this.once('end', onend); + this.once('data', ondata); + }); + }; + return { + next, + throw: stop, + return: stop, + [Symbol.asyncIterator]() { + return this; + }, + }; + } + /** + * Synchronous `for of` iteration. + * + * The iteration will terminate when the internal buffer runs out, even + * if the stream has not yet terminated. + */ + [Symbol.iterator]() { + // set this up front, in case the consumer doesn't call next() + // right away. + this[DISCARDED] = false; + let stopped = false; + const stop = () => { + this.pause(); + this.off(ERROR, stop); + this.off(DESTROYED, stop); + this.off('end', stop); + stopped = true; + return { done: true, value: undefined }; + }; + const next = () => { + if (stopped) + return stop(); + const value = this.read(); + return value === null ? stop() : { done: false, value }; + }; + this.once('end', stop); + this.once(ERROR, stop); + this.once(DESTROYED, stop); + return { + next, + throw: stop, + return: stop, + [Symbol.iterator]() { + return this; + }, + }; + } + /** + * Destroy a stream, preventing it from being used for any further purpose. + * + * If the stream has a `close()` method, then it will be called on + * destruction. + * + * After destruction, any attempt to write data, read data, or emit most + * events will be ignored. + * + * If an error argument is provided, then it will be emitted in an + * 'error' event. + */ + destroy(er) { + if (this[DESTROYED]) { + if (er) + this.emit('error', er); + else + this.emit(DESTROYED); + return this; + } + this[DESTROYED] = true; + this[DISCARDED] = true; + // throw away all buffered data, it's never coming out + this[BUFFER].length = 0; + this[BUFFERLENGTH] = 0; + const wc = this; + if (typeof wc.close === 'function' && !this[CLOSED]) + wc.close(); + if (er) + this.emit('error', er); + // if no error to emit, still reject pending promises + else + this.emit(DESTROYED); + return this; + } + /** + * Alias for {@link isStream} + * + * Former export location, maintained for backwards compatibility. 
+ * + * @deprecated + */ + static get isStream() { + return exports.isStream; + } +} +exports.Minipass = Minipass; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/minipass/dist/cjs/package.json b/deps/npm/node_modules/minipass/dist/cjs/package.json new file mode 100644 index 00000000000000..5bbefffbabee39 --- /dev/null +++ b/deps/npm/node_modules/minipass/dist/cjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/deps/npm/node_modules/minipass/dist/mjs/index.js b/deps/npm/node_modules/minipass/dist/mjs/index.js new file mode 100644 index 00000000000000..b65fafbae43a4e --- /dev/null +++ b/deps/npm/node_modules/minipass/dist/mjs/index.js @@ -0,0 +1,1018 @@ +const proc = typeof process === 'object' && process + ? process + : { + stdout: null, + stderr: null, + }; +import { EventEmitter } from 'events'; +import Stream from 'stream'; +import { StringDecoder } from 'string_decoder'; +/** + * Return true if the argument is a Minipass stream, Node stream, or something + * else that Minipass can interact with. + */ +export const isStream = (s) => !!s && + typeof s === 'object' && + (s instanceof Minipass || + s instanceof Stream || + isReadable(s) || + isWritable(s)); +/** + * Return true if the argument is a valid {@link Minipass.Readable} + */ +export const isReadable = (s) => !!s && + typeof s === 'object' && + s instanceof EventEmitter && + typeof s.pipe === 'function' && + // node core Writable streams have a pipe() method, but it throws + s.pipe !== Stream.Writable.prototype.pipe; +/** + * Return true if the argument is a valid {@link Minipass.Writable} + */ +export const isWritable = (s) => !!s && + typeof s === 'object' && + s instanceof EventEmitter && + typeof s.write === 'function' && + typeof s.end === 'function'; +const EOF = Symbol('EOF'); +const MAYBE_EMIT_END = Symbol('maybeEmitEnd'); +const EMITTED_END = Symbol('emittedEnd'); +const EMITTING_END = Symbol('emittingEnd'); +const EMITTED_ERROR = Symbol('emittedError'); +const CLOSED = Symbol('closed'); +const READ = Symbol('read'); +const FLUSH = Symbol('flush'); +const FLUSHCHUNK = Symbol('flushChunk'); +const ENCODING = Symbol('encoding'); +const DECODER = Symbol('decoder'); +const FLOWING = Symbol('flowing'); +const PAUSED = Symbol('paused'); +const RESUME = Symbol('resume'); +const BUFFER = Symbol('buffer'); +const PIPES = Symbol('pipes'); +const BUFFERLENGTH = Symbol('bufferLength'); +const BUFFERPUSH = Symbol('bufferPush'); +const BUFFERSHIFT = Symbol('bufferShift'); +const OBJECTMODE = Symbol('objectMode'); +// internal event when stream is destroyed +const DESTROYED = Symbol('destroyed'); +// internal event when stream has an error +const ERROR = Symbol('error'); +const EMITDATA = Symbol('emitData'); +const EMITEND = Symbol('emitEnd'); +const EMITEND2 = Symbol('emitEnd2'); +const ASYNC = Symbol('async'); +const ABORT = Symbol('abort'); +const ABORTED = Symbol('aborted'); +const SIGNAL = Symbol('signal'); +const DATALISTENERS = Symbol('dataListeners'); +const DISCARDED = Symbol('discarded'); +const defer = (fn) => Promise.resolve().then(fn); +const nodefer = (fn) => fn(); +const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish'; +const isArrayBufferLike = (b) => b instanceof ArrayBuffer || + (!!b && + typeof b === 'object' && + b.constructor && + b.constructor.name === 'ArrayBuffer' && + b.byteLength >= 0); +const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b); +/** + * Internal class representing a pipe to a 
destination stream. + * + * @internal + */ +class Pipe { + src; + dest; + opts; + ondrain; + constructor(src, dest, opts) { + this.src = src; + this.dest = dest; + this.opts = opts; + this.ondrain = () => src[RESUME](); + this.dest.on('drain', this.ondrain); + } + unpipe() { + this.dest.removeListener('drain', this.ondrain); + } + // only here for the prototype + /* c8 ignore start */ + proxyErrors(_er) { } + /* c8 ignore stop */ + end() { + this.unpipe(); + if (this.opts.end) + this.dest.end(); + } +} +/** + * Internal class representing a pipe to a destination stream where + * errors are proxied. + * + * @internal + */ +class PipeProxyErrors extends Pipe { + unpipe() { + this.src.removeListener('error', this.proxyErrors); + super.unpipe(); + } + constructor(src, dest, opts) { + super(src, dest, opts); + this.proxyErrors = er => dest.emit('error', er); + src.on('error', this.proxyErrors); + } +} +const isObjectModeOptions = (o) => !!o.objectMode; +const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer'; +/** + * Main export, the Minipass class + * + * `RType` is the type of data emitted, defaults to Buffer + * + * `WType` is the type of data to be written, if RType is buffer or string, + * then any {@link Minipass.ContiguousData} is allowed. + * + * `Events` is the set of event handler signatures that this object + * will emit, see {@link Minipass.Events} + */ +export class Minipass extends EventEmitter { + [FLOWING] = false; + [PAUSED] = false; + [PIPES] = []; + [BUFFER] = []; + [OBJECTMODE]; + [ENCODING]; + [ASYNC]; + [DECODER]; + [EOF] = false; + [EMITTED_END] = false; + [EMITTING_END] = false; + [CLOSED] = false; + [EMITTED_ERROR] = null; + [BUFFERLENGTH] = 0; + [DESTROYED] = false; + [SIGNAL]; + [ABORTED] = false; + [DATALISTENERS] = 0; + [DISCARDED] = false; + /** + * true if the stream can be written + */ + writable = true; + /** + * true if the stream can be read + */ + readable = true; + /** + * If `RType` is Buffer, then options do not need to be provided. + * Otherwise, an options object must be provided to specify either + * {@link Minipass.SharedOptions.objectMode} or + * {@link Minipass.SharedOptions.encoding}, as appropriate. + */ + constructor(...args) { + const options = (args[0] || + {}); + super(); + if (options.objectMode && typeof options.encoding === 'string') { + throw new TypeError('Encoding and objectMode may not be used together'); + } + if (isObjectModeOptions(options)) { + this[OBJECTMODE] = true; + this[ENCODING] = null; + } + else if (isEncodingOptions(options)) { + this[ENCODING] = options.encoding; + this[OBJECTMODE] = false; + } + else { + this[OBJECTMODE] = false; + this[ENCODING] = null; + } + this[ASYNC] = !!options.async; + this[DECODER] = this[ENCODING] + ? new StringDecoder(this[ENCODING]) + : null; + //@ts-ignore - private option for debugging and testing + if (options && options.debugExposeBuffer === true) { + Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }); + } + //@ts-ignore - private option for debugging and testing + if (options && options.debugExposePipes === true) { + Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }); + } + const { signal } = options; + if (signal) { + this[SIGNAL] = signal; + if (signal.aborted) { + this[ABORT](); + } + else { + signal.addEventListener('abort', () => this[ABORT]()); + } + } + } + /** + * The amount of data stored in the buffer waiting to be read. + * + * For Buffer strings, this will be the total byte length. 
+ * For string encoding streams, this will be the string character length, + * according to JavaScript's `string.length` logic. + * For objectMode streams, this is a count of the items waiting to be + * emitted. + */ + get bufferLength() { + return this[BUFFERLENGTH]; + } + /** + * The `BufferEncoding` currently in use, or `null` + */ + get encoding() { + return this[ENCODING]; + } + /** + * @deprecated - This is a read only property + */ + set encoding(_enc) { + throw new Error('Encoding must be set at instantiation time'); + } + /** + * @deprecated - Encoding may only be set at instantiation time + */ + setEncoding(_enc) { + throw new Error('Encoding must be set at instantiation time'); + } + /** + * True if this is an objectMode stream + */ + get objectMode() { + return this[OBJECTMODE]; + } + /** + * @deprecated - This is a read-only property + */ + set objectMode(_om) { + throw new Error('objectMode must be set at instantiation time'); + } + /** + * true if this is an async stream + */ + get ['async']() { + return this[ASYNC]; + } + /** + * Set to true to make this stream async. + * + * Once set, it cannot be unset, as this would potentially cause incorrect + * behavior. Ie, a sync stream can be made async, but an async stream + * cannot be safely made sync. + */ + set ['async'](a) { + this[ASYNC] = this[ASYNC] || !!a; + } + // drop everything and get out of the flow completely + [ABORT]() { + this[ABORTED] = true; + this.emit('abort', this[SIGNAL]?.reason); + this.destroy(this[SIGNAL]?.reason); + } + /** + * True if the stream has been aborted. + */ + get aborted() { + return this[ABORTED]; + } + /** + * No-op setter. Stream aborted status is set via the AbortSignal provided + * in the constructor options. + */ + set aborted(_) { } + write(chunk, encoding, cb) { + if (this[ABORTED]) + return false; + if (this[EOF]) + throw new Error('write after end'); + if (this[DESTROYED]) { + this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' })); + return true; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = 'utf8'; + } + if (!encoding) + encoding = 'utf8'; + const fn = this[ASYNC] ? defer : nodefer; + // convert array buffers and typed array views into buffers + // at some point in the future, we may want to do the opposite! + // leave strings and buffers as-is + // anything is only allowed if in object mode, so throw + if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { + if (isArrayBufferView(chunk)) { + //@ts-ignore - sinful unsafe type changing + chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength); + } + else if (isArrayBufferLike(chunk)) { + //@ts-ignore - sinful unsafe type changing + chunk = Buffer.from(chunk); + } + else if (typeof chunk !== 'string') { + throw new Error('Non-contiguous data written to non-objectMode stream'); + } + } + // handle object mode up front, since it's simpler + // this yields better performance, fewer checks later. + if (this[OBJECTMODE]) { + // maybe impossible? 
+ /* c8 ignore start */ + if (this[FLOWING] && this[BUFFERLENGTH] !== 0) + this[FLUSH](true); + /* c8 ignore stop */ + if (this[FLOWING]) + this.emit('data', chunk); + else + this[BUFFERPUSH](chunk); + if (this[BUFFERLENGTH] !== 0) + this.emit('readable'); + if (cb) + fn(cb); + return this[FLOWING]; + } + // at this point the chunk is a buffer or string + // don't buffer it up or send it to the decoder + if (!chunk.length) { + if (this[BUFFERLENGTH] !== 0) + this.emit('readable'); + if (cb) + fn(cb); + return this[FLOWING]; + } + // fast-path writing strings of same encoding to a stream with + // an empty buffer, skipping the buffer/decoder dance + if (typeof chunk === 'string' && + // unless it is a string already ready for us to use + !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) { + //@ts-ignore - sinful unsafe type change + chunk = Buffer.from(chunk, encoding); + } + if (Buffer.isBuffer(chunk) && this[ENCODING]) { + //@ts-ignore - sinful unsafe type change + chunk = this[DECODER].write(chunk); + } + // Note: flushing CAN potentially switch us into not-flowing mode + if (this[FLOWING] && this[BUFFERLENGTH] !== 0) + this[FLUSH](true); + if (this[FLOWING]) + this.emit('data', chunk); + else + this[BUFFERPUSH](chunk); + if (this[BUFFERLENGTH] !== 0) + this.emit('readable'); + if (cb) + fn(cb); + return this[FLOWING]; + } + /** + * Low-level explicit read method. + * + * In objectMode, the argument is ignored, and one item is returned if + * available. + * + * `n` is the number of bytes (or in the case of encoding streams, + * characters) to consume. If `n` is not provided, then the entire buffer + * is returned, or `null` is returned if no data is available. + * + * If `n` is greater that the amount of data in the internal buffer, + * then `null` is returned. + */ + read(n) { + if (this[DESTROYED]) + return null; + this[DISCARDED] = false; + if (this[BUFFERLENGTH] === 0 || + n === 0 || + (n && n > this[BUFFERLENGTH])) { + this[MAYBE_EMIT_END](); + return null; + } + if (this[OBJECTMODE]) + n = null; + if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { + // not object mode, so if we have an encoding, then RType is string + // otherwise, must be Buffer + this[BUFFER] = [ + (this[ENCODING] + ? this[BUFFER].join('') + : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])), + ]; + } + const ret = this[READ](n || null, this[BUFFER][0]); + this[MAYBE_EMIT_END](); + return ret; + } + [READ](n, chunk) { + if (this[OBJECTMODE]) + this[BUFFERSHIFT](); + else { + const c = chunk; + if (n === c.length || n === null) + this[BUFFERSHIFT](); + else if (typeof c === 'string') { + this[BUFFER][0] = c.slice(n); + chunk = c.slice(0, n); + this[BUFFERLENGTH] -= n; + } + else { + this[BUFFER][0] = c.subarray(n); + chunk = c.subarray(0, n); + this[BUFFERLENGTH] -= n; + } + } + this.emit('data', chunk); + if (!this[BUFFER].length && !this[EOF]) + this.emit('drain'); + return chunk; + } + end(chunk, encoding, cb) { + if (typeof chunk === 'function') { + cb = chunk; + chunk = undefined; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = 'utf8'; + } + if (chunk !== undefined) + this.write(chunk, encoding); + if (cb) + this.once('end', cb); + this[EOF] = true; + this.writable = false; + // if we haven't written anything, then go ahead and emit, + // even if we're not reading. + // we'll re-emit if a new 'end' listener is added anyway. + // This makes MP more suitable to write-only use cases. 
+ if (this[FLOWING] || !this[PAUSED]) + this[MAYBE_EMIT_END](); + return this; + } + // don't let the internal resume be overwritten + [RESUME]() { + if (this[DESTROYED]) + return; + if (!this[DATALISTENERS] && !this[PIPES].length) { + this[DISCARDED] = true; + } + this[PAUSED] = false; + this[FLOWING] = true; + this.emit('resume'); + if (this[BUFFER].length) + this[FLUSH](); + else if (this[EOF]) + this[MAYBE_EMIT_END](); + else + this.emit('drain'); + } + /** + * Resume the stream if it is currently in a paused state + * + * If called when there are no pipe destinations or `data` event listeners, + * this will place the stream in a "discarded" state, where all data will + * be thrown away. The discarded state is removed if a pipe destination or + * data handler is added, if pause() is called, or if any synchronous or + * asynchronous iteration is started. + */ + resume() { + return this[RESUME](); + } + /** + * Pause the stream + */ + pause() { + this[FLOWING] = false; + this[PAUSED] = true; + this[DISCARDED] = false; + } + /** + * true if the stream has been forcibly destroyed + */ + get destroyed() { + return this[DESTROYED]; + } + /** + * true if the stream is currently in a flowing state, meaning that + * any writes will be immediately emitted. + */ + get flowing() { + return this[FLOWING]; + } + /** + * true if the stream is currently in a paused state + */ + get paused() { + return this[PAUSED]; + } + [BUFFERPUSH](chunk) { + if (this[OBJECTMODE]) + this[BUFFERLENGTH] += 1; + else + this[BUFFERLENGTH] += chunk.length; + this[BUFFER].push(chunk); + } + [BUFFERSHIFT]() { + if (this[OBJECTMODE]) + this[BUFFERLENGTH] -= 1; + else + this[BUFFERLENGTH] -= this[BUFFER][0].length; + return this[BUFFER].shift(); + } + [FLUSH](noDrain = false) { + do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && + this[BUFFER].length); + if (!noDrain && !this[BUFFER].length && !this[EOF]) + this.emit('drain'); + } + [FLUSHCHUNK](chunk) { + this.emit('data', chunk); + return this[FLOWING]; + } + /** + * Pipe all data emitted by this stream into the destination provided. + * + * Triggers the flow of data. + */ + pipe(dest, opts) { + if (this[DESTROYED]) + return dest; + this[DISCARDED] = false; + const ended = this[EMITTED_END]; + opts = opts || {}; + if (dest === proc.stdout || dest === proc.stderr) + opts.end = false; + else + opts.end = opts.end !== false; + opts.proxyErrors = !!opts.proxyErrors; + // piping an ended stream ends immediately + if (ended) { + if (opts.end) + dest.end(); + } + else { + // "as" here just ignores the WType, which pipes don't care about, + // since they're only consuming from us, and writing to the dest + this[PIPES].push(!opts.proxyErrors + ? new Pipe(this, dest, opts) + : new PipeProxyErrors(this, dest, opts)); + if (this[ASYNC]) + defer(() => this[RESUME]()); + else + this[RESUME](); + } + return dest; + } + /** + * Fully unhook a piped destination stream. + * + * If the destination stream was the only consumer of this stream (ie, + * there are no other piped destinations or `'data'` event listeners) + * then the flow of data will stop until there is another consumer or + * {@link Minipass#resume} is explicitly called. 
+ */ + unpipe(dest) { + const p = this[PIPES].find(p => p.dest === dest); + if (p) { + if (this[PIPES].length === 1) { + if (this[FLOWING] && this[DATALISTENERS] === 0) { + this[FLOWING] = false; + } + this[PIPES] = []; + } + else + this[PIPES].splice(this[PIPES].indexOf(p), 1); + p.unpipe(); + } + } + /** + * Alias for {@link Minipass#on} + */ + addListener(ev, handler) { + return this.on(ev, handler); + } + /** + * Mostly identical to `EventEmitter.on`, with the following + * behavior differences to prevent data loss and unnecessary hangs: + * + * - Adding a 'data' event handler will trigger the flow of data + * + * - Adding a 'readable' event handler when there is data waiting to be read + * will cause 'readable' to be emitted immediately. + * + * - Adding an 'endish' event handler ('end', 'finish', etc.) which has + * already passed will cause the event to be emitted immediately and all + * handlers removed. + * + * - Adding an 'error' event handler after an error has been emitted will + * cause the event to be re-emitted immediately with the error previously + * raised. + */ + on(ev, handler) { + const ret = super.on(ev, handler); + if (ev === 'data') { + this[DISCARDED] = false; + this[DATALISTENERS]++; + if (!this[PIPES].length && !this[FLOWING]) { + this[RESUME](); + } + } + else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) { + super.emit('readable'); + } + else if (isEndish(ev) && this[EMITTED_END]) { + super.emit(ev); + this.removeAllListeners(ev); + } + else if (ev === 'error' && this[EMITTED_ERROR]) { + const h = handler; + if (this[ASYNC]) + defer(() => h.call(this, this[EMITTED_ERROR])); + else + h.call(this, this[EMITTED_ERROR]); + } + return ret; + } + /** + * Alias for {@link Minipass#off} + */ + removeListener(ev, handler) { + return this.off(ev, handler); + } + /** + * Mostly identical to `EventEmitter.off` + * + * If a 'data' event handler is removed, and it was the last consumer + * (ie, there are no pipe destinations or other 'data' event listeners), + * then the flow of data will stop until there is another consumer or + * {@link Minipass#resume} is explicitly called. + */ + off(ev, handler) { + const ret = super.off(ev, handler); + // if we previously had listeners, and now we don't, and we don't + // have any pipes, then stop the flow, unless it's been explicitly + // put in a discarded flowing state via stream.resume(). + if (ev === 'data') { + this[DATALISTENERS] = this.listeners('data').length; + if (this[DATALISTENERS] === 0 && + !this[DISCARDED] && + !this[PIPES].length) { + this[FLOWING] = false; + } + } + return ret; + } + /** + * Mostly identical to `EventEmitter.removeAllListeners` + * + * If all 'data' event handlers are removed, and they were the last consumer + * (ie, there are no pipe destinations), then the flow of data will stop + * until there is another consumer or {@link Minipass#resume} is explicitly + * called. 
+ */ + removeAllListeners(ev) { + const ret = super.removeAllListeners(ev); + if (ev === 'data' || ev === undefined) { + this[DATALISTENERS] = 0; + if (!this[DISCARDED] && !this[PIPES].length) { + this[FLOWING] = false; + } + } + return ret; + } + /** + * true if the 'end' event has been emitted + */ + get emittedEnd() { + return this[EMITTED_END]; + } + [MAYBE_EMIT_END]() { + if (!this[EMITTING_END] && + !this[EMITTED_END] && + !this[DESTROYED] && + this[BUFFER].length === 0 && + this[EOF]) { + this[EMITTING_END] = true; + this.emit('end'); + this.emit('prefinish'); + this.emit('finish'); + if (this[CLOSED]) + this.emit('close'); + this[EMITTING_END] = false; + } + } + /** + * Mostly identical to `EventEmitter.emit`, with the following + * behavior differences to prevent data loss and unnecessary hangs: + * + * If the stream has been destroyed, and the event is something other + * than 'close' or 'error', then `false` is returned and no handlers + * are called. + * + * If the event is 'end', and has already been emitted, then the event + * is ignored. If the stream is in a paused or non-flowing state, then + * the event will be deferred until data flow resumes. If the stream is + * async, then handlers will be called on the next tick rather than + * immediately. + * + * If the event is 'close', and 'end' has not yet been emitted, then + * the event will be deferred until after 'end' is emitted. + * + * If the event is 'error', and an AbortSignal was provided for the stream, + * and there are no listeners, then the event is ignored, matching the + * behavior of node core streams in the presense of an AbortSignal. + * + * If the event is 'finish' or 'prefinish', then all listeners will be + * removed after emitting the event, to prevent double-firing. + */ + emit(ev, ...args) { + const data = args[0]; + // error and close are only events allowed after calling destroy() + if (ev !== 'error' && + ev !== 'close' && + ev !== DESTROYED && + this[DESTROYED]) { + return false; + } + else if (ev === 'data') { + return !this[OBJECTMODE] && !data + ? false + : this[ASYNC] + ? (defer(() => this[EMITDATA](data)), true) + : this[EMITDATA](data); + } + else if (ev === 'end') { + return this[EMITEND](); + } + else if (ev === 'close') { + this[CLOSED] = true; + // don't emit close before 'end' and 'finish' + if (!this[EMITTED_END] && !this[DESTROYED]) + return false; + const ret = super.emit('close'); + this.removeAllListeners('close'); + return ret; + } + else if (ev === 'error') { + this[EMITTED_ERROR] = data; + super.emit(ERROR, data); + const ret = !this[SIGNAL] || this.listeners('error').length + ? super.emit('error', data) + : false; + this[MAYBE_EMIT_END](); + return ret; + } + else if (ev === 'resume') { + const ret = super.emit('resume'); + this[MAYBE_EMIT_END](); + return ret; + } + else if (ev === 'finish' || ev === 'prefinish') { + const ret = super.emit(ev); + this.removeAllListeners(ev); + return ret; + } + // Some other unknown event + const ret = super.emit(ev, ...args); + this[MAYBE_EMIT_END](); + return ret; + } + [EMITDATA](data) { + for (const p of this[PIPES]) { + if (p.dest.write(data) === false) + this.pause(); + } + const ret = this[DISCARDED] ? false : super.emit('data', data); + this[MAYBE_EMIT_END](); + return ret; + } + [EMITEND]() { + if (this[EMITTED_END]) + return false; + this[EMITTED_END] = true; + this.readable = false; + return this[ASYNC] + ? 
(defer(() => this[EMITEND2]()), true) + : this[EMITEND2](); + } + [EMITEND2]() { + if (this[DECODER]) { + const data = this[DECODER].end(); + if (data) { + for (const p of this[PIPES]) { + p.dest.write(data); + } + if (!this[DISCARDED]) + super.emit('data', data); + } + } + for (const p of this[PIPES]) { + p.end(); + } + const ret = super.emit('end'); + this.removeAllListeners('end'); + return ret; + } + /** + * Return a Promise that resolves to an array of all emitted data once + * the stream ends. + */ + async collect() { + const buf = Object.assign([], { + dataLength: 0, + }); + if (!this[OBJECTMODE]) + buf.dataLength = 0; + // set the promise first, in case an error is raised + // by triggering the flow here. + const p = this.promise(); + this.on('data', c => { + buf.push(c); + if (!this[OBJECTMODE]) + buf.dataLength += c.length; + }); + await p; + return buf; + } + /** + * Return a Promise that resolves to the concatenation of all emitted data + * once the stream ends. + * + * Not allowed on objectMode streams. + */ + async concat() { + if (this[OBJECTMODE]) { + throw new Error('cannot concat in objectMode'); + } + const buf = await this.collect(); + return (this[ENCODING] + ? buf.join('') + : Buffer.concat(buf, buf.dataLength)); + } + /** + * Return a void Promise that resolves once the stream ends. + */ + async promise() { + return new Promise((resolve, reject) => { + this.on(DESTROYED, () => reject(new Error('stream destroyed'))); + this.on('error', er => reject(er)); + this.on('end', () => resolve()); + }); + } + /** + * Asynchronous `for await of` iteration. + * + * This will continue emitting all chunks until the stream terminates. + */ + [Symbol.asyncIterator]() { + // set this up front, in case the consumer doesn't call next() + // right away. + this[DISCARDED] = false; + let stopped = false; + const stop = async () => { + this.pause(); + stopped = true; + return { value: undefined, done: true }; + }; + const next = () => { + if (stopped) + return stop(); + const res = this.read(); + if (res !== null) + return Promise.resolve({ done: false, value: res }); + if (this[EOF]) + return stop(); + let resolve; + let reject; + const onerr = (er) => { + this.off('data', ondata); + this.off('end', onend); + this.off(DESTROYED, ondestroy); + stop(); + reject(er); + }; + const ondata = (value) => { + this.off('error', onerr); + this.off('end', onend); + this.off(DESTROYED, ondestroy); + this.pause(); + resolve({ value, done: !!this[EOF] }); + }; + const onend = () => { + this.off('error', onerr); + this.off('data', ondata); + this.off(DESTROYED, ondestroy); + stop(); + resolve({ done: true, value: undefined }); + }; + const ondestroy = () => onerr(new Error('stream destroyed')); + return new Promise((res, rej) => { + reject = rej; + resolve = res; + this.once(DESTROYED, ondestroy); + this.once('error', onerr); + this.once('end', onend); + this.once('data', ondata); + }); + }; + return { + next, + throw: stop, + return: stop, + [Symbol.asyncIterator]() { + return this; + }, + }; + } + /** + * Synchronous `for of` iteration. + * + * The iteration will terminate when the internal buffer runs out, even + * if the stream has not yet terminated. + */ + [Symbol.iterator]() { + // set this up front, in case the consumer doesn't call next() + // right away. 
+ this[DISCARDED] = false; + let stopped = false; + const stop = () => { + this.pause(); + this.off(ERROR, stop); + this.off(DESTROYED, stop); + this.off('end', stop); + stopped = true; + return { done: true, value: undefined }; + }; + const next = () => { + if (stopped) + return stop(); + const value = this.read(); + return value === null ? stop() : { done: false, value }; + }; + this.once('end', stop); + this.once(ERROR, stop); + this.once(DESTROYED, stop); + return { + next, + throw: stop, + return: stop, + [Symbol.iterator]() { + return this; + }, + }; + } + /** + * Destroy a stream, preventing it from being used for any further purpose. + * + * If the stream has a `close()` method, then it will be called on + * destruction. + * + * After destruction, any attempt to write data, read data, or emit most + * events will be ignored. + * + * If an error argument is provided, then it will be emitted in an + * 'error' event. + */ + destroy(er) { + if (this[DESTROYED]) { + if (er) + this.emit('error', er); + else + this.emit(DESTROYED); + return this; + } + this[DESTROYED] = true; + this[DISCARDED] = true; + // throw away all buffered data, it's never coming out + this[BUFFER].length = 0; + this[BUFFERLENGTH] = 0; + const wc = this; + if (typeof wc.close === 'function' && !this[CLOSED]) + wc.close(); + if (er) + this.emit('error', er); + // if no error to emit, still reject pending promises + else + this.emit(DESTROYED); + return this; + } + /** + * Alias for {@link isStream} + * + * Former export location, maintained for backwards compatibility. + * + * @deprecated + */ + static get isStream() { + return isStream; + } +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/minipass/dist/mjs/package.json b/deps/npm/node_modules/minipass/dist/mjs/package.json new file mode 100644 index 00000000000000..3dbc1ca591c055 --- /dev/null +++ b/deps/npm/node_modules/minipass/dist/mjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/deps/npm/node_modules/minipass/package.json b/deps/npm/node_modules/minipass/package.json index 0e20e988047f23..6faaa247a5bc66 100644 --- a/deps/npm/node_modules/minipass/package.json +++ b/deps/npm/node_modules/minipass/package.json @@ -1,70 +1,52 @@ { "name": "minipass", - "version": "5.0.0", + "version": "7.0.3", "description": "minimal implementation of a PassThrough stream", - "main": "./index.js", - "module": "./index.mjs", - "types": "./index.d.ts", + "main": "./dist/cjs/index.js", + "module": "./dist/mjs/index.js", + "types": "./dist/cjs/index.js", "exports": { ".": { "import": { - "types": "./index.d.ts", - "default": "./index.mjs" + "types": "./dist/mjs/index.d.ts", + "default": "./dist/mjs/index.js" }, "require": { - "types": "./index.d.ts", - "default": "./index.js" + "types": "./dist/cjs/index.d.ts", + "default": "./dist/cjs/index.js" } }, "./package.json": "./package.json" }, - "devDependencies": { - "@types/node": "^17.0.41", - "end-of-stream": "^1.4.0", - "node-abort-controller": "^3.1.1", - "prettier": "^2.6.2", - "tap": "^16.2.0", - "through2": "^2.0.3", - "ts-node": "^10.8.1", - "typedoc": "^0.23.24", - "typescript": "^4.7.3" - }, + "files": [ + "dist" + ], "scripts": { - "pretest": "npm run prepare", - "presnap": "npm run prepare", - "prepare": "node ./scripts/transpile-to-esm.js", - "snap": "tap", - "test": "tap", "preversion": "npm test", "postversion": "npm publish", - "postpublish": "git push origin --follow-tags", - "typedoc": "typedoc ./index.d.ts", - "format": "prettier --write . 
--loglevel warn" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/isaacs/minipass.git" + "prepublishOnly": "git push origin --follow-tags", + "preprepare": "rm -rf dist", + "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json && bash ./scripts/fixup.sh", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "test": "c8 tap", + "snap": "c8 tap", + "format": "prettier --write . --loglevel warn", + "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts" }, - "keywords": [ - "passthrough", - "stream" - ], - "author": "Isaac Z. Schlueter (http://blog.izs.me/)", - "license": "ISC", - "files": [ - "index.d.ts", - "index.js", - "index.mjs" - ], "tap": { - "check-coverage": true - }, - "engines": { - "node": ">=8" + "coverage": false, + "node-arg": [ + "--enable-source-maps", + "--no-warnings", + "--loader", + "ts-node/esm" + ], + "ts": false }, "prettier": { "semi": false, - "printWidth": 80, + "printWidth": 75, "tabWidth": 2, "useTabs": false, "singleQuote": true, @@ -72,5 +54,29 @@ "bracketSameLine": true, "arrowParens": "avoid", "endOfLine": "lf" + }, + "devDependencies": { + "@types/node": "^20.1.2", + "@types/tap": "^15.0.8", + "c8": "^7.13.0", + "prettier": "^2.6.2", + "tap": "^16.3.0", + "ts-node": "^10.9.1", + "typedoc": "^0.24.8", + "typescript": "^5.1.3", + "end-of-stream": "^1.4.0", + "node-abort-controller": "^3.1.1", + "sync-content": "^1.0.2", + "through2": "^2.0.3" + }, + "repository": "https://github.com/isaacs/minipass", + "keywords": [ + "passthrough", + "stream" + ], + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "ISC", + "engines": { + "node": ">=16 || 14 >=14.17" } } diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/LICENSE.md b/deps/npm/node_modules/node-gyp/node_modules/cacache/LICENSE.md new file mode 100644 index 00000000000000..8d28acf866d932 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/LICENSE.md @@ -0,0 +1,16 @@ +ISC License + +Copyright (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS +ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. 
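The reworked minipass package.json above ships a dual CommonJS/ESM build and routes consumers through the conditional "exports" map: require() resolves to ./dist/cjs/index.js and import resolves to ./dist/mjs/index.js. A minimal sketch of how both entry points are consumed, not taken from the vendored files themselves; it only uses the named Minipass export and the end()/concat() methods visible in the stream implementation above:

// Illustrative sketch only, not part of this patch.
// CommonJS: resolved via "exports" -> "require" -> ./dist/cjs/index.js
const { Minipass } = require('minipass')
// ESM consumers would instead resolve via "exports" -> "import":
//   import { Minipass } from 'minipass'

const mp = new Minipass({ encoding: 'utf8' })
mp.end('hello')
mp.concat().then(s => console.log(s)) // prints 'hello'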
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/path.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/path.js new file mode 100644 index 00000000000000..ad5a76a4f73f26 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/path.js @@ -0,0 +1,29 @@ +'use strict' + +const contentVer = require('../../package.json')['cache-version'].content +const hashToSegments = require('../util/hash-to-segments') +const path = require('path') +const ssri = require('ssri') + +// Current format of content file path: +// +// sha512-BaSE64Hex= -> +// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee +// +module.exports = contentPath + +function contentPath (cache, integrity) { + const sri = ssri.parse(integrity, { single: true }) + // contentPath is the *strongest* algo given + return path.join( + contentDir(cache), + sri.algorithm, + ...hashToSegments(sri.hexDigest()) + ) +} + +module.exports.contentDir = contentDir + +function contentDir (cache) { + return path.join(cache, `content-v${contentVer}`) +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/read.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/read.js new file mode 100644 index 00000000000000..f41b539df65dce --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/read.js @@ -0,0 +1,166 @@ +'use strict' + +const fs = require('fs/promises') +const fsm = require('fs-minipass') +const ssri = require('ssri') +const contentPath = require('./path') +const Pipeline = require('minipass-pipeline') + +module.exports = read + +const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024 +async function read (cache, integrity, opts = {}) { + const { size } = opts + const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { + // get size + const stat = await fs.stat(cpath) + return { stat, cpath, sri } + }) + if (typeof size === 'number' && stat.size !== size) { + throw sizeError(size, stat.size) + } + + if (stat.size > MAX_SINGLE_READ_SIZE) { + return readPipeline(cpath, stat.size, sri, new Pipeline()).concat() + } + + const data = await fs.readFile(cpath, { encoding: null }) + if (!ssri.checkData(data, sri)) { + throw integrityError(sri, cpath) + } + + return data +} + +const readPipeline = (cpath, size, sri, stream) => { + stream.push( + new fsm.ReadStream(cpath, { + size, + readSize: MAX_SINGLE_READ_SIZE, + }), + ssri.integrityStream({ + integrity: sri, + size, + }) + ) + return stream +} + +module.exports.stream = readStream +module.exports.readStream = readStream + +function readStream (cache, integrity, opts = {}) { + const { size } = opts + const stream = new Pipeline() + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { + // just stat to ensure it exists + const stat = await fs.stat(cpath) + return { stat, cpath, sri } + }) + if (typeof size === 'number' && size !== stat.size) { + return stream.emit('error', sizeError(size, stat.size)) + } + + return readPipeline(cpath, stat.size, sri, stream) + }).catch(err => stream.emit('error', err)) + + return stream +} + +module.exports.copy = copy + +function copy (cache, integrity, dest) { + return withContentSri(cache, integrity, (cpath, sri) => { + return fs.copyFile(cpath, dest) + }) +} + +module.exports.hasContent = hasContent + +async function hasContent (cache, integrity) { + if 
(!integrity) { + return false + } + + try { + return await withContentSri(cache, integrity, async (cpath, sri) => { + const stat = await fs.stat(cpath) + return { size: stat.size, sri, stat } + }) + } catch (err) { + if (err.code === 'ENOENT') { + return false + } + + if (err.code === 'EPERM') { + /* istanbul ignore else */ + if (process.platform !== 'win32') { + throw err + } else { + return false + } + } + } +} + +async function withContentSri (cache, integrity, fn) { + const sri = ssri.parse(integrity) + // If `integrity` has multiple entries, pick the first digest + // with available local data. + const algo = sri.pickAlgorithm() + const digests = sri[algo] + + if (digests.length <= 1) { + const cpath = contentPath(cache, digests[0]) + return fn(cpath, digests[0]) + } else { + // Can't use race here because a generic error can happen before + // a ENOENT error, and can happen before a valid result + const results = await Promise.all(digests.map(async (meta) => { + try { + return await withContentSri(cache, meta, fn) + } catch (err) { + if (err.code === 'ENOENT') { + return Object.assign( + new Error('No matching content found for ' + sri.toString()), + { code: 'ENOENT' } + ) + } + return err + } + })) + // Return the first non error if it is found + const result = results.find((r) => !(r instanceof Error)) + if (result) { + return result + } + + // Throw the No matching content found error + const enoentError = results.find((r) => r.code === 'ENOENT') + if (enoentError) { + throw enoentError + } + + // Throw generic error + throw results.find((r) => r instanceof Error) + } +} + +function sizeError (expected, found) { + /* eslint-disable-next-line max-len */ + const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) + err.expected = expected + err.found = found + err.code = 'EBADSIZE' + return err +} + +function integrityError (sri, path) { + const err = new Error(`Integrity verification failed for ${sri} (${path})`) + err.code = 'EINTEGRITY' + err.sri = sri + err.path = path + return err +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/rm.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/rm.js new file mode 100644 index 00000000000000..ce58d679e4cb25 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/rm.js @@ -0,0 +1,18 @@ +'use strict' + +const fs = require('fs/promises') +const contentPath = require('./path') +const { hasContent } = require('./read') + +module.exports = rm + +async function rm (cache, integrity) { + const content = await hasContent(cache, integrity) + // ~pretty~ sure we can't end up with a content lacking sri, but be safe + if (content && content.sri) { + await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true }) + return true + } else { + return false + } +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/write.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/write.js new file mode 100644 index 00000000000000..71461465812878 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/write.js @@ -0,0 +1,205 @@ +'use strict' + +const events = require('events') + +const contentPath = require('./path') +const fs = require('fs/promises') +const { moveFile } = require('@npmcli/fs') +const { Minipass } = require('minipass') +const Pipeline = require('minipass-pipeline') +const Flush = require('minipass-flush') +const path = require('path') 
+const ssri = require('ssri') +const uniqueFilename = require('unique-filename') +const fsm = require('fs-minipass') + +module.exports = write + +// Cache of move operations in process so we don't duplicate +const moveOperations = new Map() + +async function write (cache, data, opts = {}) { + const { algorithms, size, integrity } = opts + + if (typeof size === 'number' && data.length !== size) { + throw sizeError(size, data.length) + } + + const sri = ssri.fromData(data, algorithms ? { algorithms } : {}) + if (integrity && !ssri.checkData(data, integrity, opts)) { + throw checksumError(integrity, sri) + } + + for (const algo in sri) { + const tmp = await makeTmp(cache, opts) + const hash = sri[algo].toString() + try { + await fs.writeFile(tmp.target, data, { flag: 'wx' }) + await moveToDestination(tmp, cache, hash, opts) + } finally { + if (!tmp.moved) { + await fs.rm(tmp.target, { recursive: true, force: true }) + } + } + } + return { integrity: sri, size: data.length } +} + +module.exports.stream = writeStream + +// writes proxied to the 'inputStream' that is passed to the Promise +// 'end' is deferred until content is handled. +class CacacheWriteStream extends Flush { + constructor (cache, opts) { + super() + this.opts = opts + this.cache = cache + this.inputStream = new Minipass() + this.inputStream.on('error', er => this.emit('error', er)) + this.inputStream.on('drain', () => this.emit('drain')) + this.handleContentP = null + } + + write (chunk, encoding, cb) { + if (!this.handleContentP) { + this.handleContentP = handleContent( + this.inputStream, + this.cache, + this.opts + ) + } + return this.inputStream.write(chunk, encoding, cb) + } + + flush (cb) { + this.inputStream.end(() => { + if (!this.handleContentP) { + const e = new Error('Cache input stream was empty') + e.code = 'ENODATA' + // empty streams are probably emitting end right away. + // defer this one tick by rejecting a promise on it. 
+ return Promise.reject(e).catch(cb) + } + // eslint-disable-next-line promise/catch-or-return + this.handleContentP.then( + (res) => { + res.integrity && this.emit('integrity', res.integrity) + // eslint-disable-next-line promise/always-return + res.size !== null && this.emit('size', res.size) + cb() + }, + (er) => cb(er) + ) + }) + } +} + +function writeStream (cache, opts = {}) { + return new CacacheWriteStream(cache, opts) +} + +async function handleContent (inputStream, cache, opts) { + const tmp = await makeTmp(cache, opts) + try { + const res = await pipeToTmp(inputStream, cache, tmp.target, opts) + await moveToDestination( + tmp, + cache, + res.integrity, + opts + ) + return res + } finally { + if (!tmp.moved) { + await fs.rm(tmp.target, { recursive: true, force: true }) + } + } +} + +async function pipeToTmp (inputStream, cache, tmpTarget, opts) { + const outStream = new fsm.WriteStream(tmpTarget, { + flags: 'wx', + }) + + if (opts.integrityEmitter) { + // we need to create these all simultaneously since they can fire in any order + const [integrity, size] = await Promise.all([ + events.once(opts.integrityEmitter, 'integrity').then(res => res[0]), + events.once(opts.integrityEmitter, 'size').then(res => res[0]), + new Pipeline(inputStream, outStream).promise(), + ]) + return { integrity, size } + } + + let integrity + let size + const hashStream = ssri.integrityStream({ + integrity: opts.integrity, + algorithms: opts.algorithms, + size: opts.size, + }) + hashStream.on('integrity', i => { + integrity = i + }) + hashStream.on('size', s => { + size = s + }) + + const pipeline = new Pipeline(inputStream, hashStream, outStream) + await pipeline.promise() + return { integrity, size } +} + +async function makeTmp (cache, opts) { + const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) + await fs.mkdir(path.dirname(tmpTarget), { recursive: true }) + return { + target: tmpTarget, + moved: false, + } +} + +async function moveToDestination (tmp, cache, sri, opts) { + const destination = contentPath(cache, sri) + const destDir = path.dirname(destination) + if (moveOperations.has(destination)) { + return moveOperations.get(destination) + } + moveOperations.set( + destination, + fs.mkdir(destDir, { recursive: true }) + .then(async () => { + await moveFile(tmp.target, destination, { overwrite: false }) + tmp.moved = true + return tmp.moved + }) + .catch(err => { + if (!err.message.startsWith('The destination file exists')) { + throw Object.assign(err, { code: 'EEXIST' }) + } + }).finally(() => { + moveOperations.delete(destination) + }) + + ) + return moveOperations.get(destination) +} + +function sizeError (expected, found) { + /* eslint-disable-next-line max-len */ + const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) + err.expected = expected + err.found = found + err.code = 'EBADSIZE' + return err +} + +function checksumError (expected, found) { + const err = new Error(`Integrity check failed: + Wanted: ${expected} + Found: ${found}`) + err.code = 'EINTEGRITY' + err.expected = expected + err.found = found + return err +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/entry-index.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/entry-index.js new file mode 100644 index 00000000000000..722a37af5ce157 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/entry-index.js @@ -0,0 +1,330 @@ +'use strict' + +const crypto = require('crypto') +const { + appendFile, + 
mkdir, + readFile, + readdir, + rm, + writeFile, +} = require('fs/promises') +const { Minipass } = require('minipass') +const path = require('path') +const ssri = require('ssri') +const uniqueFilename = require('unique-filename') + +const contentPath = require('./content/path') +const hashToSegments = require('./util/hash-to-segments') +const indexV = require('../package.json')['cache-version'].index +const { moveFile } = require('@npmcli/fs') + +module.exports.NotFoundError = class NotFoundError extends Error { + constructor (cache, key) { + super(`No cache entry for ${key} found in ${cache}`) + this.code = 'ENOENT' + this.cache = cache + this.key = key + } +} + +module.exports.compact = compact + +async function compact (cache, key, matchFn, opts = {}) { + const bucket = bucketPath(cache, key) + const entries = await bucketEntries(bucket) + const newEntries = [] + // we loop backwards because the bottom-most result is the newest + // since we add new entries with appendFile + for (let i = entries.length - 1; i >= 0; --i) { + const entry = entries[i] + // a null integrity could mean either a delete was appended + // or the user has simply stored an index that does not map + // to any content. we determine if the user wants to keep the + // null integrity based on the validateEntry function passed in options. + // if the integrity is null and no validateEntry is provided, we break + // as we consider the null integrity to be a deletion of everything + // that came before it. + if (entry.integrity === null && !opts.validateEntry) { + break + } + + // if this entry is valid, and it is either the first entry or + // the newEntries array doesn't already include an entry that + // matches this one based on the provided matchFn, then we add + // it to the beginning of our list + if ((!opts.validateEntry || opts.validateEntry(entry) === true) && + (newEntries.length === 0 || + !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) { + newEntries.unshift(entry) + } + } + + const newIndex = '\n' + newEntries.map((entry) => { + const stringified = JSON.stringify(entry) + const hash = hashEntry(stringified) + return `${hash}\t${stringified}` + }).join('\n') + + const setup = async () => { + const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) + await mkdir(path.dirname(target), { recursive: true }) + return { + target, + moved: false, + } + } + + const teardown = async (tmp) => { + if (!tmp.moved) { + return rm(tmp.target, { recursive: true, force: true }) + } + } + + const write = async (tmp) => { + await writeFile(tmp.target, newIndex, { flag: 'wx' }) + await mkdir(path.dirname(bucket), { recursive: true }) + // we use @npmcli/move-file directly here because we + // want to overwrite the existing file + await moveFile(tmp.target, bucket) + tmp.moved = true + } + + // write the file atomically + const tmp = await setup() + try { + await write(tmp) + } finally { + await teardown(tmp) + } + + // we reverse the list we generated such that the newest + // entries come first in order to make looping through them easier + // the true passed to formatEntry tells it to keep null + // integrity values, if they made it this far it's because + // validateEntry returned true, and as such we should return it + return newEntries.reverse().map((entry) => formatEntry(cache, entry, true)) +} + +module.exports.insert = insert + +async function insert (cache, key, integrity, opts = {}) { + const { metadata, size, time } = opts + const bucket = bucketPath(cache, key) + const entry = { + key, + 
integrity: integrity && ssri.stringify(integrity), + time: time || Date.now(), + size, + metadata, + } + try { + await mkdir(path.dirname(bucket), { recursive: true }) + const stringified = JSON.stringify(entry) + // NOTE - Cleverness ahoy! + // + // This works because it's tremendously unlikely for an entry to corrupt + // another while still preserving the string length of the JSON in + // question. So, we just slap the length in there and verify it on read. + // + // Thanks to @isaacs for the whiteboarding session that ended up with + // this. + await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`) + } catch (err) { + if (err.code === 'ENOENT') { + return undefined + } + + throw err + } + return formatEntry(cache, entry) +} + +module.exports.find = find + +async function find (cache, key) { + const bucket = bucketPath(cache, key) + try { + const entries = await bucketEntries(bucket) + return entries.reduce((latest, next) => { + if (next && next.key === key) { + return formatEntry(cache, next) + } else { + return latest + } + }, null) + } catch (err) { + if (err.code === 'ENOENT') { + return null + } else { + throw err + } + } +} + +module.exports.delete = del + +function del (cache, key, opts = {}) { + if (!opts.removeFully) { + return insert(cache, key, null, opts) + } + + const bucket = bucketPath(cache, key) + return rm(bucket, { recursive: true, force: true }) +} + +module.exports.lsStream = lsStream + +function lsStream (cache) { + const indexDir = bucketDir(cache) + const stream = new Minipass({ objectMode: true }) + + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const buckets = await readdirOrEmpty(indexDir) + await Promise.all(buckets.map(async (bucket) => { + const bucketPath = path.join(indexDir, bucket) + const subbuckets = await readdirOrEmpty(bucketPath) + await Promise.all(subbuckets.map(async (subbucket) => { + const subbucketPath = path.join(bucketPath, subbucket) + + // "/cachename//./*" + const subbucketEntries = await readdirOrEmpty(subbucketPath) + await Promise.all(subbucketEntries.map(async (entry) => { + const entryPath = path.join(subbucketPath, entry) + try { + const entries = await bucketEntries(entryPath) + // using a Map here prevents duplicate keys from showing up + // twice, I guess? + const reduced = entries.reduce((acc, entry) => { + acc.set(entry.key, entry) + return acc + }, new Map()) + // reduced is a map of key => entry + for (const entry of reduced.values()) { + const formatted = formatEntry(cache, entry) + if (formatted) { + stream.write(formatted) + } + } + } catch (err) { + if (err.code === 'ENOENT') { + return undefined + } + throw err + } + })) + })) + })) + stream.end() + return stream + }).catch(err => stream.emit('error', err)) + + return stream +} + +module.exports.ls = ls + +async function ls (cache) { + const entries = await lsStream(cache).collect() + return entries.reduce((acc, xs) => { + acc[xs.key] = xs + return acc + }, {}) +} + +module.exports.bucketEntries = bucketEntries + +async function bucketEntries (bucket, filter) { + const data = await readFile(bucket, 'utf8') + return _bucketEntries(data, filter) +} + +function _bucketEntries (data, filter) { + const entries = [] + data.split('\n').forEach((entry) => { + if (!entry) { + return + } + + const pieces = entry.split('\t') + if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) { + // Hash is no good! Corruption or malice? Doesn't matter! 
+ // EJECT EJECT + return + } + let obj + try { + obj = JSON.parse(pieces[1]) + } catch (_) { + // eslint-ignore-next-line no-empty-block + } + // coverage disabled here, no need to test with an entry that parses to something falsey + // istanbul ignore else + if (obj) { + entries.push(obj) + } + }) + return entries +} + +module.exports.bucketDir = bucketDir + +function bucketDir (cache) { + return path.join(cache, `index-v${indexV}`) +} + +module.exports.bucketPath = bucketPath + +function bucketPath (cache, key) { + const hashed = hashKey(key) + return path.join.apply( + path, + [bucketDir(cache)].concat(hashToSegments(hashed)) + ) +} + +module.exports.hashKey = hashKey + +function hashKey (key) { + return hash(key, 'sha256') +} + +module.exports.hashEntry = hashEntry + +function hashEntry (str) { + return hash(str, 'sha1') +} + +function hash (str, digest) { + return crypto + .createHash(digest) + .update(str) + .digest('hex') +} + +function formatEntry (cache, entry, keepAll) { + // Treat null digests as deletions. They'll shadow any previous entries. + if (!entry.integrity && !keepAll) { + return null + } + + return { + key: entry.key, + integrity: entry.integrity, + path: entry.integrity ? contentPath(cache, entry.integrity) : undefined, + size: entry.size, + time: entry.time, + metadata: entry.metadata, + } +} + +function readdirOrEmpty (dir) { + return readdir(dir).catch((err) => { + if (err.code === 'ENOENT' || err.code === 'ENOTDIR') { + return [] + } + + throw err + }) +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/get.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/get.js new file mode 100644 index 00000000000000..80ec206c7ecaaa --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/get.js @@ -0,0 +1,170 @@ +'use strict' + +const Collect = require('minipass-collect') +const { Minipass } = require('minipass') +const Pipeline = require('minipass-pipeline') + +const index = require('./entry-index') +const memo = require('./memoization') +const read = require('./content/read') + +async function getData (cache, key, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return { + metadata: memoized.entry.metadata, + data: memoized.data, + integrity: memoized.entry.integrity, + size: memoized.entry.size, + } + } + + const entry = await index.find(cache, key, opts) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + const data = await read(cache, entry.integrity, { integrity, size }) + if (memoize) { + memo.put(cache, entry, data, opts) + } + + return { + data, + metadata: entry.metadata, + size: entry.size, + integrity: entry.integrity, + } +} +module.exports = getData + +async function getDataByDigest (cache, key, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get.byDigest(cache, key, opts) + if (memoized && memoize !== false) { + return memoized + } + + const res = await read(cache, key, { integrity, size }) + if (memoize) { + memo.put.byDigest(cache, key, res, opts) + } + return res +} +module.exports.byDigest = getDataByDigest + +const getMemoizedStream = (memoized) => { + const stream = new Minipass() + stream.on('newListener', function (ev, cb) { + ev === 'metadata' && cb(memoized.entry.metadata) + ev === 'integrity' && cb(memoized.entry.integrity) + ev === 'size' && cb(memoized.entry.size) + }) + stream.end(memoized.data) + return stream +} + +function getStream (cache, key, opts = 
{}) { + const { memoize, size } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return getMemoizedStream(memoized) + } + + const stream = new Pipeline() + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const entry = await index.find(cache, key) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + + stream.emit('metadata', entry.metadata) + stream.emit('integrity', entry.integrity) + stream.emit('size', entry.size) + stream.on('newListener', function (ev, cb) { + ev === 'metadata' && cb(entry.metadata) + ev === 'integrity' && cb(entry.integrity) + ev === 'size' && cb(entry.size) + }) + + const src = read.readStream( + cache, + entry.integrity, + { ...opts, size: typeof size !== 'number' ? entry.size : size } + ) + + if (memoize) { + const memoStream = new Collect.PassThrough() + memoStream.on('collect', data => memo.put(cache, entry, data, opts)) + stream.unshift(memoStream) + } + stream.unshift(src) + return stream + }).catch((err) => stream.emit('error', err)) + + return stream +} + +module.exports.stream = getStream + +function getStreamDigest (cache, integrity, opts = {}) { + const { memoize } = opts + const memoized = memo.get.byDigest(cache, integrity, opts) + if (memoized && memoize !== false) { + const stream = new Minipass() + stream.end(memoized) + return stream + } else { + const stream = read.readStream(cache, integrity, opts) + if (!memoize) { + return stream + } + + const memoStream = new Collect.PassThrough() + memoStream.on('collect', data => memo.put.byDigest( + cache, + integrity, + data, + opts + )) + return new Pipeline(stream, memoStream) + } +} + +module.exports.stream.byDigest = getStreamDigest + +function info (cache, key, opts = {}) { + const { memoize } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return Promise.resolve(memoized.entry) + } else { + return index.find(cache, key) + } +} +module.exports.info = info + +async function copy (cache, key, dest, opts = {}) { + const entry = await index.find(cache, key, opts) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + await read.copy(cache, entry.integrity, dest, opts) + return { + metadata: entry.metadata, + size: entry.size, + integrity: entry.integrity, + } +} + +module.exports.copy = copy + +async function copyByDigest (cache, key, dest, opts = {}) { + await read.copy(cache, key, dest, opts) + return key +} + +module.exports.copy.byDigest = copyByDigest + +module.exports.hasContent = read.hasContent diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/index.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/index.js new file mode 100644 index 00000000000000..c9b0da5f3a271b --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/index.js @@ -0,0 +1,42 @@ +'use strict' + +const get = require('./get.js') +const put = require('./put.js') +const rm = require('./rm.js') +const verify = require('./verify.js') +const { clearMemoized } = require('./memoization.js') +const tmp = require('./util/tmp.js') +const index = require('./entry-index.js') + +module.exports.index = {} +module.exports.index.compact = index.compact +module.exports.index.insert = index.insert + +module.exports.ls = index.ls +module.exports.ls.stream = index.lsStream + +module.exports.get = get +module.exports.get.byDigest = get.byDigest +module.exports.get.stream = get.stream +module.exports.get.stream.byDigest = 
get.stream.byDigest +module.exports.get.copy = get.copy +module.exports.get.copy.byDigest = get.copy.byDigest +module.exports.get.info = get.info +module.exports.get.hasContent = get.hasContent + +module.exports.put = put +module.exports.put.stream = put.stream + +module.exports.rm = rm.entry +module.exports.rm.all = rm.all +module.exports.rm.entry = module.exports.rm +module.exports.rm.content = rm.content + +module.exports.clearMemoized = clearMemoized + +module.exports.tmp = {} +module.exports.tmp.mkdir = tmp.mkdir +module.exports.tmp.withTmp = tmp.withTmp + +module.exports.verify = verify +module.exports.verify.lastRun = verify.lastRun diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/memoization.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/memoization.js new file mode 100644 index 00000000000000..0ff604a479c9c1 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/memoization.js @@ -0,0 +1,72 @@ +'use strict' + +const LRU = require('lru-cache') + +const MEMOIZED = new LRU({ + max: 500, + maxSize: 50 * 1024 * 1024, // 50MB + ttl: 3 * 60 * 1000, // 3 minutes + sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length, +}) + +module.exports.clearMemoized = clearMemoized + +function clearMemoized () { + const old = {} + MEMOIZED.forEach((v, k) => { + old[k] = v + }) + MEMOIZED.clear() + return old +} + +module.exports.put = put + +function put (cache, entry, data, opts) { + pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data }) + putDigest(cache, entry.integrity, data, opts) +} + +module.exports.put.byDigest = putDigest + +function putDigest (cache, integrity, data, opts) { + pickMem(opts).set(`digest:${cache}:${integrity}`, data) +} + +module.exports.get = get + +function get (cache, key, opts) { + return pickMem(opts).get(`key:${cache}:${key}`) +} + +module.exports.get.byDigest = getDigest + +function getDigest (cache, integrity, opts) { + return pickMem(opts).get(`digest:${cache}:${integrity}`) +} + +class ObjProxy { + constructor (obj) { + this.obj = obj + } + + get (key) { + return this.obj[key] + } + + set (key, val) { + this.obj[key] = val + } +} + +function pickMem (opts) { + if (!opts || !opts.memoize) { + return MEMOIZED + } else if (opts.memoize.get && opts.memoize.set) { + return opts.memoize + } else if (typeof opts.memoize === 'object') { + return new ObjProxy(opts.memoize) + } else { + return MEMOIZED + } +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/put.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/put.js new file mode 100644 index 00000000000000..9fc932d5f6dec5 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/put.js @@ -0,0 +1,80 @@ +'use strict' + +const index = require('./entry-index') +const memo = require('./memoization') +const write = require('./content/write') +const Flush = require('minipass-flush') +const { PassThrough } = require('minipass-collect') +const Pipeline = require('minipass-pipeline') + +const putOpts = (opts) => ({ + algorithms: ['sha512'], + ...opts, +}) + +module.exports = putData + +async function putData (cache, key, data, opts = {}) { + const { memoize } = opts + opts = putOpts(opts) + const res = await write(cache, data, opts) + const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size }) + if (memoize) { + memo.put(cache, entry, data, opts) + } + + return res.integrity +} + +module.exports.stream = putStream + +function putStream (cache, key, opts = 
{}) { + const { memoize } = opts + opts = putOpts(opts) + let integrity + let size + let error + + let memoData + const pipeline = new Pipeline() + // first item in the pipeline is the memoizer, because we need + // that to end first and get the collected data. + if (memoize) { + const memoizer = new PassThrough().on('collect', data => { + memoData = data + }) + pipeline.push(memoizer) + } + + // contentStream is a write-only, not a passthrough + // no data comes out of it. + const contentStream = write.stream(cache, opts) + .on('integrity', (int) => { + integrity = int + }) + .on('size', (s) => { + size = s + }) + .on('error', (err) => { + error = err + }) + + pipeline.push(contentStream) + + // last but not least, we write the index and emit hash and size, + // and memoize if we're doing that + pipeline.push(new Flush({ + async flush () { + if (!error) { + const entry = await index.insert(cache, key, integrity, { ...opts, size }) + if (memoize && memoData) { + memo.put(cache, entry, memoData, opts) + } + pipeline.emit('integrity', integrity) + pipeline.emit('size', size) + } + }, + })) + + return pipeline +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/rm.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/rm.js new file mode 100644 index 00000000000000..a94760c7cf2430 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/rm.js @@ -0,0 +1,31 @@ +'use strict' + +const { rm } = require('fs/promises') +const glob = require('./util/glob.js') +const index = require('./entry-index') +const memo = require('./memoization') +const path = require('path') +const rmContent = require('./content/rm') + +module.exports = entry +module.exports.entry = entry + +function entry (cache, key, opts) { + memo.clearMemoized() + return index.delete(cache, key, opts) +} + +module.exports.content = content + +function content (cache, integrity) { + memo.clearMemoized() + return rmContent(cache, integrity) +} + +module.exports.all = all + +async function all (cache) { + memo.clearMemoized() + const paths = await glob(path.join(cache, '*(content-*|index-*)'), { silent: true, nosort: true }) + return Promise.all(paths.map((p) => rm(p, { recursive: true, force: true }))) +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/glob.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/glob.js new file mode 100644 index 00000000000000..8500c1c16a429f --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/glob.js @@ -0,0 +1,7 @@ +'use strict' + +const { glob } = require('glob') +const path = require('path') + +const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep) +module.exports = (path, options) => glob(globify(path), options) diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/hash-to-segments.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/hash-to-segments.js new file mode 100644 index 00000000000000..445599b5038088 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/hash-to-segments.js @@ -0,0 +1,7 @@ +'use strict' + +module.exports = hashToSegments + +function hashToSegments (hash) { + return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)] +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/tmp.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/tmp.js new file mode 100644 index 00000000000000..0bf5302136ebeb --- /dev/null +++ 
b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/tmp.js @@ -0,0 +1,26 @@ +'use strict' + +const { withTempDir } = require('@npmcli/fs') +const fs = require('fs/promises') +const path = require('path') + +module.exports.mkdir = mktmpdir + +async function mktmpdir (cache, opts = {}) { + const { tmpPrefix } = opts + const tmpDir = path.join(cache, 'tmp') + await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' }) + // do not use path.join(), it drops the trailing / if tmpPrefix is unset + const target = `${tmpDir}${path.sep}${tmpPrefix || ''}` + return fs.mkdtemp(target, { owner: 'inherit' }) +} + +module.exports.withTmp = withTmp + +function withTmp (cache, opts, cb) { + if (!cb) { + cb = opts + opts = {} + } + return withTempDir(path.join(cache, 'tmp'), cb, opts) +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/verify.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/verify.js new file mode 100644 index 00000000000000..62e85c946490fc --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/verify.js @@ -0,0 +1,257 @@ +'use strict' + +const { + mkdir, + readFile, + rm, + stat, + truncate, + writeFile, +} = require('fs/promises') +const pMap = require('p-map') +const contentPath = require('./content/path') +const fsm = require('fs-minipass') +const glob = require('./util/glob.js') +const index = require('./entry-index') +const path = require('path') +const ssri = require('ssri') + +const hasOwnProperty = (obj, key) => + Object.prototype.hasOwnProperty.call(obj, key) + +const verifyOpts = (opts) => ({ + concurrency: 20, + log: { silly () {} }, + ...opts, +}) + +module.exports = verify + +async function verify (cache, opts) { + opts = verifyOpts(opts) + opts.log.silly('verify', 'verifying cache at', cache) + + const steps = [ + markStartTime, + fixPerms, + garbageCollect, + rebuildIndex, + cleanTmp, + writeVerifile, + markEndTime, + ] + + const stats = {} + for (const step of steps) { + const label = step.name + const start = new Date() + const s = await step(cache, opts) + if (s) { + Object.keys(s).forEach((k) => { + stats[k] = s[k] + }) + } + const end = new Date() + if (!stats.runTime) { + stats.runTime = {} + } + stats.runTime[label] = end - start + } + stats.runTime.total = stats.endTime - stats.startTime + opts.log.silly( + 'verify', + 'verification finished for', + cache, + 'in', + `${stats.runTime.total}ms` + ) + return stats +} + +async function markStartTime (cache, opts) { + return { startTime: new Date() } +} + +async function markEndTime (cache, opts) { + return { endTime: new Date() } +} + +async function fixPerms (cache, opts) { + opts.log.silly('verify', 'fixing cache permissions') + await mkdir(cache, { recursive: true }) + return null +} + +// Implements a naive mark-and-sweep tracing garbage collector. +// +// The algorithm is basically as follows: +// 1. Read (and filter) all index entries ("pointers") +// 2. Mark each integrity value as "live" +// 3. Read entire filesystem tree in `content-vX/` dir +// 4. If content is live, verify its checksum and delete it if it fails +// 5. If content is not marked as live, rm it. 
+// +async function garbageCollect (cache, opts) { + opts.log.silly('verify', 'garbage collecting content') + const indexStream = index.lsStream(cache) + const liveContent = new Set() + indexStream.on('data', (entry) => { + if (opts.filter && !opts.filter(entry)) { + return + } + + // integrity is stringified, re-parse it so we can get each hash + const integrity = ssri.parse(entry.integrity) + for (const algo in integrity) { + liveContent.add(integrity[algo].toString()) + } + }) + await new Promise((resolve, reject) => { + indexStream.on('end', resolve).on('error', reject) + }) + const contentDir = contentPath.contentDir(cache) + const files = await glob(path.join(contentDir, '**'), { + follow: false, + nodir: true, + nosort: true, + }) + const stats = { + verifiedContent: 0, + reclaimedCount: 0, + reclaimedSize: 0, + badContentCount: 0, + keptSize: 0, + } + await pMap( + files, + async (f) => { + const split = f.split(/[/\\]/) + const digest = split.slice(split.length - 3).join('') + const algo = split[split.length - 4] + const integrity = ssri.fromHex(digest, algo) + if (liveContent.has(integrity.toString())) { + const info = await verifyContent(f, integrity) + if (!info.valid) { + stats.reclaimedCount++ + stats.badContentCount++ + stats.reclaimedSize += info.size + } else { + stats.verifiedContent++ + stats.keptSize += info.size + } + } else { + // No entries refer to this content. We can delete. + stats.reclaimedCount++ + const s = await stat(f) + await rm(f, { recursive: true, force: true }) + stats.reclaimedSize += s.size + } + return stats + }, + { concurrency: opts.concurrency } + ) + return stats +} + +async function verifyContent (filepath, sri) { + const contentInfo = {} + try { + const { size } = await stat(filepath) + contentInfo.size = size + contentInfo.valid = true + await ssri.checkStream(new fsm.ReadStream(filepath), sri) + } catch (err) { + if (err.code === 'ENOENT') { + return { size: 0, valid: false } + } + if (err.code !== 'EINTEGRITY') { + throw err + } + + await rm(filepath, { recursive: true, force: true }) + contentInfo.valid = false + } + return contentInfo +} + +async function rebuildIndex (cache, opts) { + opts.log.silly('verify', 'rebuilding index') + const entries = await index.ls(cache) + const stats = { + missingContent: 0, + rejectedEntries: 0, + totalEntries: 0, + } + const buckets = {} + for (const k in entries) { + /* istanbul ignore else */ + if (hasOwnProperty(entries, k)) { + const hashed = index.hashKey(k) + const entry = entries[k] + const excluded = opts.filter && !opts.filter(entry) + excluded && stats.rejectedEntries++ + if (buckets[hashed] && !excluded) { + buckets[hashed].push(entry) + } else if (buckets[hashed] && excluded) { + // skip + } else if (excluded) { + buckets[hashed] = [] + buckets[hashed]._path = index.bucketPath(cache, k) + } else { + buckets[hashed] = [entry] + buckets[hashed]._path = index.bucketPath(cache, k) + } + } + } + await pMap( + Object.keys(buckets), + (key) => { + return rebuildBucket(cache, buckets[key], stats, opts) + }, + { concurrency: opts.concurrency } + ) + return stats +} + +async function rebuildBucket (cache, bucket, stats, opts) { + await truncate(bucket._path) + // This needs to be serialized because cacache explicitly + // lets very racy bucket conflicts clobber each other. 
+ for (const entry of bucket) { + const content = contentPath(cache, entry.integrity) + try { + await stat(content) + await index.insert(cache, entry.key, entry.integrity, { + metadata: entry.metadata, + size: entry.size, + time: entry.time, + }) + stats.totalEntries++ + } catch (err) { + if (err.code === 'ENOENT') { + stats.rejectedEntries++ + stats.missingContent++ + } else { + throw err + } + } + } +} + +function cleanTmp (cache, opts) { + opts.log.silly('verify', 'cleaning tmp directory') + return rm(path.join(cache, 'tmp'), { recursive: true, force: true }) +} + +async function writeVerifile (cache, opts) { + const verifile = path.join(cache, '_lastverified') + opts.log.silly('verify', 'writing verifile to ' + verifile) + return writeFile(verifile, `${Date.now()}`) +} + +module.exports.lastRun = lastRun + +async function lastRun (cache) { + const data = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' }) + return new Date(+data) +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/LICENSE new file mode 100644 index 00000000000000..de3226673c3874 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2013 Julian Gruber + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/index.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/index.js new file mode 100644 index 00000000000000..668fb1cb9d45a4 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/index.js @@ -0,0 +1,202 @@ +var balanced = require('balanced-match'); + +module.exports = expandTop; + +var escSlash = '\0SLASH'+Math.random()+'\0'; +var escOpen = '\0OPEN'+Math.random()+'\0'; +var escClose = '\0CLOSE'+Math.random()+'\0'; +var escComma = '\0COMMA'+Math.random()+'\0'; +var escPeriod = '\0PERIOD'+Math.random()+'\0'; + +function numeric(str) { + return parseInt(str, 10) == str + ? 
parseInt(str, 10) + : str.charCodeAt(0); +} + +function escapeBraces(str) { + return str.split('\\\\').join(escSlash) + .split('\\{').join(escOpen) + .split('\\}').join(escClose) + .split('\\,').join(escComma) + .split('\\.').join(escPeriod); +} + +function unescapeBraces(str) { + return str.split(escSlash).join('\\') + .split(escOpen).join('{') + .split(escClose).join('}') + .split(escComma).join(',') + .split(escPeriod).join('.'); +} + + +// Basically just str.split(","), but handling cases +// where we have nested braced sections, which should be +// treated as individual members, like {a,{b,c},d} +function parseCommaParts(str) { + if (!str) + return ['']; + + var parts = []; + var m = balanced('{', '}', str); + + if (!m) + return str.split(','); + + var pre = m.pre; + var body = m.body; + var post = m.post; + var p = pre.split(','); + + p[p.length-1] += '{' + body + '}'; + var postParts = parseCommaParts(post); + if (post.length) { + p[p.length-1] += postParts.shift(); + p.push.apply(p, postParts); + } + + parts.push.apply(parts, p); + + return parts; +} + +function expandTop(str) { + if (!str) + return []; + + // I don't know why Bash 4.3 does this, but it does. + // Anything starting with {} will have the first two bytes preserved + // but *only* at the top level, so {},a}b will not expand to anything, + // but a{},b}c will be expanded to [a}c,abc]. + // One could argue that this is a bug in Bash, but since the goal of + // this module is to match Bash's rules, we escape a leading {} + if (str.substr(0, 2) === '{}') { + str = '\\{\\}' + str.substr(2); + } + + return expand(escapeBraces(str), true).map(unescapeBraces); +} + +function embrace(str) { + return '{' + str + '}'; +} +function isPadded(el) { + return /^-?0\d/.test(el); +} + +function lte(i, y) { + return i <= y; +} +function gte(i, y) { + return i >= y; +} + +function expand(str, isTop) { + var expansions = []; + + var m = balanced('{', '}', str); + if (!m) return [str]; + + // no need to expand pre, since it is guaranteed to be free of brace-sets + var pre = m.pre; + var post = m.post.length + ? expand(m.post, false) + : ['']; + + if (/\$$/.test(m.pre)) { + for (var k = 0; k < post.length; k++) { + var expansion = pre+ '{' + m.body + '}' + post[k]; + expansions.push(expansion); + } + } else { + var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); + var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); + var isSequence = isNumericSequence || isAlphaSequence; + var isOptions = m.body.indexOf(',') >= 0; + if (!isSequence && !isOptions) { + // {a},b} + if (m.post.match(/,.*\}/)) { + str = m.pre + '{' + m.body + escClose + m.post; + return expand(str); + } + return [str]; + } + + var n; + if (isSequence) { + n = m.body.split(/\.\./); + } else { + n = parseCommaParts(m.body); + if (n.length === 1) { + // x{{a,b}}y ==> x{a}y x{b}y + n = expand(n[0], false).map(embrace); + if (n.length === 1) { + return post.map(function(p) { + return m.pre + n[0] + p; + }); + } + } + } + + // at this point, n is the parts, and we know it's not a comma set + // with a single entry. + var N; + + if (isSequence) { + var x = numeric(n[0]); + var y = numeric(n[1]); + var width = Math.max(n[0].length, n[1].length) + var incr = n.length == 3 + ? 
Math.abs(numeric(n[2])) + : 1; + var test = lte; + var reverse = y < x; + if (reverse) { + incr *= -1; + test = gte; + } + var pad = n.some(isPadded); + + N = []; + + for (var i = x; test(i, y); i += incr) { + var c; + if (isAlphaSequence) { + c = String.fromCharCode(i); + if (c === '\\') + c = ''; + } else { + c = String(i); + if (pad) { + var need = width - c.length; + if (need > 0) { + var z = new Array(need + 1).join('0'); + if (i < 0) + c = '-' + z + c.slice(1); + else + c = z + c; + } + } + } + N.push(c); + } + } else { + N = []; + + for (var j = 0; j < n.length; j++) { + N.push.apply(N, expand(n[j], false)); + } + } + + for (var j = 0; j < N.length; j++) { + for (var k = 0; k < post.length; k++) { + var expansion = pre + N[j] + post[k]; + if (!isTop || isSequence || expansion) + expansions.push(expansion); + } + } + } + + return expansions; +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/package.json b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/package.json new file mode 100644 index 00000000000000..7097d41e39de5d --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/package.json @@ -0,0 +1,46 @@ +{ + "name": "brace-expansion", + "description": "Brace expansion as known from sh/bash", + "version": "2.0.1", + "repository": { + "type": "git", + "url": "git://github.com/juliangruber/brace-expansion.git" + }, + "homepage": "https://github.com/juliangruber/brace-expansion", + "main": "index.js", + "scripts": { + "test": "tape test/*.js", + "gentest": "bash test/generate.sh", + "bench": "matcha test/perf/bench.js" + }, + "dependencies": { + "balanced-match": "^1.0.0" + }, + "devDependencies": { + "@c4312/matcha": "^1.3.1", + "tape": "^4.6.0" + }, + "keywords": [], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT", + "testling": { + "files": "test/*.js", + "browsers": [ + "ie/8..latest", + "firefox/20..latest", + "firefox/nightly", + "chrome/25..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + } +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/LICENSE new file mode 100644 index 00000000000000..ec7df93329abf3 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2009-2023 Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
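The brace-expansion module vendored above exports expandTop() as its single entry point; the comments in its index.js describe how comma sets, nested braces, and {x..y} sequences are split and expanded, and how a leading '{}' is preserved to match Bash. A minimal usage sketch of that exported function (illustrative, not part of the vendored package):

// Illustrative sketch only, not part of this patch.
const expand = require('brace-expansion')

expand('file-{a,b}.txt')  // => [ 'file-a.txt', 'file-b.txt' ]
expand('v{1..3}')         // => [ 'v1', 'v2', 'v3' ]
expand('{}a')             // => [ '{}a' ]  (leading '{}' preserved, matching Bash)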
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/README.md b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/README.md new file mode 100644 index 00000000000000..1bde1494664d4d --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/README.md @@ -0,0 +1,1214 @@ +# Glob + +Match files using the patterns the shell uses. + +The most correct and second fastest glob implementation in +JavaScript. (See **Comparison to Other JavaScript Glob +Implementations** at the bottom of this readme.) + +![a fun cartoon logo made of glob characters](https://github.com/isaacs/node-glob/raw/main/logo/glob.png) + +## Usage + +Install with npm + +``` +npm i glob +``` + +**Note** the npm package name is _not_ `node-glob` that's a +different thing that was abandoned years ago. Just `glob`. + +```js +// load using import +import { glob, globSync, globStream, globStreamSync, Glob } from 'glob' +// or using commonjs, that's fine, too +const { + glob, + globSync, + globStream, + globStreamSync, + Glob, +} = require('glob') + +// the main glob() and globSync() resolve/return array of filenames + +// all js files, but don't look in node_modules +const jsfiles = await glob('**/*.js', { ignore: 'node_modules/**' }) + +// pass in a signal to cancel the glob walk +const stopAfter100ms = await glob('**/*.css', { + signal: AbortSignal.timeout(100), +}) + +// multiple patterns supported as well +const images = await glob(['css/*.{png,jpeg}', 'public/*.{png,jpeg}']) + +// but of course you can do that with the glob pattern also +// the sync function is the same, just returns a string[] instead +// of Promise +const imagesAlt = globSync('{css,public}/*.{png,jpeg}') + +// you can also stream them, this is a Minipass stream +const filesStream = globStream(['**/*.dat', 'logs/**/*.log']) + +// construct a Glob object if you wanna do it that way, which +// allows for much faster walks if you have to look in the same +// folder multiple times. +const g = new Glob('**/foo', {}) +// glob objects are async iterators, can also do globIterate() or +// g.iterate(), same deal +for await (const file of g) { + console.log('found a foo file:', file) +} +// pass a glob as the glob options to reuse its settings and caches +const g2 = new Glob('**/bar', g) +// sync iteration works as well +for (const file of g2) { + console.log('found a bar file:', file) +} + +// you can also pass withFileTypes: true to get Path objects +// these are like a Dirent, but with some more added powers +// check out http://npm.im/path-scurry for more info on their API +const g3 = new Glob('**/baz/**', { withFileTypes: true }) +g3.stream().on('data', path => { + console.log( + 'got a path object', + path.fullpath(), + path.isDirectory(), + path.readdirSync().map(e => e.name) + ) +}) + +// if you use stat:true and withFileTypes, you can sort results +// by things like modified time, filter by permission mode, etc. +// All Stats fields will be available in that case. Slightly +// slower, though. 
+// For example: +const results = await glob('**', { stat: true, withFileTypes: true }) + +const timeSortedFiles = results + .sort((a, b) => a.mtimeMS - b.mtimeMS) + .map(path => path.fullpath()) + +const groupReadableFiles = results + .filter(path => path.mode & 0o040) + .map(path => path.fullpath()) + +// custom ignores can be done like this, for example by saying +// you'll ignore all markdown files, and all folders named 'docs' +const customIgnoreResults = await glob('**', { + ignore: { + ignored: p => /\.md$/.test(p.name), + childrenIgnored: p => p.isNamed('docs'), + }, +}) + +// another fun use case, only return files with the same name as +// their parent folder, plus either `.ts` or `.js` +const folderNamedModules = await glob('**/*.{ts,js}', { + ignore: { + ignored: p => { + const pp = p.parent + return !(p.isNamed(pp.name + '.ts') || p.isNamed(pp.name + '.js')) + }, + }, +}) + +// find all files edited in the last hour, to do this, we ignore +// all of them that are more than an hour old +const newFiles = await glob('**', { + // need stat so we have mtime + stat: true, + // only want the files, not the dirs + nodir: true, + ignore: { + ignored: p => { + return new Date() - p.mtime > 60 * 60 * 1000 + }, + // could add similar childrenIgnored here as well, but + // directory mtime is inconsistent across platforms, so + // probably better not to, unless you know the system + // tracks this reliably. + }, +}) +``` + +**Note** Glob patterns should always use `/` as a path separator, +even on Windows systems, as `\` is used to escape glob +characters. If you wish to use `\` as a path separator _instead +of_ using it as an escape character on Windows platforms, you may +set `windowsPathsNoEscape:true` in the options. In this mode, +special glob characters cannot be escaped, making it impossible +to match a literal `*` `?` and so on in filenames. + +## Command Line Interface + +``` +$ glob -h + +Usage: + glob [options] [ [ ...]] + +Expand the positional glob expression arguments into any matching file system +paths found. + + -c --cmd= + Run the command provided, passing the glob expression + matches as arguments. + + -A --all By default, the glob cli command will not expand any + arguments that are an exact match to a file on disk. + + This prevents double-expanding, in case the shell + expands an argument whose filename is a glob + expression. + + For example, if 'app/*.ts' would match 'app/[id].ts', + then on Windows powershell or cmd.exe, 'glob app/*.ts' + will expand to 'app/[id].ts', as expected. However, in + posix shells such as bash or zsh, the shell will first + expand 'app/*.ts' to a list of filenames. Then glob + will look for a file matching 'app/[id].ts' (ie, + 'app/i.ts' or 'app/d.ts'), which is unexpected. + + Setting '--all' prevents this behavior, causing glob to + treat ALL patterns as glob expressions to be expanded, + even if they are an exact match to a file on disk. + + When setting this option, be sure to enquote arguments + so that the shell will not expand them prior to passing + them to the glob command process. + + -a --absolute Expand to absolute paths + -d --dot-relative Prepend './' on relative matches + -m --mark Append a / on any directories matched + -x --posix Always resolve to posix style paths, using '/' as the + directory separator, even on Windows. Drive letter + absolute matches on Windows will be expanded to their + full resolved UNC maths, eg instead of 'C:\foo\bar', it + will expand to '//?/C:/foo/bar'. 
+ + -f --follow Follow symlinked directories when expanding '**' + -R --realpath Call 'fs.realpath' on all of the results. In the case + of an entry that cannot be resolved, the entry is + omitted. This incurs a slight performance penalty, of + course, because of the added system calls. + + -s --stat Call 'fs.lstat' on all entries, whether required or not + to determine if it's a valid match. + + -b --match-base Perform a basename-only match if the pattern does not + contain any slash characters. That is, '*.js' would be + treated as equivalent to '**/*.js', matching js files + in all directories. + + --dot Allow patterns to match files/directories that start + with '.', even if the pattern does not start with '.' + + --nobrace Do not expand {...} patterns + --nocase Perform a case-insensitive match. This defaults to + 'true' on macOS and Windows platforms, and false on all + others. + + Note: 'nocase' should only be explicitly set when it is + known that the filesystem's case sensitivity differs + from the platform default. If set 'true' on + case-insensitive file systems, then the walk may return + more or less results than expected. + + --nodir Do not match directories, only files. + + Note: to *only* match directories, append a '/' at the + end of the pattern. + + --noext Do not expand extglob patterns, such as '+(a|b)' + --noglobstar Do not expand '**' against multiple path portions. Ie, + treat it as a normal '*' instead. + + --windows-path-no-escape + Use '\' as a path separator *only*, and *never* as an + escape character. If set, all '\' characters are + replaced with '/' in the pattern. + + -D --max-depth= Maximum depth to traverse from the current working + directory + + -C --cwd= Current working directory to execute/match in + -r --root= A string path resolved against the 'cwd', which is used + as the starting point for absolute patterns that start + with '/' (but not drive letters or UNC paths on + Windows). + + Note that this *doesn't* necessarily limit the walk to + the 'root' directory, and doesn't affect the cwd + starting point for non-absolute patterns. A pattern + containing '..' will still be able to traverse out of + the root directory, if it is not an actual root + directory on the filesystem, and any non-absolute + patterns will still be matched in the 'cwd'. + + To start absolute and non-absolute patterns in the same + path, you can use '--root=' to set it to the empty + string. However, be aware that on Windows systems, a + pattern like 'x:/*' or '//host/share/*' will *always* + start in the 'x:/' or '//host/share/' directory, + regardless of the --root setting. + + --platform= Defaults to the value of 'process.platform' if + available, or 'linux' if not. Setting --platform=win32 + on non-Windows systems may cause strange behavior! + + -i --ignore= + Glob patterns to ignore Can be set multiple times + -v --debug Output a huge amount of noisy debug information about + patterns as they are parsed and used to match files. + + -h --help Show this usage information +``` + +## `glob(pattern: string | string[], options?: GlobOptions) => Promise` + +Perform an asynchronous glob search for the pattern(s) specified. +Returns +[Path](https://isaacs.github.io/path-scurry/classes/PathBase) +objects if the `withFileTypes` option is set to `true`. See below +for full options field desciptions. + +## `globSync(pattern: string | string[], options?: GlobOptions) => string[] | Path[]` + +Synchronous form of `glob()`. 
+ +Alias: `glob.sync()` + +## `globIterate(pattern: string | string[], options?: GlobOptions) => AsyncGenerator` + +Return an async iterator for walking glob pattern matches. + +Alias: `glob.iterate()` + +## `globIterateSync(pattern: string | string[], options?: GlobOptions) => Generator` + +Return a sync iterator for walking glob pattern matches. + +Alias: `glob.iterate.sync()`, `glob.sync.iterate()` + +## `globStream(pattern: string | string[], options?: GlobOptions) => Minipass` + +Return a stream that emits all the strings or `Path` objects and +then emits `end` when completed. + +Alias: `glob.stream()` + +## `globStreamSync(pattern: string | string[], options?: GlobOptions) => Minipass` + +Syncronous form of `globStream()`. Will read all the matches as +fast as you consume them, even all in a single tick if you +consume them immediately, but will still respond to backpressure +if they're not consumed immediately. + +Alias: `glob.stream.sync()`, `glob.sync.stream()` + +## `hasMagic(pattern: string | string[], options?: GlobOptions) => boolean` + +Returns `true` if the provided pattern contains any "magic" glob +characters, given the options provided. + +Brace expansion is not considered "magic" unless the +`magicalBraces` option is set, as brace expansion just turns one +string into an array of strings. So a pattern like `'x{a,b}y'` +would return `false`, because `'xay'` and `'xby'` both do not +contain any magic glob characters, and it's treated the same as +if you had called it on `['xay', 'xby']`. When +`magicalBraces:true` is in the options, brace expansion _is_ +treated as a pattern having magic. + +## `escape(pattern: string, options?: GlobOptions) => string` + +Escape all magic characters in a glob pattern, so that it will +only ever match literal strings + +If the `windowsPathsNoEscape` option is used, then characters are +escaped by wrapping in `[]`, because a magic character wrapped in +a character class can only be satisfied by that exact character. + +Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot +be escaped or unescaped. + +## `unescape(pattern: string, options?: GlobOptions) => string` + +Un-escape a glob string that may contain some escaped characters. + +If the `windowsPathsNoEscape` option is used, then square-brace +escapes are removed, but not backslash escapes. For example, it +will turn the string `'[*]'` into `*`, but it will not turn +`'\\*'` into `'*'`, because `\` is a path separator in +`windowsPathsNoEscape` mode. + +When `windowsPathsNoEscape` is not set, then both brace escapes +and backslash escapes are removed. + +Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot +be escaped or unescaped. + +## Class `Glob` + +An object that can perform glob pattern traversals. + +### `const g = new Glob(pattern: string | string[], options: GlobOptions)` + +Options object is required. + +See full options descriptions below. + +Note that a previous `Glob` object can be passed as the +`GlobOptions` to another `Glob` instantiation to re-use settings +and caches with a new pattern. + +Traversal functions can be called multiple times to run the walk +again. + +### `g.stream()` + +Stream results asynchronously, + +### `g.streamSync()` + +Stream results synchronously. + +### `g.iterate()` + +Default async iteration function. Returns an AsyncGenerator that +iterates over the results. + +### `g.iterateSync()` + +Default sync iteration function. Returns a Generator that +iterates over the results. 
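As a companion to the iteration methods above, a short sketch (assuming the glob v10 API documented in this README; top-level `await` implies an ES module context) showing one `Glob` object driven both asynchronously and synchronously:

```js
// Sketch only: async and sync iteration over a single Glob instance.
// The options object is required; an empty {} would also be fine.
import { Glob } from 'glob'

const g = new Glob('src/**/*.js', { nodir: true })

// async iteration, equivalent to g.iterate()
for await (const file of g) {
  console.log('async match:', file)
}

// sync iteration, equivalent to g.iterateSync(); the walk runs again,
// reusing the settings and caches built up by the async pass
for (const file of g) {
  console.log('sync match:', file)
}
```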
+ +### `g.walk()` + +Returns a Promise that resolves to the results array. + +### `g.walkSync()` + +Returns a results array. + +### Properties + +All options are stored as properties on the `Glob` object. + +- `opts` The options provided to the constructor. +- `patterns` An array of parsed immutable `Pattern` objects. + +## Options + +Exported as `GlobOptions` TypeScript interface. A `GlobOptions` +object may be provided to any of the exported methods, and must +be provided to the `Glob` constructor. + +All options are optional, boolean, and false by default, unless +otherwise noted. + +All resolved options are added to the Glob object as properties. + +If you are running many `glob` operations, you can pass a Glob +object as the `options` argument to a subsequent operation to +share the previously loaded cache. + +- `cwd` String path or `file://` string or URL object. The + current working directory in which to search. Defaults to + `process.cwd()`. See also: "Windows, CWDs, Drive Letters, and + UNC Paths", below. + + This option may be eiher a string path or a `file://` URL + object or string. + +- `root` A string path resolved against the `cwd` option, which + is used as the starting point for absolute patterns that start + with `/`, (but not drive letters or UNC paths on Windows). + + Note that this _doesn't_ necessarily limit the walk to the + `root` directory, and doesn't affect the cwd starting point for + non-absolute patterns. A pattern containing `..` will still be + able to traverse out of the root directory, if it is not an + actual root directory on the filesystem, and any non-absolute + patterns will be matched in the `cwd`. For example, the + pattern `/../*` with `{root:'/some/path'}` will return all + files in `/some`, not all files in `/some/path`. The pattern + `*` with `{root:'/some/path'}` will return all the entries in + the cwd, not the entries in `/some/path`. + + To start absolute and non-absolute patterns in the same + path, you can use `{root:''}`. However, be aware that on + Windows systems, a pattern like `x:/*` or `//host/share/*` will + _always_ start in the `x:/` or `//host/share` directory, + regardless of the `root` setting. + +- `windowsPathsNoEscape` Use `\\` as a path separator _only_, and + _never_ as an escape character. If set, all `\\` characters are + replaced with `/` in the pattern. + + Note that this makes it **impossible** to match against paths + containing literal glob pattern characters, but allows matching + with patterns constructed using `path.join()` and + `path.resolve()` on Windows platforms, mimicking the (buggy!) + behavior of Glob v7 and before on Windows. Please use with + caution, and be mindful of [the caveat below about Windows + paths](#windows). (For legacy reasons, this is also set if + `allowWindowsEscape` is set to the exact value `false`.) + +- `dot` Include `.dot` files in normal matches and `globstar` + matches. Note that an explicit dot in a portion of the pattern + will always match dot files. + +- `magicalBraces` Treat brace expansion like `{a,b}` as a "magic" + pattern. Has no effect if {@link nobrace} is set. + + Only has effect on the {@link hasMagic} function, no effect on + glob pattern matching itself. + +- `dotRelative` Prepend all relative path strings with `./` (or + `.\` on Windows). + + Without this option, returned relative paths are "bare", so + instead of returning `'./foo/bar'`, they are returned as + `'foo/bar'`. 
+ + Relative patterns starting with `'../'` are not prepended with + `./`, even if this option is set. + +- `mark` Add a `/` character to directory matches. Note that this + requires additional stat calls. + +- `nobrace` Do not expand `{a,b}` and `{1..3}` brace sets. + +- `noglobstar` Do not match `**` against multiple filenames. (Ie, + treat it as a normal `*` instead.) + +- `noext` Do not match "extglob" patterns such as `+(a|b)`. + +- `nocase` Perform a case-insensitive match. This defaults to + `true` on macOS and Windows systems, and `false` on all others. + + **Note** `nocase` should only be explicitly set when it is + known that the filesystem's case sensitivity differs from the + platform default. If set `true` on case-sensitive file + systems, or `false` on case-insensitive file systems, then the + walk may return more or less results than expected. + +- `maxDepth` Specify a number to limit the depth of the directory + traversal to this many levels below the `cwd`. + +- `matchBase` Perform a basename-only match if the pattern does + not contain any slash characters. That is, `*.js` would be + treated as equivalent to `**/*.js`, matching all js files in + all directories. + +- `nodir` Do not match directories, only files. (Note: to match + _only_ directories, put a `/` at the end of the pattern.) + +- `stat` Call `lstat()` on all entries, whether required or not + to determine whether it's a valid match. When used with + `withFileTypes`, this means that matches will include data such + as modified time, permissions, and so on. Note that this will + incur a performance cost due to the added system calls. + +- `ignore` string or string[], or an object with `ignore` and + `ignoreChildren` methods. + + If a string or string[] is provided, then this is treated as a + glob pattern or array of glob patterns to exclude from matches. + To ignore all children within a directory, as well as the entry + itself, append `'/**'` to the ignore pattern. + + **Note** `ignore` patterns are _always_ in `dot:true` mode, + regardless of any other settings. + + If an object is provided that has `ignored(path)` and/or + `childrenIgnored(path)` methods, then these methods will be + called to determine whether any Path is a match or if its + children should be traversed, respectively. + +- `follow` Follow symlinked directories when expanding `**` + patterns. This can result in a lot of duplicate references in + the presence of cyclic links, and make performance quite bad. + + By default, a `**` in a pattern will follow 1 symbolic link if + it is not the first item in the pattern, or none if it is the + first item in the pattern, following the same behavior as Bash. + +- `realpath` Set to true to call `fs.realpath` on all of the + results. In the case of an entry that cannot be resolved, the + entry is omitted. This incurs a slight performance penalty, of + course, because of the added system calls. + +- `absolute` Set to true to always receive absolute paths for + matched files. Set to `false` to always receive relative paths + for matched files. + + By default, when this option is not set, absolute paths are + returned for patterns that are absolute, and otherwise paths + are returned that are relative to the `cwd` setting. + + This does _not_ make an extra system call to get the realpath, + it only does string path resolution. + + `absolute` may not be used along with `withFileTypes`. + +- `posix` Set to true to use `/` as the path separator in + returned results. On posix systems, this has no effect. 
On + Windows systems, this will return `/` delimited path results, + and absolute paths will be returned in their full resolved UNC + path form, eg insted of `'C:\\foo\\bar'`, it will return + `//?/C:/foo/bar`. + +- `platform` Defaults to value of `process.platform` if + available, or `'linux'` if not. Setting `platform:'win32'` on + non-Windows systems may cause strange behavior. + +- `withFileTypes` Return [PathScurry](http://npm.im/path-scurry) + `Path` objects instead of strings. These are similar to a + NodeJS `Dirent` object, but with additional methods and + properties. + + `withFileTypes` may not be used along with `absolute`. + +- `signal` An AbortSignal which will cancel the Glob walk when + triggered. + +- `fs` An override object to pass in custom filesystem methods. + See [PathScurry docs](http://npm.im/path-scurry) for what can + be overridden. + +- `scurry` A [PathScurry](http://npm.im/path-scurry) object used + to traverse the file system. If the `nocase` option is set + explicitly, then any provided `scurry` object must match this + setting. + +## Glob Primer + +Much more information about glob pattern expansion can be found +by running `man bash` and searching for `Pattern Matching`. + +"Globs" are the patterns you type when you do stuff like `ls +*.js` on the command line, or put `build/*` in a `.gitignore` +file. + +Before parsing the path part patterns, braced sections are +expanded into a set. Braced sections start with `{` and end with +`}`, with 2 or more comma-delimited sections within. Braced +sections may contain slash characters, so `a{/b/c,bcd}` would +expand into `a/b/c` and `abcd`. + +The following characters have special magic meaning when used in +a path portion. With the exception of `**`, none of these match +path separators (ie, `/` on all platforms, and `\` on Windows). + +- `*` Matches 0 or more characters in a single path portion. + When alone in a path portion, it must match at least 1 + character. If `dot:true` is not specified, then `*` will not + match against a `.` character at the start of a path portion. +- `?` Matches 1 character. If `dot:true` is not specified, then + `?` will not match against a `.` character at the start of a + path portion. +- `[...]` Matches a range of characters, similar to a RegExp + range. If the first character of the range is `!` or `^` then + it matches any character not in the range. If the first + character is `]`, then it will be considered the same as `\]`, + rather than the end of the character class. +- `!(pattern|pattern|pattern)` Matches anything that does not + match any of the patterns provided. May _not_ contain `/` + characters. Similar to `*`, if alone in a path portion, then + the path portion must have at least one character. +- `?(pattern|pattern|pattern)` Matches zero or one occurrence of + the patterns provided. May _not_ contain `/` characters. +- `+(pattern|pattern|pattern)` Matches one or more occurrences of + the patterns provided. May _not_ contain `/` characters. +- `*(a|b|c)` Matches zero or more occurrences of the patterns + provided. May _not_ contain `/` characters. +- `@(pattern|pat*|pat?erN)` Matches exactly one of the patterns + provided. May _not_ contain `/` characters. +- `**` If a "globstar" is alone in a path portion, then it + matches zero or more directories and subdirectories searching + for matches. It does not crawl symlinked directories, unless + `{follow:true}` is passed in the options object. A pattern + like `a/b/**` will only match `a/b` if it is a directory. 
+ Follows 1 symbolic link if not the first item in the pattern, + or 0 if it is the first item, unless `follow:true` is set, in + which case it follows all symbolic links. + +`[:class:]` patterns are supported by this implementation, but +`[=c=]` and `[.symbol.]` style class patterns are not. + +### Dots + +If a file or directory path portion has a `.` as the first +character, then it will not match any glob pattern unless that +pattern's corresponding path part also has a `.` as its first +character. + +For example, the pattern `a/.*/c` would match the file at +`a/.b/c`. However the pattern `a/*/c` would not, because `*` does +not start with a dot character. + +You can make glob treat dots as normal characters by setting +`dot:true` in the options. + +### Basename Matching + +If you set `matchBase:true` in the options, and the pattern has +no slashes in it, then it will seek for any file anywhere in the +tree with a matching basename. For example, `*.js` would match +`test/simple/basic.js`. + +### Empty Sets + +If no matching files are found, then an empty array is returned. +This differs from the shell, where the pattern itself is +returned. For example: + +```sh +$ echo a*s*d*f +a*s*d*f +``` + +## Comparisons to other fnmatch/glob implementations + +While strict compliance with the existing standards is a +worthwhile goal, some discrepancies exist between node-glob and +other implementations, and are intentional. + +The double-star character `**` is supported by default, unless +the `noglobstar` flag is set. This is supported in the manner of +bsdglob and bash 5, where `**` only has special significance if +it is the only thing in a path part. That is, `a/**/b` will match +`a/x/y/b`, but `a/**b` will not. + +Note that symlinked directories are not traversed as part of a +`**`, though their contents may match against subsequent portions +of the pattern. This prevents infinite loops and duplicates and +the like. You can force glob to traverse symlinks with `**` by +setting `{follow:true}` in the options. + +There is no equivalent of the `nonull` option. A pattern that +does not find any matches simply resolves to nothing. (An empty +array, immediately ended stream, etc.) + +If brace expansion is not disabled, then it is performed before +any other interpretation of the glob pattern. Thus, a pattern +like `+(a|{b),c)}`, which would not be valid in bash or zsh, is +expanded **first** into the set of `+(a|b)` and `+(a|c)`, and +those patterns are checked for validity. Since those two are +valid, matching proceeds. + +The character class patterns `[:class:]` (posix standard named +classes) style class patterns are supported and unicode-aware, +but `[=c=]` (locale-specific character collation weight), and +`[.symbol.]` (collating symbol), are not. + +### Repeated Slashes + +Unlike Bash and zsh, repeated `/` are always coalesced into a +single path separator. + +### Comments and Negation + +Previously, this module let you mark a pattern as a "comment" if +it started with a `#` character, or a "negated" pattern if it +started with a `!` character. + +These options were deprecated in version 5, and removed in +version 6. + +To specify things that should not match, use the `ignore` option. + +## Windows + +**Please only use forward-slashes in glob expressions.** + +Though windows uses either `/` or `\` as its path separator, only +`/` characters are used by this glob implementation. You must use +forward-slashes **only** in glob expressions. 
Back-slashes will +always be interpreted as escape characters, not path separators. + +Results from absolute patterns such as `/foo/*` are mounted onto +the root setting using `path.join`. On windows, this will by +default result in `/foo/*` matching `C:\foo\bar.txt`. + +To automatically coerce all `\` characters to `/` in pattern +strings, **thus making it impossible to escape literal glob +characters**, you may set the `windowsPathsNoEscape` option to +`true`. + +### Windows, CWDs, Drive Letters, and UNC Paths + +On posix systems, when a pattern starts with `/`, any `cwd` +option is ignored, and the traversal starts at `/`, plus any +non-magic path portions specified in the pattern. + +On Windows systems, the behavior is similar, but the concept of +an "absolute path" is somewhat more involved. + +#### UNC Paths + +A UNC path may be used as the start of a pattern on Windows +platforms. For example, a pattern like: `//?/x:/*` will return +all file entries in the root of the `x:` drive. A pattern like +`//ComputerName/Share/*` will return all files in the associated +share. + +UNC path roots are always compared case insensitively. + +#### Drive Letters + +A pattern starting with a drive letter, like `c:/*`, will search +in that drive, regardless of any `cwd` option provided. + +If the pattern starts with `/`, and is not a UNC path, and there +is an explicit `cwd` option set with a drive letter, then the +drive letter in the `cwd` is used as the root of the directory +traversal. + +For example, `glob('/tmp', { cwd: 'c:/any/thing' })` will return +`['c:/tmp']` as the result. + +If an explicit `cwd` option is not provided, and the pattern +starts with `/`, then the traversal will run on the root of the +drive provided as the `cwd` option. (That is, it is the result of +`path.resolve('/')`.) + +## Race Conditions + +Glob searching, by its very nature, is susceptible to race +conditions, since it relies on directory walking. + +As a result, it is possible that a file that exists when glob +looks for it may have been deleted or modified by the time it +returns the result. + +By design, this implementation caches all readdir calls that it +makes, in order to cut down on system overhead. However, this +also makes it even more susceptible to races, especially if the +cache object is reused between glob calls. + +Users are thus advised not to use a glob result as a guarantee of +filesystem state in the face of rapid changes. For the vast +majority of operations, this is never a problem. + +### See Also: + +- `man sh` +- `man bash` [Pattern + Matching](https://www.gnu.org/software/bash/manual/html_node/Pattern-Matching.html) +- `man 3 fnmatch` +- `man 5 gitignore` +- [minimatch documentation](https://github.com/isaacs/minimatch) + +## Glob Logo + +Glob's logo was created by [Tanya +Brassie](http://tanyabrassie.com/). Logo files can be found +[here](https://github.com/isaacs/node-glob/tree/master/logo). + +The logo is licensed under a [Creative Commons +Attribution-ShareAlike 4.0 International +License](https://creativecommons.org/licenses/by-sa/4.0/). + +## Contributing + +Any change to behavior (including bugfixes) must come with a +test. + +Patches that fail tests or reduce performance will be rejected. 
+ +```sh +# to run tests +npm test + +# to re-generate test fixtures +npm run test-regen + +# run the benchmarks +npm run bench + +# to profile javascript +npm run prof +``` + +## Comparison to Other JavaScript Glob Implementations + +**tl;dr** + +- If you want glob matching that is as faithful as possible to + Bash pattern expansion semantics, and as fast as possible + within that constraint, _use this module_. +- If you are reasonably sure that the patterns you will encounter + are relatively simple, and want the absolutely fastest glob + matcher out there, _use [fast-glob](http://npm.im/fast-glob)_. +- If you are reasonably sure that the patterns you will encounter + are relatively simple, and want the convenience of + automatically respecting `.gitignore` files, _use + [globby](http://npm.im/globby)_. + +There are some other glob matcher libraries on npm, but these +three are (in my opinion, as of 2023) the best. + +--- + +**full explanation** + +Every library reflects a set of opinions and priorities in the +trade-offs it makes. Other than this library, I can personally +recommend both [globby](http://npm.im/globby) and +[fast-glob](http://npm.im/fast-glob), though they differ in their +benefits and drawbacks. + +Both have very nice APIs and are reasonably fast. + +`fast-glob` is, as far as I am aware, the fastest glob +implementation in JavaScript today. However, there are many +cases where the choices that `fast-glob` makes in pursuit of +speed mean that its results differ from the results returned by +Bash and other sh-like shells, which may be surprising. + +In my testing, `fast-glob` is around 10-20% faster than this +module when walking over 200k files nested 4 directories +deep[1](#fn-webscale). However, there are some inconsistencies +with Bash matching behavior that this module does not suffer +from: + +- `**` only matches files, not directories +- `..` path portions are not handled unless they appear at the + start of the pattern +- `./!()` will not match any files that _start_ with + ``, even if they do not match ``. For + example, `!(9).txt` will not match `9999.txt`. +- Some brace patterns in the middle of a pattern will result in + failing to find certain matches. +- Extglob patterns are allowed to contain `/` characters. + +Globby exhibits all of the same pattern semantics as fast-glob, +(as it is a wrapper around fast-glob) and is slightly slower than +node-glob (by about 10-20% in the benchmark test set, or in other +words, anywhere from 20-50% slower than fast-glob). However, it +adds some API conveniences that may be worth the costs. + +- Support for `.gitignore` and other ignore files. +- Support for negated globs (ie, patterns starting with `!` + rather than using a separate `ignore` option). + +The priority of this module is "correctness" in the sense of +performing a glob pattern expansion as faithfully as possible to +the behavior of Bash and other sh-like shells, with as much speed +as possible. + +Note that prior versions of `node-glob` are _not_ on this list. +Former versions of this module are far too slow for any cases +where performance matters at all, and were designed with APIs +that are extremely dated by current JavaScript standards. + +--- + +[1]: In the cases where this module +returns results and `fast-glob` doesn't, it's even faster, of +course. + +![lumpy space princess saying 'oh my GLOB'](https://github.com/isaacs/node-glob/raw/main/oh-my-glob.gif) + +### Benchmark Results + +First number is time, smaller is better. 
+ +Second number is the count of results returned. + +``` +--- pattern: '**' --- +~~ sync ~~ +node fast-glob sync 0m0.598s 200364 +node globby sync 0m0.765s 200364 +node current globSync mjs 0m0.683s 222656 +node current glob syncStream 0m0.649s 222656 +~~ async ~~ +node fast-glob async 0m0.350s 200364 +node globby async 0m0.509s 200364 +node current glob async mjs 0m0.463s 222656 +node current glob stream 0m0.411s 222656 + +--- pattern: '**/..' --- +~~ sync ~~ +node fast-glob sync 0m0.486s 0 +node globby sync 0m0.769s 200364 +node current globSync mjs 0m0.564s 2242 +node current glob syncStream 0m0.583s 2242 +~~ async ~~ +node fast-glob async 0m0.283s 0 +node globby async 0m0.512s 200364 +node current glob async mjs 0m0.299s 2242 +node current glob stream 0m0.312s 2242 + +--- pattern: './**/0/**/0/**/0/**/0/**/*.txt' --- +~~ sync ~~ +node fast-glob sync 0m0.490s 10 +node globby sync 0m0.517s 10 +node current globSync mjs 0m0.540s 10 +node current glob syncStream 0m0.550s 10 +~~ async ~~ +node fast-glob async 0m0.290s 10 +node globby async 0m0.296s 10 +node current glob async mjs 0m0.278s 10 +node current glob stream 0m0.302s 10 + +--- pattern: './**/[01]/**/[12]/**/[23]/**/[45]/**/*.txt' --- +~~ sync ~~ +node fast-glob sync 0m0.500s 160 +node globby sync 0m0.528s 160 +node current globSync mjs 0m0.556s 160 +node current glob syncStream 0m0.573s 160 +~~ async ~~ +node fast-glob async 0m0.283s 160 +node globby async 0m0.301s 160 +node current glob async mjs 0m0.306s 160 +node current glob stream 0m0.322s 160 + +--- pattern: './**/0/**/0/**/*.txt' --- +~~ sync ~~ +node fast-glob sync 0m0.502s 5230 +node globby sync 0m0.527s 5230 +node current globSync mjs 0m0.544s 5230 +node current glob syncStream 0m0.557s 5230 +~~ async ~~ +node fast-glob async 0m0.285s 5230 +node globby async 0m0.305s 5230 +node current glob async mjs 0m0.304s 5230 +node current glob stream 0m0.310s 5230 + +--- pattern: '**/*.txt' --- +~~ sync ~~ +node fast-glob sync 0m0.580s 200023 +node globby sync 0m0.771s 200023 +node current globSync mjs 0m0.685s 200023 +node current glob syncStream 0m0.649s 200023 +~~ async ~~ +node fast-glob async 0m0.349s 200023 +node globby async 0m0.509s 200023 +node current glob async mjs 0m0.427s 200023 +node current glob stream 0m0.388s 200023 + +--- pattern: '{**/*.txt,**/?/**/*.txt,**/?/**/?/**/*.txt,**/?/**/?/**/?/**/*.txt,**/?/**/?/**/?/**/?/**/*.txt}' --- +~~ sync ~~ +node fast-glob sync 0m0.589s 200023 +node globby sync 0m0.771s 200023 +node current globSync mjs 0m0.716s 200023 +node current glob syncStream 0m0.684s 200023 +~~ async ~~ +node fast-glob async 0m0.351s 200023 +node globby async 0m0.518s 200023 +node current glob async mjs 0m0.462s 200023 +node current glob stream 0m0.468s 200023 + +--- pattern: '**/5555/0000/*.txt' --- +~~ sync ~~ +node fast-glob sync 0m0.496s 1000 +node globby sync 0m0.519s 1000 +node current globSync mjs 0m0.539s 1000 +node current glob syncStream 0m0.567s 1000 +~~ async ~~ +node fast-glob async 0m0.285s 1000 +node globby async 0m0.299s 1000 +node current glob async mjs 0m0.305s 1000 +node current glob stream 0m0.301s 1000 + +--- pattern: './**/0/**/../[01]/**/0/../**/0/*.txt' --- +~~ sync ~~ +node fast-glob sync 0m0.484s 0 +node globby sync 0m0.507s 0 +node current globSync mjs 0m0.577s 4880 +node current glob syncStream 0m0.586s 4880 +~~ async ~~ +node fast-glob async 0m0.280s 0 +node globby async 0m0.298s 0 +node current glob async mjs 0m0.327s 4880 +node current glob stream 0m0.324s 4880 + +--- pattern: '**/????/????/????/????/*.txt' --- +~~ sync ~~ 
+node fast-glob sync 0m0.547s 100000 +node globby sync 0m0.673s 100000 +node current globSync mjs 0m0.626s 100000 +node current glob syncStream 0m0.618s 100000 +~~ async ~~ +node fast-glob async 0m0.315s 100000 +node globby async 0m0.414s 100000 +node current glob async mjs 0m0.366s 100000 +node current glob stream 0m0.345s 100000 + +--- pattern: './{**/?{/**/?{/**/?{/**/?,,,,},,,,},,,,},,,}/**/*.txt' --- +~~ sync ~~ +node fast-glob sync 0m0.588s 100000 +node globby sync 0m0.670s 100000 +node current globSync mjs 0m0.717s 200023 +node current glob syncStream 0m0.687s 200023 +~~ async ~~ +node fast-glob async 0m0.343s 100000 +node globby async 0m0.418s 100000 +node current glob async mjs 0m0.519s 200023 +node current glob stream 0m0.451s 200023 + +--- pattern: '**/!(0|9).txt' --- +~~ sync ~~ +node fast-glob sync 0m0.573s 160023 +node globby sync 0m0.731s 160023 +node current globSync mjs 0m0.680s 180023 +node current glob syncStream 0m0.659s 180023 +~~ async ~~ +node fast-glob async 0m0.345s 160023 +node globby async 0m0.476s 160023 +node current glob async mjs 0m0.427s 180023 +node current glob stream 0m0.388s 180023 + +--- pattern: './{*/**/../{*/**/../{*/**/../{*/**/../{*/**,,,,},,,,},,,,},,,,},,,,}/*.txt' --- +~~ sync ~~ +node fast-glob sync 0m0.483s 0 +node globby sync 0m0.512s 0 +node current globSync mjs 0m0.811s 200023 +node current glob syncStream 0m0.773s 200023 +~~ async ~~ +node fast-glob async 0m0.280s 0 +node globby async 0m0.299s 0 +node current glob async mjs 0m0.617s 200023 +node current glob stream 0m0.568s 200023 + +--- pattern: './*/**/../*/**/../*/**/../*/**/../*/**/../*/**/../*/**/../*/**/*.txt' --- +~~ sync ~~ +node fast-glob sync 0m0.485s 0 +node globby sync 0m0.507s 0 +node current globSync mjs 0m0.759s 200023 +node current glob syncStream 0m0.740s 200023 +~~ async ~~ +node fast-glob async 0m0.281s 0 +node globby async 0m0.297s 0 +node current glob async mjs 0m0.544s 200023 +node current glob stream 0m0.464s 200023 + +--- pattern: './*/**/../*/**/../*/**/../*/**/../*/**/*.txt' --- +~~ sync ~~ +node fast-glob sync 0m0.486s 0 +node globby sync 0m0.513s 0 +node current globSync mjs 0m0.734s 200023 +node current glob syncStream 0m0.696s 200023 +~~ async ~~ +node fast-glob async 0m0.286s 0 +node globby async 0m0.296s 0 +node current glob async mjs 0m0.506s 200023 +node current glob stream 0m0.483s 200023 + +--- pattern: './0/**/../1/**/../2/**/../3/**/../4/**/../5/**/../6/**/../7/**/*.txt' --- +~~ sync ~~ +node fast-glob sync 0m0.060s 0 +node globby sync 0m0.074s 0 +node current globSync mjs 0m0.067s 0 +node current glob syncStream 0m0.066s 0 +~~ async ~~ +node fast-glob async 0m0.060s 0 +node globby async 0m0.075s 0 +node current glob async mjs 0m0.066s 0 +node current glob stream 0m0.067s 0 + +--- pattern: './**/?/**/?/**/?/**/?/**/*.txt' --- +~~ sync ~~ +node fast-glob sync 0m0.568s 100000 +node globby sync 0m0.651s 100000 +node current globSync mjs 0m0.619s 100000 +node current glob syncStream 0m0.617s 100000 +~~ async ~~ +node fast-glob async 0m0.332s 100000 +node globby async 0m0.409s 100000 +node current glob async mjs 0m0.372s 100000 +node current glob stream 0m0.351s 100000 + +--- pattern: '**/*/**/*/**/*/**/*/**' --- +~~ sync ~~ +node fast-glob sync 0m0.603s 200113 +node globby sync 0m0.798s 200113 +node current globSync mjs 0m0.730s 222137 +node current glob syncStream 0m0.693s 222137 +~~ async ~~ +node fast-glob async 0m0.356s 200113 +node globby async 0m0.525s 200113 +node current glob async mjs 0m0.508s 222137 +node current glob stream 0m0.455s 222137 + 
+--- pattern: './**/*/**/*/**/*/**/*/**/*.txt' --- +~~ sync ~~ +node fast-glob sync 0m0.622s 200000 +node globby sync 0m0.792s 200000 +node current globSync mjs 0m0.722s 200000 +node current glob syncStream 0m0.695s 200000 +~~ async ~~ +node fast-glob async 0m0.369s 200000 +node globby async 0m0.527s 200000 +node current glob async mjs 0m0.502s 200000 +node current glob stream 0m0.481s 200000 + +--- pattern: '**/*.txt' --- +~~ sync ~~ +node fast-glob sync 0m0.588s 200023 +node globby sync 0m0.771s 200023 +node current globSync mjs 0m0.684s 200023 +node current glob syncStream 0m0.658s 200023 +~~ async ~~ +node fast-glob async 0m0.352s 200023 +node globby async 0m0.516s 200023 +node current glob async mjs 0m0.432s 200023 +node current glob stream 0m0.384s 200023 + +--- pattern: './**/**/**/**/**/**/**/**/*.txt' --- +~~ sync ~~ +node fast-glob sync 0m0.589s 200023 +node globby sync 0m0.766s 200023 +node current globSync mjs 0m0.682s 200023 +node current glob syncStream 0m0.652s 200023 +~~ async ~~ +node fast-glob async 0m0.352s 200023 +node globby async 0m0.523s 200023 +node current glob async mjs 0m0.436s 200023 +node current glob stream 0m0.380s 200023 + +--- pattern: '**/*/*.txt' --- +~~ sync ~~ +node fast-glob sync 0m0.592s 200023 +node globby sync 0m0.776s 200023 +node current globSync mjs 0m0.691s 200023 +node current glob syncStream 0m0.659s 200023 +~~ async ~~ +node fast-glob async 0m0.357s 200023 +node globby async 0m0.513s 200023 +node current glob async mjs 0m0.471s 200023 +node current glob stream 0m0.424s 200023 + +--- pattern: '**/*/**/*.txt' --- +~~ sync ~~ +node fast-glob sync 0m0.585s 200023 +node globby sync 0m0.766s 200023 +node current globSync mjs 0m0.694s 200023 +node current glob syncStream 0m0.664s 200023 +~~ async ~~ +node fast-glob async 0m0.350s 200023 +node globby async 0m0.514s 200023 +node current glob async mjs 0m0.472s 200023 +node current glob stream 0m0.424s 200023 + +--- pattern: '**/[0-9]/**/*.txt' --- +~~ sync ~~ +node fast-glob sync 0m0.544s 100000 +node globby sync 0m0.636s 100000 +node current globSync mjs 0m0.626s 100000 +node current glob syncStream 0m0.621s 100000 +~~ async ~~ +node fast-glob async 0m0.322s 100000 +node globby async 0m0.404s 100000 +node current glob async mjs 0m0.360s 100000 +node current glob stream 0m0.352s 100000 +``` diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/package.json b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/package.json new file mode 100644 index 00000000000000..c15df94a3582bf --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/package.json @@ -0,0 +1,4 @@ +{ + "version": "10.3.3", + "type": "commonjs" +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/bin.d.ts b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/bin.d.ts new file mode 100644 index 00000000000000..34e005228653c8 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/bin.d.ts @@ -0,0 +1,3 @@ +#!/usr/bin/env node +export {}; +//# sourceMappingURL=bin.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/bin.d.ts.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/bin.d.ts.map new file mode 100644 index 00000000000000..c10c656ec75109 --- /dev/null +++ 
b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/bin.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"bin.d.ts","sourceRoot":"","sources":["../../../src/bin.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/bin.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/bin.js new file mode 100755 index 00000000000000..4a8a88f2734d2e --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/bin.js @@ -0,0 +1,270 @@ +#!/usr/bin/env node +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const foreground_child_1 = require("foreground-child"); +const fs_1 = require("fs"); +const jackspeak_1 = require("jackspeak"); +const package_json_1 = require("../package.json"); +const index_js_1 = require("./index.js"); +const j = (0, jackspeak_1.jack)({ + usage: 'glob [options] [ [ ...]]', +}) + .description(` + Glob v${package_json_1.version} + + Expand the positional glob expression arguments into any matching file + system paths found. + `) + .opt({ + cmd: { + short: 'c', + hint: 'command', + description: `Run the command provided, passing the glob expression + matches as arguments.`, + }, +}) + .opt({ + default: { + short: 'p', + hint: 'pattern', + description: `If no positional arguments are provided, glob will use + this pattern`, + }, +}) + .flag({ + all: { + short: 'A', + description: `By default, the glob cli command will not expand any + arguments that are an exact match to a file on disk. + + This prevents double-expanding, in case the shell expands + an argument whose filename is a glob expression. + + For example, if 'app/*.ts' would match 'app/[id].ts', then + on Windows powershell or cmd.exe, 'glob app/*.ts' will + expand to 'app/[id].ts', as expected. However, in posix + shells such as bash or zsh, the shell will first expand + 'app/*.ts' to a list of filenames. Then glob will look + for a file matching 'app/[id].ts' (ie, 'app/i.ts' or + 'app/d.ts'), which is unexpected. + + Setting '--all' prevents this behavior, causing glob + to treat ALL patterns as glob expressions to be expanded, + even if they are an exact match to a file on disk. + + When setting this option, be sure to enquote arguments + so that the shell will not expand them prior to passing + them to the glob command process. + `, + }, + absolute: { + short: 'a', + description: 'Expand to absolute paths', + }, + 'dot-relative': { + short: 'd', + description: `Prepend './' on relative matches`, + }, + mark: { + short: 'm', + description: `Append a / on any directories matched`, + }, + posix: { + short: 'x', + description: `Always resolve to posix style paths, using '/' as the + directory separator, even on Windows. Drive letter + absolute matches on Windows will be expanded to their + full resolved UNC maths, eg instead of 'C:\\foo\\bar', + it will expand to '//?/C:/foo/bar'. + `, + }, + follow: { + short: 'f', + description: `Follow symlinked directories when expanding '**'`, + }, + realpath: { + short: 'R', + description: `Call 'fs.realpath' on all of the results. In the case + of an entry that cannot be resolved, the entry is + omitted. 
This incurs a slight performance penalty, of + course, because of the added system calls.`, + }, + stat: { + short: 's', + description: `Call 'fs.lstat' on all entries, whether required or not + to determine if it's a valid match.`, + }, + 'match-base': { + short: 'b', + description: `Perform a basename-only match if the pattern does not + contain any slash characters. That is, '*.js' would be + treated as equivalent to '**/*.js', matching js files + in all directories. + `, + }, + dot: { + description: `Allow patterns to match files/directories that start + with '.', even if the pattern does not start with '.' + `, + }, + nobrace: { + description: 'Do not expand {...} patterns', + }, + nocase: { + description: `Perform a case-insensitive match. This defaults to + 'true' on macOS and Windows platforms, and false on + all others. + + Note: 'nocase' should only be explicitly set when it is + known that the filesystem's case sensitivity differs + from the platform default. If set 'true' on + case-insensitive file systems, then the walk may return + more or less results than expected. + `, + }, + nodir: { + description: `Do not match directories, only files. + + Note: to *only* match directories, append a '/' at the + end of the pattern. + `, + }, + noext: { + description: `Do not expand extglob patterns, such as '+(a|b)'`, + }, + noglobstar: { + description: `Do not expand '**' against multiple path portions. + Ie, treat it as a normal '*' instead.`, + }, + 'windows-path-no-escape': { + description: `Use '\\' as a path separator *only*, and *never* as an + escape character. If set, all '\\' characters are + replaced with '/' in the pattern.`, + }, +}) + .num({ + 'max-depth': { + short: 'D', + description: `Maximum depth to traverse from the current + working directory`, + }, +}) + .opt({ + cwd: { + short: 'C', + description: 'Current working directory to execute/match in', + default: process.cwd(), + }, + root: { + short: 'r', + description: `A string path resolved against the 'cwd', which is + used as the starting point for absolute patterns that + start with '/' (but not drive letters or UNC paths + on Windows). + + Note that this *doesn't* necessarily limit the walk to + the 'root' directory, and doesn't affect the cwd + starting point for non-absolute patterns. A pattern + containing '..' will still be able to traverse out of + the root directory, if it is not an actual root directory + on the filesystem, and any non-absolute patterns will + still be matched in the 'cwd'. + + To start absolute and non-absolute patterns in the same + path, you can use '--root=' to set it to the empty + string. However, be aware that on Windows systems, a + pattern like 'x:/*' or '//host/share/*' will *always* + start in the 'x:/' or '//host/share/' directory, + regardless of the --root setting. + `, + }, + platform: { + description: `Defaults to the value of 'process.platform' if + available, or 'linux' if not. 
Setting --platform=win32 + on non-Windows systems may cause strange behavior!`, + validate: v => new Set([ + 'aix', + 'android', + 'darwin', + 'freebsd', + 'haiku', + 'linux', + 'openbsd', + 'sunos', + 'win32', + 'cygwin', + 'netbsd', + ]).has(v), + }, +}) + .optList({ + ignore: { + short: 'i', + description: `Glob patterns to ignore`, + }, +}) + .flag({ + debug: { + short: 'v', + description: `Output a huge amount of noisy debug information about + patterns as they are parsed and used to match files.`, + }, +}) + .flag({ + help: { + short: 'h', + description: 'Show this usage information', + }, +}); +try { + const { positionals, values } = j.parse(); + if (values.help) { + console.log(j.usage()); + process.exit(0); + } + if (positionals.length === 0 && !values.default) + throw 'No patterns provided'; + if (positionals.length === 0 && values.default) + positionals.push(values.default); + const patterns = values.all + ? positionals + : positionals.filter(p => !(0, fs_1.existsSync)(p)); + const matches = values.all ? [] : positionals.filter(p => (0, fs_1.existsSync)(p)); + const stream = (0, index_js_1.globStream)(patterns, { + absolute: values.absolute, + cwd: values.cwd, + dot: values.dot, + dotRelative: values['dot-relative'], + follow: values.follow, + ignore: values.ignore, + mark: values.mark, + matchBase: values['match-base'], + maxDepth: values['max-depth'], + nobrace: values.nobrace, + nocase: values.nocase, + nodir: values.nodir, + noext: values.noext, + noglobstar: values.noglobstar, + platform: values.platform, + realpath: values.realpath, + root: values.root, + stat: values.stat, + debug: values.debug, + posix: values.posix, + }); + const cmd = values.cmd; + if (!cmd) { + matches.forEach(m => console.log(m)); + stream.on('data', f => console.log(f)); + } + else { + stream.on('data', f => matches.push(f)); + stream.on('end', () => (0, foreground_child_1.foregroundChild)(cmd, matches, { shell: true })); + } +} +catch (e) { + console.error(j.usage()); + console.error(e instanceof Error ? 
e.message : String(e)); + process.exit(1); +} +//# sourceMappingURL=bin.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/bin.js.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/bin.js.map new file mode 100644 index 00000000000000..e189acfd01b1a7 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/bin.js.map @@ -0,0 +1 @@ +{"version":3,"file":"bin.js","sourceRoot":"","sources":["../../../src/bin.ts"],"names":[],"mappings":";;;AACA,uDAAkD;AAClD,2BAA+B;AAC/B,yCAAgC;AAChC,kDAAyC;AACzC,yCAAuC;AAEvC,MAAM,CAAC,GAAG,IAAA,gBAAI,EAAC;IACb,KAAK,EAAE,4CAA4C;CACpD,CAAC;KACC,WAAW,CACV;YACQ,sBAAO;;;;GAIhB,CACA;KACA,GAAG,CAAC;IACH,GAAG,EAAE;QACH,KAAK,EAAE,GAAG;QACV,IAAI,EAAE,SAAS;QACf,WAAW,EAAE;0CACuB;KACrC;CACF,CAAC;KACD,GAAG,CAAC;IACH,OAAO,EAAE;QACP,KAAK,EAAE,GAAG;QACV,IAAI,EAAE,SAAS;QACf,WAAW,EAAE;iCACc;KAC5B;CACF,CAAC;KACD,IAAI,CAAC;IACJ,GAAG,EAAE;QACH,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;;;;;;;;;;;;;;;;;;;OAqBZ;KACF;IACD,QAAQ,EAAE;QACR,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,0BAA0B;KACxC;IACD,cAAc,EAAE;QACd,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,kCAAkC;KAChD;IACD,IAAI,EAAE;QACJ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,uCAAuC;KACrD;IACD,KAAK,EAAE;QACL,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;;;OAKZ;KACF;IAED,MAAM,EAAE;QACN,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,kDAAkD;KAChE;IACD,QAAQ,EAAE;QACR,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;+DAG4C;KAC1D;IACD,IAAI,EAAE;QACJ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;wDACqC;KACnD;IACD,YAAY,EAAE;QACZ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;;OAIZ;KACF;IAED,GAAG,EAAE;QACH,WAAW,EAAE;;OAEZ;KACF;IACD,OAAO,EAAE;QACP,WAAW,EAAE,8BAA8B;KAC5C;IACD,MAAM,EAAE;QACN,WAAW,EAAE;;;;;;;;;OASZ;KACF;IACD,KAAK,EAAE;QACL,WAAW,EAAE;;;;OAIZ;KACF;IACD,KAAK,EAAE;QACL,WAAW,EAAE,kDAAkD;KAChE;IACD,UAAU,EAAE;QACV,WAAW,EAAE;0DACuC;KACrD;IACD,wBAAwB,EAAE;QACxB,WAAW,EAAE;;sDAEmC;KACjD;CACF,CAAC;KACD,GAAG,CAAC;IACH,WAAW,EAAE;QACX,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;sCACmB;KACjC;CACF,CAAC;KACD,GAAG,CAAC;IACH,GAAG,EAAE;QACH,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,+CAA+C;QAC5D,OAAO,EAAE,OAAO,CAAC,GAAG,EAAE;KACvB;IACD,IAAI,EAAE;QACJ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;;;;;;;;;;;;;;;;;OAmBZ;KACF;IACD,QAAQ,EAAE;QACR,WAAW,EAAE;;uEAEoD;QACjE,QAAQ,EAAE,CAAC,CAAC,EAAE,CACZ,IAAI,GAAG,CAAC;YACN,KAAK;YACL,SAAS;YACT,QAAQ;YACR,SAAS;YACT,OAAO;YACP,OAAO;YACP,SAAS;YACT,OAAO;YACP,OAAO;YACP,QAAQ;YACR,QAAQ;SACT,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;KACZ;CACF,CAAC;KACD,OAAO,CAAC;IACP,MAAM,EAAE;QACN,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,yBAAyB;KACvC;CACF,CAAC;KACD,IAAI,CAAC;IACJ,KAAK,EAAE;QACL,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;yEACsD;KACpE;CACF,CAAC;KACD,IAAI,CAAC;IACJ,IAAI,EAAE;QACJ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,6BAA6B;KAC3C;CACF,CAAC,CAAA;AAEJ,IAAI;IACF,MAAM,EAAE,WAAW,EAAE,MAAM,EAAE,GAAG,CAAC,CAAC,KAAK,EAAE,CAAA;IACzC,IAAI,MAAM,CAAC,IAAI,EAAE;QACf,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAA;QACtB,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;KAChB;IACD,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO;QAC7C,MAAM,sBAAsB,CAAA;IAC9B,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,OAAO;QAC5C,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAA;IAClC,MAAM,QAAQ,GAAG,MAAM,CAAC,GAAG;QACzB,CAAC,CAAC,WAAW;QACb,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,IAAA,eAAU,EAAC,CAAC,CAAC,CAAC,CAAA;IAC3C,MAAM,OAAO,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,IAAA,eAAU,EAAC,CAAC,CAAC,CAAC,CAAA;IACxE,MAAM,MAAM,GAAG,IAAA,qBAAU,EAAC,QAAQ,EAAE;QAClC,QAAQ,EAAE,MAAM,CAAC,QAAQ;QACzB,GAAG,EAAE,MAAM,CAAC,GAAG;QACf,GAAG,EAAE,MAAM,CAAC,GAAG;QACf,WAAW
,EAAE,MAAM,CAAC,cAAc,CAAC;QACnC,MAAM,EAAE,MAAM,CAAC,MAAM;QACrB,MAAM,EAAE,MAAM,CAAC,MAAM;QACrB,IAAI,EAAE,MAAM,CAAC,IAAI;QACjB,SAAS,EAAE,MAAM,CAAC,YAAY,CAAC;QAC/B,QAAQ,EAAE,MAAM,CAAC,WAAW,CAAC;QAC7B,OAAO,EAAE,MAAM,CAAC,OAAO;QACvB,MAAM,EAAE,MAAM,CAAC,MAAM;QACrB,KAAK,EAAE,MAAM,CAAC,KAAK;QACnB,KAAK,EAAE,MAAM,CAAC,KAAK;QACnB,UAAU,EAAE,MAAM,CAAC,UAAU;QAC7B,QAAQ,EAAE,MAAM,CAAC,QAAuC;QACxD,QAAQ,EAAE,MAAM,CAAC,QAAQ;QACzB,IAAI,EAAE,MAAM,CAAC,IAAI;QACjB,IAAI,EAAE,MAAM,CAAC,IAAI;QACjB,KAAK,EAAE,MAAM,CAAC,KAAK;QACnB,KAAK,EAAE,MAAM,CAAC,KAAK;KACpB,CAAC,CAAA;IAEF,MAAM,GAAG,GAAG,MAAM,CAAC,GAAG,CAAA;IACtB,IAAI,CAAC,GAAG,EAAE;QACR,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAA;QACpC,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAA;KACvC;SAAM;QACL,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAA;QACvC,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE,CAAC,IAAA,kCAAe,EAAC,GAAG,EAAE,OAAO,EAAE,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC,CAAA;KACvE;CACF;AAAC,OAAO,CAAC,EAAE;IACV,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAA;IACxB,OAAO,CAAC,KAAK,CAAC,CAAC,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAA;IACzD,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;CAChB","sourcesContent":["#!/usr/bin/env node\nimport { foregroundChild } from 'foreground-child'\nimport { existsSync } from 'fs'\nimport { jack } from 'jackspeak'\nimport { version } from '../package.json'\nimport { globStream } from './index.js'\n\nconst j = jack({\n usage: 'glob [options] [ [ ...]]',\n})\n .description(\n `\n Glob v${version}\n\n Expand the positional glob expression arguments into any matching file\n system paths found.\n `\n )\n .opt({\n cmd: {\n short: 'c',\n hint: 'command',\n description: `Run the command provided, passing the glob expression\n matches as arguments.`,\n },\n })\n .opt({\n default: {\n short: 'p',\n hint: 'pattern',\n description: `If no positional arguments are provided, glob will use\n this pattern`,\n },\n })\n .flag({\n all: {\n short: 'A',\n description: `By default, the glob cli command will not expand any\n arguments that are an exact match to a file on disk.\n\n This prevents double-expanding, in case the shell expands\n an argument whose filename is a glob expression.\n\n For example, if 'app/*.ts' would match 'app/[id].ts', then\n on Windows powershell or cmd.exe, 'glob app/*.ts' will\n expand to 'app/[id].ts', as expected. However, in posix\n shells such as bash or zsh, the shell will first expand\n 'app/*.ts' to a list of filenames. Then glob will look\n for a file matching 'app/[id].ts' (ie, 'app/i.ts' or\n 'app/d.ts'), which is unexpected.\n\n Setting '--all' prevents this behavior, causing glob\n to treat ALL patterns as glob expressions to be expanded,\n even if they are an exact match to a file on disk.\n\n When setting this option, be sure to enquote arguments\n so that the shell will not expand them prior to passing\n them to the glob command process.\n `,\n },\n absolute: {\n short: 'a',\n description: 'Expand to absolute paths',\n },\n 'dot-relative': {\n short: 'd',\n description: `Prepend './' on relative matches`,\n },\n mark: {\n short: 'm',\n description: `Append a / on any directories matched`,\n },\n posix: {\n short: 'x',\n description: `Always resolve to posix style paths, using '/' as the\n directory separator, even on Windows. 
Drive letter\n absolute matches on Windows will be expanded to their\n full resolved UNC maths, eg instead of 'C:\\\\foo\\\\bar',\n it will expand to '//?/C:/foo/bar'.\n `,\n },\n\n follow: {\n short: 'f',\n description: `Follow symlinked directories when expanding '**'`,\n },\n realpath: {\n short: 'R',\n description: `Call 'fs.realpath' on all of the results. In the case\n of an entry that cannot be resolved, the entry is\n omitted. This incurs a slight performance penalty, of\n course, because of the added system calls.`,\n },\n stat: {\n short: 's',\n description: `Call 'fs.lstat' on all entries, whether required or not\n to determine if it's a valid match.`,\n },\n 'match-base': {\n short: 'b',\n description: `Perform a basename-only match if the pattern does not\n contain any slash characters. That is, '*.js' would be\n treated as equivalent to '**/*.js', matching js files\n in all directories.\n `,\n },\n\n dot: {\n description: `Allow patterns to match files/directories that start\n with '.', even if the pattern does not start with '.'\n `,\n },\n nobrace: {\n description: 'Do not expand {...} patterns',\n },\n nocase: {\n description: `Perform a case-insensitive match. This defaults to\n 'true' on macOS and Windows platforms, and false on\n all others.\n\n Note: 'nocase' should only be explicitly set when it is\n known that the filesystem's case sensitivity differs\n from the platform default. If set 'true' on\n case-insensitive file systems, then the walk may return\n more or less results than expected.\n `,\n },\n nodir: {\n description: `Do not match directories, only files.\n\n Note: to *only* match directories, append a '/' at the\n end of the pattern.\n `,\n },\n noext: {\n description: `Do not expand extglob patterns, such as '+(a|b)'`,\n },\n noglobstar: {\n description: `Do not expand '**' against multiple path portions.\n Ie, treat it as a normal '*' instead.`,\n },\n 'windows-path-no-escape': {\n description: `Use '\\\\' as a path separator *only*, and *never* as an\n escape character. If set, all '\\\\' characters are\n replaced with '/' in the pattern.`,\n },\n })\n .num({\n 'max-depth': {\n short: 'D',\n description: `Maximum depth to traverse from the current\n working directory`,\n },\n })\n .opt({\n cwd: {\n short: 'C',\n description: 'Current working directory to execute/match in',\n default: process.cwd(),\n },\n root: {\n short: 'r',\n description: `A string path resolved against the 'cwd', which is\n used as the starting point for absolute patterns that\n start with '/' (but not drive letters or UNC paths\n on Windows).\n\n Note that this *doesn't* necessarily limit the walk to\n the 'root' directory, and doesn't affect the cwd\n starting point for non-absolute patterns. A pattern\n containing '..' will still be able to traverse out of\n the root directory, if it is not an actual root directory\n on the filesystem, and any non-absolute patterns will\n still be matched in the 'cwd'.\n\n To start absolute and non-absolute patterns in the same\n path, you can use '--root=' to set it to the empty\n string. However, be aware that on Windows systems, a\n pattern like 'x:/*' or '//host/share/*' will *always*\n start in the 'x:/' or '//host/share/' directory,\n regardless of the --root setting.\n `,\n },\n platform: {\n description: `Defaults to the value of 'process.platform' if\n available, or 'linux' if not. 
Setting --platform=win32\n on non-Windows systems may cause strange behavior!`,\n validate: v =>\n new Set([\n 'aix',\n 'android',\n 'darwin',\n 'freebsd',\n 'haiku',\n 'linux',\n 'openbsd',\n 'sunos',\n 'win32',\n 'cygwin',\n 'netbsd',\n ]).has(v),\n },\n })\n .optList({\n ignore: {\n short: 'i',\n description: `Glob patterns to ignore`,\n },\n })\n .flag({\n debug: {\n short: 'v',\n description: `Output a huge amount of noisy debug information about\n patterns as they are parsed and used to match files.`,\n },\n })\n .flag({\n help: {\n short: 'h',\n description: 'Show this usage information',\n },\n })\n\ntry {\n const { positionals, values } = j.parse()\n if (values.help) {\n console.log(j.usage())\n process.exit(0)\n }\n if (positionals.length === 0 && !values.default)\n throw 'No patterns provided'\n if (positionals.length === 0 && values.default)\n positionals.push(values.default)\n const patterns = values.all\n ? positionals\n : positionals.filter(p => !existsSync(p))\n const matches = values.all ? [] : positionals.filter(p => existsSync(p))\n const stream = globStream(patterns, {\n absolute: values.absolute,\n cwd: values.cwd,\n dot: values.dot,\n dotRelative: values['dot-relative'],\n follow: values.follow,\n ignore: values.ignore,\n mark: values.mark,\n matchBase: values['match-base'],\n maxDepth: values['max-depth'],\n nobrace: values.nobrace,\n nocase: values.nocase,\n nodir: values.nodir,\n noext: values.noext,\n noglobstar: values.noglobstar,\n platform: values.platform as undefined | NodeJS.Platform,\n realpath: values.realpath,\n root: values.root,\n stat: values.stat,\n debug: values.debug,\n posix: values.posix,\n })\n\n const cmd = values.cmd\n if (!cmd) {\n matches.forEach(m => console.log(m))\n stream.on('data', f => console.log(f))\n } else {\n stream.on('data', f => matches.push(f))\n stream.on('end', () => foregroundChild(cmd, matches, { shell: true }))\n }\n} catch (e) {\n console.error(j.usage())\n console.error(e instanceof Error ? e.message : String(e))\n process.exit(1)\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/glob.d.ts b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/glob.d.ts new file mode 100644 index 00000000000000..a8b3da7722b652 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/glob.d.ts @@ -0,0 +1,344 @@ +/// +import { Minimatch } from 'minimatch'; +import { Minipass } from 'minipass'; +import { FSOption, Path, PathScurry } from 'path-scurry'; +import { IgnoreLike } from './ignore.js'; +import { Pattern } from './pattern.js'; +export type MatchSet = Minimatch['set']; +export type GlobParts = Exclude; +/** + * A `GlobOptions` object may be provided to any of the exported methods, and + * must be provided to the `Glob` constructor. + * + * All options are optional, boolean, and false by default, unless otherwise + * noted. + * + * All resolved options are added to the Glob object as properties. + * + * If you are running many `glob` operations, you can pass a Glob object as the + * `options` argument to a subsequent operation to share the previously loaded + * cache. + */ +export interface GlobOptions { + /** + * Set to `true` to always receive absolute paths for + * matched files. Set to `false` to always return relative paths. 
+ * + * When this option is not set, absolute paths are returned for patterns + * that are absolute, and otherwise paths are returned that are relative + * to the `cwd` setting. + * + * This does _not_ make an extra system call to get + * the realpath, it only does string path resolution. + * + * Conflicts with {@link withFileTypes} + */ + absolute?: boolean; + /** + * Set to false to enable {@link windowsPathsNoEscape} + * + * @deprecated + */ + allowWindowsEscape?: boolean; + /** + * The current working directory in which to search. Defaults to + * `process.cwd()`. + * + * May be eiher a string path or a `file://` URL object or string. + */ + cwd?: string | URL; + /** + * Include `.dot` files in normal matches and `globstar` + * matches. Note that an explicit dot in a portion of the pattern + * will always match dot files. + */ + dot?: boolean; + /** + * Prepend all relative path strings with `./` (or `.\` on Windows). + * + * Without this option, returned relative paths are "bare", so instead of + * returning `'./foo/bar'`, they are returned as `'foo/bar'`. + * + * Relative patterns starting with `'../'` are not prepended with `./`, even + * if this option is set. + */ + dotRelative?: boolean; + /** + * Follow symlinked directories when expanding `**` + * patterns. This can result in a lot of duplicate references in + * the presence of cyclic links, and make performance quite bad. + * + * By default, a `**` in a pattern will follow 1 symbolic link if + * it is not the first item in the pattern, or none if it is the + * first item in the pattern, following the same behavior as Bash. + */ + follow?: boolean; + /** + * string or string[], or an object with `ignore` and `ignoreChildren` + * methods. + * + * If a string or string[] is provided, then this is treated as a glob + * pattern or array of glob patterns to exclude from matches. To ignore all + * children within a directory, as well as the entry itself, append `'/**'` + * to the ignore pattern. + * + * **Note** `ignore` patterns are _always_ in `dot:true` mode, regardless of + * any other settings. + * + * If an object is provided that has `ignored(path)` and/or + * `childrenIgnored(path)` methods, then these methods will be called to + * determine whether any Path is a match or if its children should be + * traversed, respectively. + */ + ignore?: string | string[] | IgnoreLike; + /** + * Treat brace expansion like `{a,b}` as a "magic" pattern. Has no + * effect if {@link nobrace} is set. + * + * Only has effect on the {@link hasMagic} function. + */ + magicalBraces?: boolean; + /** + * Add a `/` character to directory matches. Note that this requires + * additional stat calls in some cases. + */ + mark?: boolean; + /** + * Perform a basename-only match if the pattern does not contain any slash + * characters. That is, `*.js` would be treated as equivalent to + * `**\/*.js`, matching all js files in all directories. + */ + matchBase?: boolean; + /** + * Limit the directory traversal to a given depth below the cwd. + * Note that this does NOT prevent traversal to sibling folders, + * root patterns, and so on. It only limits the maximum folder depth + * that the walk will descend, relative to the cwd. + */ + maxDepth?: number; + /** + * Do not expand `{a,b}` and `{1..3}` brace sets. + */ + nobrace?: boolean; + /** + * Perform a case-insensitive match. This defaults to `true` on macOS and + * Windows systems, and `false` on all others. 
+ * + * **Note** `nocase` should only be explicitly set when it is + * known that the filesystem's case sensitivity differs from the + * platform default. If set `true` on case-sensitive file + * systems, or `false` on case-insensitive file systems, then the + * walk may return more or less results than expected. + */ + nocase?: boolean; + /** + * Do not match directories, only files. (Note: to match + * _only_ directories, put a `/` at the end of the pattern.) + */ + nodir?: boolean; + /** + * Do not match "extglob" patterns such as `+(a|b)`. + */ + noext?: boolean; + /** + * Do not match `**` against multiple filenames. (Ie, treat it as a normal + * `*` instead.) + * + * Conflicts with {@link matchBase} + */ + noglobstar?: boolean; + /** + * Defaults to value of `process.platform` if available, or `'linux'` if + * not. Setting `platform:'win32'` on non-Windows systems may cause strange + * behavior. + */ + platform?: NodeJS.Platform; + /** + * Set to true to call `fs.realpath` on all of the + * results. In the case of an entry that cannot be resolved, the + * entry is omitted. This incurs a slight performance penalty, of + * course, because of the added system calls. + */ + realpath?: boolean; + /** + * + * A string path resolved against the `cwd` option, which + * is used as the starting point for absolute patterns that start + * with `/`, (but not drive letters or UNC paths on Windows). + * + * Note that this _doesn't_ necessarily limit the walk to the + * `root` directory, and doesn't affect the cwd starting point for + * non-absolute patterns. A pattern containing `..` will still be + * able to traverse out of the root directory, if it is not an + * actual root directory on the filesystem, and any non-absolute + * patterns will be matched in the `cwd`. For example, the + * pattern `/../*` with `{root:'/some/path'}` will return all + * files in `/some`, not all files in `/some/path`. The pattern + * `*` with `{root:'/some/path'}` will return all the entries in + * the cwd, not the entries in `/some/path`. + * + * To start absolute and non-absolute patterns in the same + * path, you can use `{root:''}`. However, be aware that on + * Windows systems, a pattern like `x:/*` or `//host/share/*` will + * _always_ start in the `x:/` or `//host/share` directory, + * regardless of the `root` setting. + */ + root?: string; + /** + * A [PathScurry](http://npm.im/path-scurry) object used + * to traverse the file system. If the `nocase` option is set + * explicitly, then any provided `scurry` object must match this + * setting. + */ + scurry?: PathScurry; + /** + * Call `lstat()` on all entries, whether required or not to determine + * if it's a valid match. When used with {@link withFileTypes}, this means + * that matches will include data such as modified time, permissions, and + * so on. Note that this will incur a performance cost due to the added + * system calls. + */ + stat?: boolean; + /** + * An AbortSignal which will cancel the Glob walk when + * triggered. + */ + signal?: AbortSignal; + /** + * Use `\\` as a path separator _only_, and + * _never_ as an escape character. If set, all `\\` characters are + * replaced with `/` in the pattern. + * + * Note that this makes it **impossible** to match against paths + * containing literal glob pattern characters, but allows matching + * with patterns constructed using `path.join()` and + * `path.resolve()` on Windows platforms, mimicking the (buggy!) + * behavior of Glob v7 and before on Windows. 
Please use with + * caution, and be mindful of [the caveat below about Windows + * paths](#windows). (For legacy reasons, this is also set if + * `allowWindowsEscape` is set to the exact value `false`.) + */ + windowsPathsNoEscape?: boolean; + /** + * Return [PathScurry](http://npm.im/path-scurry) + * `Path` objects instead of strings. These are similar to a + * NodeJS `Dirent` object, but with additional methods and + * properties. + * + * Conflicts with {@link absolute} + */ + withFileTypes?: boolean; + /** + * An fs implementation to override some or all of the defaults. See + * http://npm.im/path-scurry for details about what can be overridden. + */ + fs?: FSOption; + /** + * Just passed along to Minimatch. Note that this makes all pattern + * matching operations slower and *extremely* noisy. + */ + debug?: boolean; + /** + * Return `/` delimited paths, even on Windows. + * + * On posix systems, this has no effect. But, on Windows, it means that + * paths will be `/` delimited, and absolute paths will be their full + * resolved UNC forms, eg instead of `'C:\\foo\\bar'`, it would return + * `'//?/C:/foo/bar'` + */ + posix?: boolean; +} +export type GlobOptionsWithFileTypesTrue = GlobOptions & { + withFileTypes: true; + absolute?: undefined; + mark?: undefined; + posix?: undefined; +}; +export type GlobOptionsWithFileTypesFalse = GlobOptions & { + withFileTypes?: false; +}; +export type GlobOptionsWithFileTypesUnset = GlobOptions & { + withFileTypes?: undefined; +}; +export type Result = Opts extends GlobOptionsWithFileTypesTrue ? Path : Opts extends GlobOptionsWithFileTypesFalse ? string : Opts extends GlobOptionsWithFileTypesUnset ? string : string | Path; +export type Results = Result[]; +export type FileTypes = Opts extends GlobOptionsWithFileTypesTrue ? true : Opts extends GlobOptionsWithFileTypesFalse ? false : Opts extends GlobOptionsWithFileTypesUnset ? false : boolean; +/** + * An object that can perform glob pattern traversals. + */ +export declare class Glob implements GlobOptions { + absolute?: boolean; + cwd: string; + root?: string; + dot: boolean; + dotRelative: boolean; + follow: boolean; + ignore?: string | string[] | IgnoreLike; + magicalBraces: boolean; + mark?: boolean; + matchBase: boolean; + maxDepth: number; + nobrace: boolean; + nocase: boolean; + nodir: boolean; + noext: boolean; + noglobstar: boolean; + pattern: string[]; + platform: NodeJS.Platform; + realpath: boolean; + scurry: PathScurry; + stat: boolean; + signal?: AbortSignal; + windowsPathsNoEscape: boolean; + withFileTypes: FileTypes; + /** + * The options provided to the constructor. + */ + opts: Opts; + /** + * An array of parsed immutable {@link Pattern} objects. + */ + patterns: Pattern[]; + /** + * All options are stored as properties on the `Glob` object. + * + * See {@link GlobOptions} for full options descriptions. + * + * Note that a previous `Glob` object can be passed as the + * `GlobOptions` to another `Glob` instantiation to re-use settings + * and caches with a new pattern. + * + * Traversal functions can be called multiple times to run the walk + * again. + */ + constructor(pattern: string | string[], opts: Opts); + /** + * Returns a Promise that resolves to the results array. + */ + walk(): Promise>; + /** + * synchronous {@link Glob.walk} + */ + walkSync(): Results; + /** + * Stream results asynchronously. + */ + stream(): Minipass, Result>; + /** + * Stream results synchronously. + */ + streamSync(): Minipass, Result>; + /** + * Default sync iteration function. 
Returns a Generator that + * iterates over the results. + */ + iterateSync(): Generator, void, void>; + [Symbol.iterator](): Generator, void, void>; + /** + * Default async iteration function. Returns an AsyncGenerator that + * iterates over the results. + */ + iterate(): AsyncGenerator, void, void>; + [Symbol.asyncIterator](): AsyncGenerator, void, void>; +} +//# sourceMappingURL=glob.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/glob.d.ts.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/glob.d.ts.map new file mode 100644 index 00000000000000..6353d8b3c47126 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/glob.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"glob.d.ts","sourceRoot":"","sources":["../../../src/glob.ts"],"names":[],"mappings":";AAAA,OAAO,EAAE,SAAS,EAAoB,MAAM,WAAW,CAAA;AACvD,OAAO,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAA;AACnC,OAAO,EACL,QAAQ,EACR,IAAI,EACJ,UAAU,EAIX,MAAM,aAAa,CAAA;AAEpB,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AACxC,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAA;AAGtC,MAAM,MAAM,QAAQ,GAAG,SAAS,CAAC,KAAK,CAAC,CAAA;AACvC,MAAM,MAAM,SAAS,GAAG,OAAO,CAAC,SAAS,CAAC,WAAW,CAAC,EAAE,SAAS,CAAC,CAAA;AAWlE;;;;;;;;;;;;GAYG;AACH,MAAM,WAAW,WAAW;IAC1B;;;;;;;;;;;;OAYG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAA;IAElB;;;;OAIG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAA;IAE5B;;;;;OAKG;IACH,GAAG,CAAC,EAAE,MAAM,GAAG,GAAG,CAAA;IAElB;;;;OAIG;IACH,GAAG,CAAC,EAAE,OAAO,CAAA;IAEb;;;;;;;;OAQG;IACH,WAAW,CAAC,EAAE,OAAO,CAAA;IAErB;;;;;;;;OAQG;IACH,MAAM,CAAC,EAAE,OAAO,CAAA;IAEhB;;;;;;;;;;;;;;;;OAgBG;IACH,MAAM,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,UAAU,CAAA;IAEvC;;;;;OAKG;IACH,aAAa,CAAC,EAAE,OAAO,CAAA;IAEvB;;;OAGG;IACH,IAAI,CAAC,EAAE,OAAO,CAAA;IAEd;;;;OAIG;IACH,SAAS,CAAC,EAAE,OAAO,CAAA;IAEnB;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAA;IAEjB;;OAEG;IACH,OAAO,CAAC,EAAE,OAAO,CAAA;IAEjB;;;;;;;;;OASG;IACH,MAAM,CAAC,EAAE,OAAO,CAAA;IAEhB;;;OAGG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;OAEG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;;;;OAKG;IACH,UAAU,CAAC,EAAE,OAAO,CAAA;IAEpB;;;;OAIG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC,QAAQ,CAAA;IAE1B;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAA;IAElB;;;;;;;;;;;;;;;;;;;;;;OAsBG;IACH,IAAI,CAAC,EAAE,MAAM,CAAA;IAEb;;;;;OAKG;IACH,MAAM,CAAC,EAAE,UAAU,CAAA;IAEnB;;;;;;OAMG;IACH,IAAI,CAAC,EAAE,OAAO,CAAA;IAEd;;;OAGG;IACH,MAAM,CAAC,EAAE,WAAW,CAAA;IAEpB;;;;;;;;;;;;;OAaG;IACH,oBAAoB,CAAC,EAAE,OAAO,CAAA;IAE9B;;;;;;;OAOG;IACH,aAAa,CAAC,EAAE,OAAO,CAAA;IAEvB;;;OAGG;IACH,EAAE,CAAC,EAAE,QAAQ,CAAA;IAEb;;;OAGG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;;;;;;OAOG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;CAChB;AAED,MAAM,MAAM,4BAA4B,GAAG,WAAW,GAAG;IACvD,aAAa,EAAE,IAAI,CAAA;IAEnB,QAAQ,CAAC,EAAE,SAAS,CAAA;IACpB,IAAI,CAAC,EAAE,SAAS,CAAA;IAChB,KAAK,CAAC,EAAE,SAAS,CAAA;CAClB,CAAA;AAED,MAAM,MAAM,6BAA6B,GAAG,WAAW,GAAG;IACxD,aAAa,CAAC,EAAE,KAAK,CAAA;CACtB,CAAA;AAED,MAAM,MAAM,6BAA6B,GAAG,WAAW,GAAG;IACxD,aAAa,CAAC,EAAE,SAAS,CAAA;CAC1B,CAAA;AAED,MAAM,MAAM,MAAM,CAAC,IAAI,IAAI,IAAI,SAAS,4BAA4B,GAChE,IAAI,GACJ,IAAI,SAAS,6BAA6B,GAC1C,MAAM,GACN,IAAI,SAAS,6BAA6B,GAC1C,MAAM,GACN,MAAM,GAAG,IAAI,CAAA;AACjB,MAAM,MAAM,OAAO,CAAC,IAAI,IAAI,MAAM,CAAC,IAAI,CAAC,EAAE,CAAA;AAE1C,MAAM,MAAM,SAAS,CAAC,IAAI,IAAI,IAAI,SAAS,4BAA4B,GACnE,IAAI,GACJ,IAAI,SAAS,6BAA6B,GAC1C,KAAK,GACL,IAAI,SAAS,6BAA6B,GAC1C,KAAK,GACL,OAAO,CAAA;AAEX;;GAEG;AACH,qBAAa,IAAI,CAAC,IAAI,SAAS,WAAW,CAAE,YAAW,WAAW;IAChE,QAAQ,CAAC,EAAE,OAAO,CAAA;IAClB,GAAG,EAAE,MAAM,CAAA;IACX,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,GAAG,EAAE,OAAO,CAAA;IACZ,WAAW,EAAE,OAAO,CAAA;IACpB,MAAM,EAAE,OAAO,CAAA;IACf,MAAM,CA
AC,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,UAAU,CAAA;IACvC,aAAa,EAAE,OAAO,CAAA;IACtB,IAAI,CAAC,EAAE,OAAO,CAAA;IACd,SAAS,EAAE,OAAO,CAAA;IAClB,QAAQ,EAAE,MAAM,CAAA;IAChB,OAAO,EAAE,OAAO,CAAA;IAChB,MAAM,EAAE,OAAO,CAAA;IACf,KAAK,EAAE,OAAO,CAAA;IACd,KAAK,EAAE,OAAO,CAAA;IACd,UAAU,EAAE,OAAO,CAAA;IACnB,OAAO,EAAE,MAAM,EAAE,CAAA;IACjB,QAAQ,EAAE,MAAM,CAAC,QAAQ,CAAA;IACzB,QAAQ,EAAE,OAAO,CAAA;IACjB,MAAM,EAAE,UAAU,CAAA;IAClB,IAAI,EAAE,OAAO,CAAA;IACb,MAAM,CAAC,EAAE,WAAW,CAAA;IACpB,oBAAoB,EAAE,OAAO,CAAA;IAC7B,aAAa,EAAE,SAAS,CAAC,IAAI,CAAC,CAAA;IAE9B;;OAEG;IACH,IAAI,EAAE,IAAI,CAAA;IAEV;;OAEG;IACH,QAAQ,EAAE,OAAO,EAAE,CAAA;IAEnB;;;;;;;;;;;OAWG;gBACS,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI;IAwHlD;;OAEG;IACG,IAAI,IAAI,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;IAmBpC;;OAEG;IACH,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;IAezB;;OAEG;IACH,MAAM,IAAI,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC;IAa9C;;OAEG;IACH,UAAU,IAAI,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC;IAalD;;;OAGG;IACH,WAAW,IAAI,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,IAAI,CAAC;IAGlD,CAAC,MAAM,CAAC,QAAQ,CAAC;IAIjB;;;OAGG;IACH,OAAO,IAAI,cAAc,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,IAAI,CAAC;IAGnD,CAAC,MAAM,CAAC,aAAa,CAAC;CAGvB"} \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/glob.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/glob.js new file mode 100644 index 00000000000000..eb37c6b9a6601e --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/glob.js @@ -0,0 +1,238 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Glob = void 0; +const minimatch_1 = require("minimatch"); +const path_scurry_1 = require("path-scurry"); +const url_1 = require("url"); +const pattern_js_1 = require("./pattern.js"); +const walker_js_1 = require("./walker.js"); +// if no process global, just call it linux. +// so we default to case-sensitive, / separators +const defaultPlatform = typeof process === 'object' && + process && + typeof process.platform === 'string' + ? process.platform + : 'linux'; +/** + * An object that can perform glob pattern traversals. + */ +class Glob { + absolute; + cwd; + root; + dot; + dotRelative; + follow; + ignore; + magicalBraces; + mark; + matchBase; + maxDepth; + nobrace; + nocase; + nodir; + noext; + noglobstar; + pattern; + platform; + realpath; + scurry; + stat; + signal; + windowsPathsNoEscape; + withFileTypes; + /** + * The options provided to the constructor. + */ + opts; + /** + * An array of parsed immutable {@link Pattern} objects. + */ + patterns; + /** + * All options are stored as properties on the `Glob` object. + * + * See {@link GlobOptions} for full options descriptions. + * + * Note that a previous `Glob` object can be passed as the + * `GlobOptions` to another `Glob` instantiation to re-use settings + * and caches with a new pattern. + * + * Traversal functions can be called multiple times to run the walk + * again. 
+ */ + constructor(pattern, opts) { + /* c8 ignore start */ + if (!opts) + throw new TypeError('glob options required'); + /* c8 ignore stop */ + this.withFileTypes = !!opts.withFileTypes; + this.signal = opts.signal; + this.follow = !!opts.follow; + this.dot = !!opts.dot; + this.dotRelative = !!opts.dotRelative; + this.nodir = !!opts.nodir; + this.mark = !!opts.mark; + if (!opts.cwd) { + this.cwd = ''; + } + else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) { + opts.cwd = (0, url_1.fileURLToPath)(opts.cwd); + } + this.cwd = opts.cwd || ''; + this.root = opts.root; + this.magicalBraces = !!opts.magicalBraces; + this.nobrace = !!opts.nobrace; + this.noext = !!opts.noext; + this.realpath = !!opts.realpath; + this.absolute = opts.absolute; + this.noglobstar = !!opts.noglobstar; + this.matchBase = !!opts.matchBase; + this.maxDepth = + typeof opts.maxDepth === 'number' ? opts.maxDepth : Infinity; + this.stat = !!opts.stat; + this.ignore = opts.ignore; + if (this.withFileTypes && this.absolute !== undefined) { + throw new Error('cannot set absolute and withFileTypes:true'); + } + if (typeof pattern === 'string') { + pattern = [pattern]; + } + this.windowsPathsNoEscape = + !!opts.windowsPathsNoEscape || + opts.allowWindowsEscape === false; + if (this.windowsPathsNoEscape) { + pattern = pattern.map(p => p.replace(/\\/g, '/')); + } + if (this.matchBase) { + if (opts.noglobstar) { + throw new TypeError('base matching requires globstar'); + } + pattern = pattern.map(p => (p.includes('/') ? p : `./**/${p}`)); + } + this.pattern = pattern; + this.platform = opts.platform || defaultPlatform; + this.opts = { ...opts, platform: this.platform }; + if (opts.scurry) { + this.scurry = opts.scurry; + if (opts.nocase !== undefined && + opts.nocase !== opts.scurry.nocase) { + throw new Error('nocase option contradicts provided scurry option'); + } + } + else { + const Scurry = opts.platform === 'win32' + ? path_scurry_1.PathScurryWin32 + : opts.platform === 'darwin' + ? path_scurry_1.PathScurryDarwin + : opts.platform + ? path_scurry_1.PathScurryPosix + : path_scurry_1.PathScurry; + this.scurry = new Scurry(this.cwd, { + nocase: opts.nocase, + fs: opts.fs, + }); + } + this.nocase = this.scurry.nocase; + // If you do nocase:true on a case-sensitive file system, then + // we need to use regexps instead of strings for non-magic + // path portions, because statting `aBc` won't return results + // for the file `AbC` for example. + const nocaseMagicOnly = this.platform === 'darwin' || this.platform === 'win32'; + const mmo = { + // default nocase based on platform + ...opts, + dot: this.dot, + matchBase: this.matchBase, + nobrace: this.nobrace, + nocase: this.nocase, + nocaseMagicOnly, + nocomment: true, + noext: this.noext, + nonegate: true, + optimizationLevel: 2, + platform: this.platform, + windowsPathsNoEscape: this.windowsPathsNoEscape, + debug: !!this.opts.debug, + }; + const mms = this.pattern.map(p => new minimatch_1.Minimatch(p, mmo)); + const [matchSet, globParts] = mms.reduce((set, m) => { + set[0].push(...m.set); + set[1].push(...m.globParts); + return set; + }, [[], []]); + this.patterns = matchSet.map((set, i) => { + return new pattern_js_1.Pattern(set, globParts[i], 0, this.platform); + }); + } + async walk() { + // Walkers always return array of Path objects, so we just have to + // coerce them into the right shape. It will have already called + // realpath() if the option was set to do so, so we know that's cached. 
+ // start out knowing the cwd, at least + return [ + ...(await new walker_js_1.GlobWalker(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity + ? this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + }).walk()), + ]; + } + walkSync() { + return [ + ...new walker_js_1.GlobWalker(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity + ? this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + }).walkSync(), + ]; + } + stream() { + return new walker_js_1.GlobStream(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity + ? this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + }).stream(); + } + streamSync() { + return new walker_js_1.GlobStream(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity + ? this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + }).streamSync(); + } + /** + * Default sync iteration function. Returns a Generator that + * iterates over the results. + */ + iterateSync() { + return this.streamSync()[Symbol.iterator](); + } + [Symbol.iterator]() { + return this.iterateSync(); + } + /** + * Default async iteration function. Returns an AsyncGenerator that + * iterates over the results. + */ + iterate() { + return this.stream()[Symbol.asyncIterator](); + } + [Symbol.asyncIterator]() { + return this.iterate(); + } +} +exports.Glob = Glob; +//# sourceMappingURL=glob.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/glob.js.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/glob.js.map new file mode 100644 index 00000000000000..7a7a9b28627480 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/glob.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"glob.js","sourceRoot":"","sources":["../../../src/glob.ts"],"names":[],"mappings":";;;AAAA,yCAAuD;AAEvD,6CAOoB;AACpB,6BAAmC;AAEnC,6CAAsC;AACtC,2CAAoD;AAKpD,4CAA4C;AAC5C,gDAAgD;AAChD,MAAM,eAAe,GACnB,OAAO,OAAO,KAAK,QAAQ;IAC3B,OAAO;IACP,OAAO,OAAO,CAAC,QAAQ,KAAK,QAAQ;IAClC,CAAC,CAAC,OAAO,CAAC,QAAQ;IAClB,CAAC,CAAC,OAAO,CAAA;AAgTb;;GAEG;AACH,MAAa,IAAI;IACf,QAAQ,CAAU;IAClB,GAAG,CAAQ;IACX,IAAI,CAAS;IACb,GAAG,CAAS;IACZ,WAAW,CAAS;IACpB,MAAM,CAAS;IACf,MAAM,CAAiC;IACvC,aAAa,CAAS;IACtB,IAAI,CAAU;IACd,SAAS,CAAS;IAClB,QAAQ,CAAQ;IAChB,OAAO,CAAS;IAChB,MAAM,CAAS;IACf,KAAK,CAAS;IACd,KAAK,CAAS;IACd,UAAU,CAAS;IACnB,OAAO,CAAU;IACjB,QAAQ,CAAiB;IACzB,QAAQ,CAAS;IACjB,MAAM,CAAY;IAClB,IAAI,CAAS;IACb,MAAM,CAAc;IACpB,oBAAoB,CAAS;IAC7B,aAAa,CAAiB;IAE9B;;OAEG;IACH,IAAI,CAAM;IAEV;;OAEG;IACH,QAAQ,CAAW;IAEnB;;;;;;;;;;;OAWG;IACH,YAAY,OAA0B,EAAE,IAAU;QAChD,qBAAqB;QACrB,IAAI,CAAC,IAAI;YAAE,MAAM,IAAI,SAAS,CAAC,uBAAuB,CAAC,CAAA;QACvD,oBAAoB;QACpB,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC,IAAI,CAAC,aAAgC,CAAA;QAC5D,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;QACzB,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,IAAI,CAAC,MAAM,CAAA;QAC3B,IAAI,CAAC,GAAG,GAAG,CAAC,CAAC,IAAI,CAAC,GAAG,CAAA;QACrB,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC,IAAI,CAAC,WAAW,CAAA;QACrC,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,IAAI,CAAC,KAAK,CAAA;QACzB,IAAI,CAAC,IAAI,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,CAAA;QACvB,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE;YACb,IAAI,CAAC,GAAG,GAAG,EAAE,CAAA;SACd;aAAM,IAAI,IAAI,CAAC,GAAG,YAAY,GAAG,IAAI,IAAI,CAAC,GAAG,CAAC,UAAU,CAAC,SAAS,CAAC,EAAE;YACpE,IAAI,CAAC,GAAG,GAAG,IAAA,mBAAa,EAAC,IAAI,CAAC,GAAG,CAAC,CAAA;SACnC;QACD,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,IAAI,EAAE,CAAA;QACzB,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,CAAA;QACrB,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC,IAAI,CAAC,aAAa,CAAA;QACzC,IAAI,CAAC,OAAO,GAAG,CAAC,CAAC,IAAI,CAAC,OAAO,CAAA;QAC7B,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,IAAI,CAAC,KAAK,CAAA;QACzB,IAAI,CAAC,QAAQ,GAAG,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAA;QAC/B,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAA;QAE7B,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC,IAAI,CAAC,UAAU,CAAA;QACnC,IAAI,CAAC,SAAS,GAAG,CAAC,CAAC,IAAI,CAAC,SAAS,CAAA;QACjC,IAAI,CAAC,QAAQ;YACX,OAAO,IAAI,CAAC,QAAQ,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAA;QAC9D,IAAI,CAAC,IAAI,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,CAAA;QACvB,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;QAEzB,IAAI,IAAI,CAAC,aAAa,IAAI,IAAI,CAAC,QAAQ,KAAK,SAAS,EAAE;YACrD,MAAM,IAAI,KAAK,CAAC,4CAA4C,CAAC,CAAA;SAC9D;QAED,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;YAC/B,OAAO,GAAG,CAAC,OAAO,CAAC,CAAA;SACpB;QAED,IAAI,CAAC,oBAAoB;YACvB,CAAC,CAAC,IAAI,CAAC,oBAAoB;gBAC1B,IAAoB,CAAC,kBAAkB,KAAK,KAAK,CAAA;QAEpD,IAAI,IAAI,CAAC,oBAAoB,EAAE;YAC7B,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAA;SAClD;QAED,IAAI,IAAI,CAAC,SAAS,EAAE;YAClB,IAAI,IAAI,CAAC,UAAU,EAAE;gBACnB,MAAM,IAAI,SAAS,CAAC,iCAAiC,CAAC,CAAA;aACvD;YACD,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAA;SAChE;QAED,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;QAEtB,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,IAAI,eAAe,CAAA;QAChD,IAAI,CAAC,IAAI,GAAG,EAAE,GAAG,IAAI,EAAE,QAAQ,EAAE,IAAI,CAAC,QAAQ,EAAE,CAAA;QAChD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;YACzB,IACE,IAAI,CAAC,MAAM,KAAK,SAAS;gBACzB,IAAI,CAAC,MAAM,KAAK,IAAI,CAAC,MAAM,CAAC,MAAM,EAClC;gBACA,MAAM,IAAI,KAAK,CAAC,kDAAkD,CAAC,CAAA;aACpE;SACF;aAAM;YACL,MAAM,MAAM,GACV,IAAI,CAAC,QAAQ,KAAK,OAAO;gBACvB,CAAC,CAAC,6BAAe;gBACjB,CAAC,CAAC,IAAI,CAAC,QAAQ,KAAK,QAAQ;oBAC5B,CAAC,CAAC,8BAAgB;oBAClB,CAAC,CAAC,IAAI,CAAC,QAAQ;wBACf,CAAC,CAAC,6BAAe;wBACjB,CAAC,CAAC
,wBAAU,CAAA;YAChB,IAAI,CAAC,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE;gBACjC,MAAM,EAAE,IAAI,CAAC,MAAM;gBACnB,EAAE,EAAE,IAAI,CAAC,EAAE;aACZ,CAAC,CAAA;SACH;QACD,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,CAAA;QAEhC,8DAA8D;QAC9D,0DAA0D;QAC1D,6DAA6D;QAC7D,kCAAkC;QAClC,MAAM,eAAe,GACnB,IAAI,CAAC,QAAQ,KAAK,QAAQ,IAAI,IAAI,CAAC,QAAQ,KAAK,OAAO,CAAA;QAEzD,MAAM,GAAG,GAAqB;YAC5B,mCAAmC;YACnC,GAAG,IAAI;YACP,GAAG,EAAE,IAAI,CAAC,GAAG;YACb,SAAS,EAAE,IAAI,CAAC,SAAS;YACzB,OAAO,EAAE,IAAI,CAAC,OAAO;YACrB,MAAM,EAAE,IAAI,CAAC,MAAM;YACnB,eAAe;YACf,SAAS,EAAE,IAAI;YACf,KAAK,EAAE,IAAI,CAAC,KAAK;YACjB,QAAQ,EAAE,IAAI;YACd,iBAAiB,EAAE,CAAC;YACpB,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,oBAAoB,EAAE,IAAI,CAAC,oBAAoB;YAC/C,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK;SACzB,CAAA;QAED,MAAM,GAAG,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,qBAAS,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAA;QACxD,MAAM,CAAC,QAAQ,EAAE,SAAS,CAAC,GAAG,GAAG,CAAC,MAAM,CACtC,CAAC,GAA0B,EAAE,CAAC,EAAE,EAAE;YAChC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAA;YACrB,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,SAAS,CAAC,CAAA;YAC3B,OAAO,GAAG,CAAA;QACZ,CAAC,EACD,CAAC,EAAE,EAAE,EAAE,CAAC,CACT,CAAA;QACD,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,CAAC,EAAE,EAAE;YACtC,OAAO,IAAI,oBAAO,CAAC,GAAG,EAAE,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAA;QACzD,CAAC,CAAC,CAAA;IACJ,CAAC;IAMD,KAAK,CAAC,IAAI;QACR,kEAAkE;QAClE,iEAAiE;QACjE,uEAAuE;QACvE,sCAAsC;QACtC,OAAO;YACL,GAAG,CAAC,MAAM,IAAI,sBAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;gBACvD,GAAG,IAAI,CAAC,IAAI;gBACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;oBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;oBACzC,CAAC,CAAC,QAAQ;gBACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;gBACvB,MAAM,EAAE,IAAI,CAAC,MAAM;aACpB,CAAC,CAAC,IAAI,EAAE,CAAC;SACX,CAAA;IACH,CAAC;IAMD,QAAQ;QACN,OAAO;YACL,GAAG,IAAI,sBAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;gBAChD,GAAG,IAAI,CAAC,IAAI;gBACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;oBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;oBACzC,CAAC,CAAC,QAAQ;gBACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;gBACvB,MAAM,EAAE,IAAI,CAAC,MAAM;aACpB,CAAC,CAAC,QAAQ,EAAE;SACd,CAAA;IACH,CAAC;IAMD,MAAM;QACJ,OAAO,IAAI,sBAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;YACpD,GAAG,IAAI,CAAC,IAAI;YACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;gBACzC,CAAC,CAAC,QAAQ;YACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,MAAM,EAAE,IAAI,CAAC,MAAM;SACpB,CAAC,CAAC,MAAM,EAAE,CAAA;IACb,CAAC;IAMD,UAAU;QACR,OAAO,IAAI,sBAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;YACpD,GAAG,IAAI,CAAC,IAAI;YACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;gBACzC,CAAC,CAAC,QAAQ;YACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,MAAM,EAAE,IAAI,CAAC,MAAM;SACpB,CAAC,CAAC,UAAU,EAAE,CAAA;IACjB,CAAC;IAED;;;OAGG;IACH,WAAW;QACT,OAAO,IAAI,CAAC,UAAU,EAAE,CAAC,MAAM,CAAC,QAAQ,CAAC,EAAE,CAAA;IAC7C,CAAC;IACD,CAAC,MAAM,CAAC,QAAQ,CAAC;QACf,OAAO,IAAI,CAAC,WAAW,EAAE,CAAA;IAC3B,CAAC;IAED;;;OAGG;IACH,OAAO;QACL,OAAO,IAAI,CAAC,MAAM,EAAE,CAAC,MAAM,CAAC,aAAa,CAAC,EAAE,CAAA;IAC9C,CAAC;IACD,CAAC,MAAM,CAAC,aAAa,CAAC;QACpB,OAAO,IAAI,CAAC,OAAO,EAAE,CAAA;IACvB,CAAC;CACF;AArQD,oBAqQC","sourcesContent":["import { Minimatch, MinimatchOptions } from 'minimatch'\nimport { Minipass } from 'minipass'\nimport {\n FSOption,\n Path,\n PathScurry,\n PathScurryDarwin,\n PathScurryPosix,\n PathScurryWin32,\n} from 'path-scurry'\nimport { fileURLToPath } from 'url'\nimport { 
IgnoreLike } from './ignore.js'\nimport { Pattern } from './pattern.js'\nimport { GlobStream, GlobWalker } from './walker.js'\n\nexport type MatchSet = Minimatch['set']\nexport type GlobParts = Exclude\n\n// if no process global, just call it linux.\n// so we default to case-sensitive, / separators\nconst defaultPlatform: NodeJS.Platform =\n typeof process === 'object' &&\n process &&\n typeof process.platform === 'string'\n ? process.platform\n : 'linux'\n\n/**\n * A `GlobOptions` object may be provided to any of the exported methods, and\n * must be provided to the `Glob` constructor.\n *\n * All options are optional, boolean, and false by default, unless otherwise\n * noted.\n *\n * All resolved options are added to the Glob object as properties.\n *\n * If you are running many `glob` operations, you can pass a Glob object as the\n * `options` argument to a subsequent operation to share the previously loaded\n * cache.\n */\nexport interface GlobOptions {\n /**\n * Set to `true` to always receive absolute paths for\n * matched files. Set to `false` to always return relative paths.\n *\n * When this option is not set, absolute paths are returned for patterns\n * that are absolute, and otherwise paths are returned that are relative\n * to the `cwd` setting.\n *\n * This does _not_ make an extra system call to get\n * the realpath, it only does string path resolution.\n *\n * Conflicts with {@link withFileTypes}\n */\n absolute?: boolean\n\n /**\n * Set to false to enable {@link windowsPathsNoEscape}\n *\n * @deprecated\n */\n allowWindowsEscape?: boolean\n\n /**\n * The current working directory in which to search. Defaults to\n * `process.cwd()`.\n *\n * May be eiher a string path or a `file://` URL object or string.\n */\n cwd?: string | URL\n\n /**\n * Include `.dot` files in normal matches and `globstar`\n * matches. Note that an explicit dot in a portion of the pattern\n * will always match dot files.\n */\n dot?: boolean\n\n /**\n * Prepend all relative path strings with `./` (or `.\\` on Windows).\n *\n * Without this option, returned relative paths are \"bare\", so instead of\n * returning `'./foo/bar'`, they are returned as `'foo/bar'`.\n *\n * Relative patterns starting with `'../'` are not prepended with `./`, even\n * if this option is set.\n */\n dotRelative?: boolean\n\n /**\n * Follow symlinked directories when expanding `**`\n * patterns. This can result in a lot of duplicate references in\n * the presence of cyclic links, and make performance quite bad.\n *\n * By default, a `**` in a pattern will follow 1 symbolic link if\n * it is not the first item in the pattern, or none if it is the\n * first item in the pattern, following the same behavior as Bash.\n */\n follow?: boolean\n\n /**\n * string or string[], or an object with `ignore` and `ignoreChildren`\n * methods.\n *\n * If a string or string[] is provided, then this is treated as a glob\n * pattern or array of glob patterns to exclude from matches. 
To ignore all\n * children within a directory, as well as the entry itself, append `'/**'`\n * to the ignore pattern.\n *\n * **Note** `ignore` patterns are _always_ in `dot:true` mode, regardless of\n * any other settings.\n *\n * If an object is provided that has `ignored(path)` and/or\n * `childrenIgnored(path)` methods, then these methods will be called to\n * determine whether any Path is a match or if its children should be\n * traversed, respectively.\n */\n ignore?: string | string[] | IgnoreLike\n\n /**\n * Treat brace expansion like `{a,b}` as a \"magic\" pattern. Has no\n * effect if {@link nobrace} is set.\n *\n * Only has effect on the {@link hasMagic} function.\n */\n magicalBraces?: boolean\n\n /**\n * Add a `/` character to directory matches. Note that this requires\n * additional stat calls in some cases.\n */\n mark?: boolean\n\n /**\n * Perform a basename-only match if the pattern does not contain any slash\n * characters. That is, `*.js` would be treated as equivalent to\n * `**\\/*.js`, matching all js files in all directories.\n */\n matchBase?: boolean\n\n /**\n * Limit the directory traversal to a given depth below the cwd.\n * Note that this does NOT prevent traversal to sibling folders,\n * root patterns, and so on. It only limits the maximum folder depth\n * that the walk will descend, relative to the cwd.\n */\n maxDepth?: number\n\n /**\n * Do not expand `{a,b}` and `{1..3}` brace sets.\n */\n nobrace?: boolean\n\n /**\n * Perform a case-insensitive match. This defaults to `true` on macOS and\n * Windows systems, and `false` on all others.\n *\n * **Note** `nocase` should only be explicitly set when it is\n * known that the filesystem's case sensitivity differs from the\n * platform default. If set `true` on case-sensitive file\n * systems, or `false` on case-insensitive file systems, then the\n * walk may return more or less results than expected.\n */\n nocase?: boolean\n\n /**\n * Do not match directories, only files. (Note: to match\n * _only_ directories, put a `/` at the end of the pattern.)\n */\n nodir?: boolean\n\n /**\n * Do not match \"extglob\" patterns such as `+(a|b)`.\n */\n noext?: boolean\n\n /**\n * Do not match `**` against multiple filenames. (Ie, treat it as a normal\n * `*` instead.)\n *\n * Conflicts with {@link matchBase}\n */\n noglobstar?: boolean\n\n /**\n * Defaults to value of `process.platform` if available, or `'linux'` if\n * not. Setting `platform:'win32'` on non-Windows systems may cause strange\n * behavior.\n */\n platform?: NodeJS.Platform\n\n /**\n * Set to true to call `fs.realpath` on all of the\n * results. In the case of an entry that cannot be resolved, the\n * entry is omitted. This incurs a slight performance penalty, of\n * course, because of the added system calls.\n */\n realpath?: boolean\n\n /**\n *\n * A string path resolved against the `cwd` option, which\n * is used as the starting point for absolute patterns that start\n * with `/`, (but not drive letters or UNC paths on Windows).\n *\n * Note that this _doesn't_ necessarily limit the walk to the\n * `root` directory, and doesn't affect the cwd starting point for\n * non-absolute patterns. A pattern containing `..` will still be\n * able to traverse out of the root directory, if it is not an\n * actual root directory on the filesystem, and any non-absolute\n * patterns will be matched in the `cwd`. For example, the\n * pattern `/../*` with `{root:'/some/path'}` will return all\n * files in `/some`, not all files in `/some/path`. 
The pattern\n * `*` with `{root:'/some/path'}` will return all the entries in\n * the cwd, not the entries in `/some/path`.\n *\n * To start absolute and non-absolute patterns in the same\n * path, you can use `{root:''}`. However, be aware that on\n * Windows systems, a pattern like `x:/*` or `//host/share/*` will\n * _always_ start in the `x:/` or `//host/share` directory,\n * regardless of the `root` setting.\n */\n root?: string\n\n /**\n * A [PathScurry](http://npm.im/path-scurry) object used\n * to traverse the file system. If the `nocase` option is set\n * explicitly, then any provided `scurry` object must match this\n * setting.\n */\n scurry?: PathScurry\n\n /**\n * Call `lstat()` on all entries, whether required or not to determine\n * if it's a valid match. When used with {@link withFileTypes}, this means\n * that matches will include data such as modified time, permissions, and\n * so on. Note that this will incur a performance cost due to the added\n * system calls.\n */\n stat?: boolean\n\n /**\n * An AbortSignal which will cancel the Glob walk when\n * triggered.\n */\n signal?: AbortSignal\n\n /**\n * Use `\\\\` as a path separator _only_, and\n * _never_ as an escape character. If set, all `\\\\` characters are\n * replaced with `/` in the pattern.\n *\n * Note that this makes it **impossible** to match against paths\n * containing literal glob pattern characters, but allows matching\n * with patterns constructed using `path.join()` and\n * `path.resolve()` on Windows platforms, mimicking the (buggy!)\n * behavior of Glob v7 and before on Windows. Please use with\n * caution, and be mindful of [the caveat below about Windows\n * paths](#windows). (For legacy reasons, this is also set if\n * `allowWindowsEscape` is set to the exact value `false`.)\n */\n windowsPathsNoEscape?: boolean\n\n /**\n * Return [PathScurry](http://npm.im/path-scurry)\n * `Path` objects instead of strings. These are similar to a\n * NodeJS `Dirent` object, but with additional methods and\n * properties.\n *\n * Conflicts with {@link absolute}\n */\n withFileTypes?: boolean\n\n /**\n * An fs implementation to override some or all of the defaults. See\n * http://npm.im/path-scurry for details about what can be overridden.\n */\n fs?: FSOption\n\n /**\n * Just passed along to Minimatch. Note that this makes all pattern\n * matching operations slower and *extremely* noisy.\n */\n debug?: boolean\n\n /**\n * Return `/` delimited paths, even on Windows.\n *\n * On posix systems, this has no effect. But, on Windows, it means that\n * paths will be `/` delimited, and absolute paths will be their full\n * resolved UNC forms, eg instead of `'C:\\\\foo\\\\bar'`, it would return\n * `'//?/C:/foo/bar'`\n */\n posix?: boolean\n}\n\nexport type GlobOptionsWithFileTypesTrue = GlobOptions & {\n withFileTypes: true\n // string options not relevant if returning Path objects.\n absolute?: undefined\n mark?: undefined\n posix?: undefined\n}\n\nexport type GlobOptionsWithFileTypesFalse = GlobOptions & {\n withFileTypes?: false\n}\n\nexport type GlobOptionsWithFileTypesUnset = GlobOptions & {\n withFileTypes?: undefined\n}\n\nexport type Result = Opts extends GlobOptionsWithFileTypesTrue\n ? Path\n : Opts extends GlobOptionsWithFileTypesFalse\n ? string\n : Opts extends GlobOptionsWithFileTypesUnset\n ? string\n : string | Path\nexport type Results = Result[]\n\nexport type FileTypes = Opts extends GlobOptionsWithFileTypesTrue\n ? true\n : Opts extends GlobOptionsWithFileTypesFalse\n ? 
false\n : Opts extends GlobOptionsWithFileTypesUnset\n ? false\n : boolean\n\n/**\n * An object that can perform glob pattern traversals.\n */\nexport class Glob implements GlobOptions {\n absolute?: boolean\n cwd: string\n root?: string\n dot: boolean\n dotRelative: boolean\n follow: boolean\n ignore?: string | string[] | IgnoreLike\n magicalBraces: boolean\n mark?: boolean\n matchBase: boolean\n maxDepth: number\n nobrace: boolean\n nocase: boolean\n nodir: boolean\n noext: boolean\n noglobstar: boolean\n pattern: string[]\n platform: NodeJS.Platform\n realpath: boolean\n scurry: PathScurry\n stat: boolean\n signal?: AbortSignal\n windowsPathsNoEscape: boolean\n withFileTypes: FileTypes\n\n /**\n * The options provided to the constructor.\n */\n opts: Opts\n\n /**\n * An array of parsed immutable {@link Pattern} objects.\n */\n patterns: Pattern[]\n\n /**\n * All options are stored as properties on the `Glob` object.\n *\n * See {@link GlobOptions} for full options descriptions.\n *\n * Note that a previous `Glob` object can be passed as the\n * `GlobOptions` to another `Glob` instantiation to re-use settings\n * and caches with a new pattern.\n *\n * Traversal functions can be called multiple times to run the walk\n * again.\n */\n constructor(pattern: string | string[], opts: Opts) {\n /* c8 ignore start */\n if (!opts) throw new TypeError('glob options required')\n /* c8 ignore stop */\n this.withFileTypes = !!opts.withFileTypes as FileTypes\n this.signal = opts.signal\n this.follow = !!opts.follow\n this.dot = !!opts.dot\n this.dotRelative = !!opts.dotRelative\n this.nodir = !!opts.nodir\n this.mark = !!opts.mark\n if (!opts.cwd) {\n this.cwd = ''\n } else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) {\n opts.cwd = fileURLToPath(opts.cwd)\n }\n this.cwd = opts.cwd || ''\n this.root = opts.root\n this.magicalBraces = !!opts.magicalBraces\n this.nobrace = !!opts.nobrace\n this.noext = !!opts.noext\n this.realpath = !!opts.realpath\n this.absolute = opts.absolute\n\n this.noglobstar = !!opts.noglobstar\n this.matchBase = !!opts.matchBase\n this.maxDepth =\n typeof opts.maxDepth === 'number' ? opts.maxDepth : Infinity\n this.stat = !!opts.stat\n this.ignore = opts.ignore\n\n if (this.withFileTypes && this.absolute !== undefined) {\n throw new Error('cannot set absolute and withFileTypes:true')\n }\n\n if (typeof pattern === 'string') {\n pattern = [pattern]\n }\n\n this.windowsPathsNoEscape =\n !!opts.windowsPathsNoEscape ||\n (opts as GlobOptions).allowWindowsEscape === false\n\n if (this.windowsPathsNoEscape) {\n pattern = pattern.map(p => p.replace(/\\\\/g, '/'))\n }\n\n if (this.matchBase) {\n if (opts.noglobstar) {\n throw new TypeError('base matching requires globstar')\n }\n pattern = pattern.map(p => (p.includes('/') ? p : `./**/${p}`))\n }\n\n this.pattern = pattern\n\n this.platform = opts.platform || defaultPlatform\n this.opts = { ...opts, platform: this.platform }\n if (opts.scurry) {\n this.scurry = opts.scurry\n if (\n opts.nocase !== undefined &&\n opts.nocase !== opts.scurry.nocase\n ) {\n throw new Error('nocase option contradicts provided scurry option')\n }\n } else {\n const Scurry =\n opts.platform === 'win32'\n ? PathScurryWin32\n : opts.platform === 'darwin'\n ? PathScurryDarwin\n : opts.platform\n ? 
PathScurryPosix\n : PathScurry\n this.scurry = new Scurry(this.cwd, {\n nocase: opts.nocase,\n fs: opts.fs,\n })\n }\n this.nocase = this.scurry.nocase\n\n // If you do nocase:true on a case-sensitive file system, then\n // we need to use regexps instead of strings for non-magic\n // path portions, because statting `aBc` won't return results\n // for the file `AbC` for example.\n const nocaseMagicOnly =\n this.platform === 'darwin' || this.platform === 'win32'\n\n const mmo: MinimatchOptions = {\n // default nocase based on platform\n ...opts,\n dot: this.dot,\n matchBase: this.matchBase,\n nobrace: this.nobrace,\n nocase: this.nocase,\n nocaseMagicOnly,\n nocomment: true,\n noext: this.noext,\n nonegate: true,\n optimizationLevel: 2,\n platform: this.platform,\n windowsPathsNoEscape: this.windowsPathsNoEscape,\n debug: !!this.opts.debug,\n }\n\n const mms = this.pattern.map(p => new Minimatch(p, mmo))\n const [matchSet, globParts] = mms.reduce(\n (set: [MatchSet, GlobParts], m) => {\n set[0].push(...m.set)\n set[1].push(...m.globParts)\n return set\n },\n [[], []]\n )\n this.patterns = matchSet.map((set, i) => {\n return new Pattern(set, globParts[i], 0, this.platform)\n })\n }\n\n /**\n * Returns a Promise that resolves to the results array.\n */\n async walk(): Promise>\n async walk(): Promise<(string | Path)[]> {\n // Walkers always return array of Path objects, so we just have to\n // coerce them into the right shape. It will have already called\n // realpath() if the option was set to do so, so we know that's cached.\n // start out knowing the cwd, at least\n return [\n ...(await new GlobWalker(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).walk()),\n ]\n }\n\n /**\n * synchronous {@link Glob.walk}\n */\n walkSync(): Results\n walkSync(): (string | Path)[] {\n return [\n ...new GlobWalker(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).walkSync(),\n ]\n }\n\n /**\n * Stream results asynchronously.\n */\n stream(): Minipass, Result>\n stream(): Minipass {\n return new GlobStream(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).stream()\n }\n\n /**\n * Stream results synchronously.\n */\n streamSync(): Minipass, Result>\n streamSync(): Minipass {\n return new GlobStream(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).streamSync()\n }\n\n /**\n * Default sync iteration function. Returns a Generator that\n * iterates over the results.\n */\n iterateSync(): Generator, void, void> {\n return this.streamSync()[Symbol.iterator]()\n }\n [Symbol.iterator]() {\n return this.iterateSync()\n }\n\n /**\n * Default async iteration function. 
Returns an AsyncGenerator that\n * iterates over the results.\n */\n iterate(): AsyncGenerator, void, void> {\n return this.stream()[Symbol.asyncIterator]()\n }\n [Symbol.asyncIterator]() {\n return this.iterate()\n }\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/has-magic.d.ts b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/has-magic.d.ts new file mode 100644 index 00000000000000..8aec3bd9725175 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/has-magic.d.ts @@ -0,0 +1,14 @@ +import { GlobOptions } from './glob.js'; +/** + * Return true if the patterns provided contain any magic glob characters, + * given the options provided. + * + * Brace expansion is not considered "magic" unless the `magicalBraces` option + * is set, as brace expansion just turns one string into an array of strings. + * So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and + * `'xby'` both do not contain any magic glob characters, and it's treated the + * same as if you had called it on `['xay', 'xby']`. When `magicalBraces:true` + * is in the options, brace expansion _is_ treated as a pattern having magic. + */ +export declare const hasMagic: (pattern: string | string[], options?: GlobOptions) => boolean; +//# sourceMappingURL=has-magic.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/has-magic.d.ts.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/has-magic.d.ts.map new file mode 100644 index 00000000000000..dd5053f80b44c3 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/has-magic.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"has-magic.d.ts","sourceRoot":"","sources":["../../../src/has-magic.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,WAAW,EAAE,MAAM,WAAW,CAAA;AAEvC;;;;;;;;;;GAUG;AACH,eAAO,MAAM,QAAQ,YACV,MAAM,GAAG,MAAM,EAAE,YACjB,WAAW,KACnB,OAQF,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/has-magic.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/has-magic.js new file mode 100644 index 00000000000000..0918bd57e0f1c2 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/has-magic.js @@ -0,0 +1,27 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.hasMagic = void 0; +const minimatch_1 = require("minimatch"); +/** + * Return true if the patterns provided contain any magic glob characters, + * given the options provided. + * + * Brace expansion is not considered "magic" unless the `magicalBraces` option + * is set, as brace expansion just turns one string into an array of strings. + * So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and + * `'xby'` both do not contain any magic glob characters, and it's treated the + * same as if you had called it on `['xay', 'xby']`. When `magicalBraces:true` + * is in the options, brace expansion _is_ treated as a pattern having magic. 
+ */ +const hasMagic = (pattern, options = {}) => { + if (!Array.isArray(pattern)) { + pattern = [pattern]; + } + for (const p of pattern) { + if (new minimatch_1.Minimatch(p, options).hasMagic()) + return true; + } + return false; +}; +exports.hasMagic = hasMagic; +//# sourceMappingURL=has-magic.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/has-magic.js.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/has-magic.js.map new file mode 100644 index 00000000000000..9b73cfad7d05e4 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/has-magic.js.map @@ -0,0 +1 @@ +{"version":3,"file":"has-magic.js","sourceRoot":"","sources":["../../../src/has-magic.ts"],"names":[],"mappings":";;;AAAA,yCAAqC;AAGrC;;;;;;;;;;GAUG;AACI,MAAM,QAAQ,GAAG,CACtB,OAA0B,EAC1B,UAAuB,EAAE,EAChB,EAAE;IACX,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;QAC3B,OAAO,GAAG,CAAC,OAAO,CAAC,CAAA;KACpB;IACD,KAAK,MAAM,CAAC,IAAI,OAAO,EAAE;QACvB,IAAI,IAAI,qBAAS,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,QAAQ,EAAE;YAAE,OAAO,IAAI,CAAA;KACtD;IACD,OAAO,KAAK,CAAA;AACd,CAAC,CAAA;AAXY,QAAA,QAAQ,YAWpB","sourcesContent":["import { Minimatch } from 'minimatch'\nimport { GlobOptions } from './glob.js'\n\n/**\n * Return true if the patterns provided contain any magic glob characters,\n * given the options provided.\n *\n * Brace expansion is not considered \"magic\" unless the `magicalBraces` option\n * is set, as brace expansion just turns one string into an array of strings.\n * So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and\n * `'xby'` both do not contain any magic glob characters, and it's treated the\n * same as if you had called it on `['xay', 'xby']`. 
When `magicalBraces:true`\n * is in the options, brace expansion _is_ treated as a pattern having magic.\n */\nexport const hasMagic = (\n pattern: string | string[],\n options: GlobOptions = {}\n): boolean => {\n if (!Array.isArray(pattern)) {\n pattern = [pattern]\n }\n for (const p of pattern) {\n if (new Minimatch(p, options).hasMagic()) return true\n }\n return false\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/ignore.d.ts b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/ignore.d.ts new file mode 100644 index 00000000000000..e9d74f3b5e1291 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/ignore.d.ts @@ -0,0 +1,20 @@ +import { Minimatch } from 'minimatch'; +import { Path } from 'path-scurry'; +import { GlobWalkerOpts } from './walker.js'; +export interface IgnoreLike { + ignored?: (p: Path) => boolean; + childrenIgnored?: (p: Path) => boolean; +} +/** + * Class used to process ignored patterns + */ +export declare class Ignore implements IgnoreLike { + relative: Minimatch[]; + relativeChildren: Minimatch[]; + absolute: Minimatch[]; + absoluteChildren: Minimatch[]; + constructor(ignored: string[], { nobrace, nocase, noext, noglobstar, platform, }: GlobWalkerOpts); + ignored(p: Path): boolean; + childrenIgnored(p: Path): boolean; +} +//# sourceMappingURL=ignore.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/ignore.d.ts.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/ignore.d.ts.map new file mode 100644 index 00000000000000..3d604838d1eed2 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/ignore.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"ignore.d.ts","sourceRoot":"","sources":["../../../src/ignore.ts"],"names":[],"mappings":"AAKA,OAAO,EAAE,SAAS,EAAE,MAAM,WAAW,CAAA;AACrC,OAAO,EAAE,IAAI,EAAE,MAAM,aAAa,CAAA;AAElC,OAAO,EAAE,cAAc,EAAE,MAAM,aAAa,CAAA;AAE5C,MAAM,WAAW,UAAU;IACzB,OAAO,CAAC,EAAE,CAAC,CAAC,EAAE,IAAI,KAAK,OAAO,CAAA;IAC9B,eAAe,CAAC,EAAE,CAAC,CAAC,EAAE,IAAI,KAAK,OAAO,CAAA;CACvC;AASD;;GAEG;AACH,qBAAa,MAAO,YAAW,UAAU;IACvC,QAAQ,EAAE,SAAS,EAAE,CAAA;IACrB,gBAAgB,EAAE,SAAS,EAAE,CAAA;IAC7B,QAAQ,EAAE,SAAS,EAAE,CAAA;IACrB,gBAAgB,EAAE,SAAS,EAAE,CAAA;gBAG3B,OAAO,EAAE,MAAM,EAAE,EACjB,EACE,OAAO,EACP,MAAM,EACN,KAAK,EACL,UAAU,EACV,QAA0B,GAC3B,EAAE,cAAc;IAiDnB,OAAO,CAAC,CAAC,EAAE,IAAI,GAAG,OAAO;IAczB,eAAe,CAAC,CAAC,EAAE,IAAI,GAAG,OAAO;CAWlC"} \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/ignore.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/ignore.js new file mode 100644 index 00000000000000..0cbcca335e1cca --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/ignore.js @@ -0,0 +1,103 @@ +"use strict"; +// give it a pattern, and it'll be able to tell you if +// a given path should be ignored. 
+// Ignoring a path ignores its children if the pattern ends in /** +// Ignores are always parsed in dot:true mode +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Ignore = void 0; +const minimatch_1 = require("minimatch"); +const pattern_js_1 = require("./pattern.js"); +const defaultPlatform = typeof process === 'object' && + process && + typeof process.platform === 'string' + ? process.platform + : 'linux'; +/** + * Class used to process ignored patterns + */ +class Ignore { + relative; + relativeChildren; + absolute; + absoluteChildren; + constructor(ignored, { nobrace, nocase, noext, noglobstar, platform = defaultPlatform, }) { + this.relative = []; + this.absolute = []; + this.relativeChildren = []; + this.absoluteChildren = []; + const mmopts = { + dot: true, + nobrace, + nocase, + noext, + noglobstar, + optimizationLevel: 2, + platform, + nocomment: true, + nonegate: true, + }; + // this is a little weird, but it gives us a clean set of optimized + // minimatch matchers, without getting tripped up if one of them + // ends in /** inside a brace section, and it's only inefficient at + // the start of the walk, not along it. + // It'd be nice if the Pattern class just had a .test() method, but + // handling globstars is a bit of a pita, and that code already lives + // in minimatch anyway. + // Another way would be if maybe Minimatch could take its set/globParts + // as an option, and then we could at least just use Pattern to test + // for absolute-ness. + // Yet another way, Minimatch could take an array of glob strings, and + // a cwd option, and do the right thing. + for (const ign of ignored) { + const mm = new minimatch_1.Minimatch(ign, mmopts); + for (let i = 0; i < mm.set.length; i++) { + const parsed = mm.set[i]; + const globParts = mm.globParts[i]; + const p = new pattern_js_1.Pattern(parsed, globParts, 0, platform); + const m = new minimatch_1.Minimatch(p.globString(), mmopts); + const children = globParts[globParts.length - 1] === '**'; + const absolute = p.isAbsolute(); + if (absolute) + this.absolute.push(m); + else + this.relative.push(m); + if (children) { + if (absolute) + this.absoluteChildren.push(m); + else + this.relativeChildren.push(m); + } + } + } + } + ignored(p) { + const fullpath = p.fullpath(); + const fullpaths = `${fullpath}/`; + const relative = p.relative() || '.'; + const relatives = `${relative}/`; + for (const m of this.relative) { + if (m.match(relative) || m.match(relatives)) + return true; + } + for (const m of this.absolute) { + if (m.match(fullpath) || m.match(fullpaths)) + return true; + } + return false; + } + childrenIgnored(p) { + const fullpath = p.fullpath() + '/'; + const relative = (p.relative() || '.') + '/'; + for (const m of this.relativeChildren) { + if (m.match(relative)) + return true; + } + for (const m of this.absoluteChildren) { + if (m.match(fullpath)) + true; + } + return false; + } +} +exports.Ignore = Ignore; +//# sourceMappingURL=ignore.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/ignore.js.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/ignore.js.map new file mode 100644 index 00000000000000..7595b4c68f79ed --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/ignore.js.map @@ -0,0 +1 @@ 
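For context, a minimal usage sketch of the hasMagic helper and the ignore handling vendored above, written against the public glob v10 API that these files expose. This is illustrative only and not part of the patch; the file names and patterns are hypothetical.

// Illustrative sketch: exercises hasMagic and the ignore option from the
// vendored glob v10 shown above. Paths and patterns are made up.
const { globSync, hasMagic } = require('glob')

// Brace expansion alone is not "magic" unless magicalBraces is set.
hasMagic('x{a,b}y')                           // => false
hasMagic('x{a,b}y', { magicalBraces: true })  // => true
hasMagic('src/**/*.js')                       // => true

// Ignore patterns are always parsed in dot:true mode; a pattern ending
// in /** also ignores everything beneath the matched directory.
const files = globSync('**/*.js', { ignore: ['node_modules/**'] })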
+{"version":3,"file":"ignore.js","sourceRoot":"","sources":["../../../src/ignore.ts"],"names":[],"mappings":";AAAA,sDAAsD;AACtD,kCAAkC;AAClC,kEAAkE;AAClE,6CAA6C;;;AAE7C,yCAAqC;AAErC,6CAAsC;AAQtC,MAAM,eAAe,GACnB,OAAO,OAAO,KAAK,QAAQ;IAC3B,OAAO;IACP,OAAO,OAAO,CAAC,QAAQ,KAAK,QAAQ;IAClC,CAAC,CAAC,OAAO,CAAC,QAAQ;IAClB,CAAC,CAAC,OAAO,CAAA;AAEb;;GAEG;AACH,MAAa,MAAM;IACjB,QAAQ,CAAa;IACrB,gBAAgB,CAAa;IAC7B,QAAQ,CAAa;IACrB,gBAAgB,CAAa;IAE7B,YACE,OAAiB,EACjB,EACE,OAAO,EACP,MAAM,EACN,KAAK,EACL,UAAU,EACV,QAAQ,GAAG,eAAe,GACX;QAEjB,IAAI,CAAC,QAAQ,GAAG,EAAE,CAAA;QAClB,IAAI,CAAC,QAAQ,GAAG,EAAE,CAAA;QAClB,IAAI,CAAC,gBAAgB,GAAG,EAAE,CAAA;QAC1B,IAAI,CAAC,gBAAgB,GAAG,EAAE,CAAA;QAC1B,MAAM,MAAM,GAAG;YACb,GAAG,EAAE,IAAI;YACT,OAAO;YACP,MAAM;YACN,KAAK;YACL,UAAU;YACV,iBAAiB,EAAE,CAAC;YACpB,QAAQ;YACR,SAAS,EAAE,IAAI;YACf,QAAQ,EAAE,IAAI;SACf,CAAA;QAED,mEAAmE;QACnE,gEAAgE;QAChE,mEAAmE;QACnE,uCAAuC;QACvC,mEAAmE;QACnE,qEAAqE;QACrE,uBAAuB;QACvB,uEAAuE;QACvE,oEAAoE;QACpE,qBAAqB;QACrB,sEAAsE;QACtE,wCAAwC;QACxC,KAAK,MAAM,GAAG,IAAI,OAAO,EAAE;YACzB,MAAM,EAAE,GAAG,IAAI,qBAAS,CAAC,GAAG,EAAE,MAAM,CAAC,CAAA;YACrC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,EAAE,CAAC,GAAG,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;gBACtC,MAAM,MAAM,GAAG,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;gBACxB,MAAM,SAAS,GAAG,EAAE,CAAC,SAAS,CAAC,CAAC,CAAC,CAAA;gBACjC,MAAM,CAAC,GAAG,IAAI,oBAAO,CAAC,MAAM,EAAE,SAAS,EAAE,CAAC,EAAE,QAAQ,CAAC,CAAA;gBACrD,MAAM,CAAC,GAAG,IAAI,qBAAS,CAAC,CAAC,CAAC,UAAU,EAAE,EAAE,MAAM,CAAC,CAAA;gBAC/C,MAAM,QAAQ,GAAG,SAAS,CAAC,SAAS,CAAC,MAAM,GAAG,CAAC,CAAC,KAAK,IAAI,CAAA;gBACzD,MAAM,QAAQ,GAAG,CAAC,CAAC,UAAU,EAAE,CAAA;gBAC/B,IAAI,QAAQ;oBAAE,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;;oBAC9B,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;gBAC1B,IAAI,QAAQ,EAAE;oBACZ,IAAI,QAAQ;wBAAE,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;;wBACtC,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;iBACnC;aACF;SACF;IACH,CAAC;IAED,OAAO,CAAC,CAAO;QACb,MAAM,QAAQ,GAAG,CAAC,CAAC,QAAQ,EAAE,CAAA;QAC7B,MAAM,SAAS,GAAG,GAAG,QAAQ,GAAG,CAAA;QAChC,MAAM,QAAQ,GAAG,CAAC,CAAC,QAAQ,EAAE,IAAI,GAAG,CAAA;QACpC,MAAM,SAAS,GAAG,GAAG,QAAQ,GAAG,CAAA;QAChC,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,QAAQ,EAAE;YAC7B,IAAI,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,SAAS,CAAC;gBAAE,OAAO,IAAI,CAAA;SACzD;QACD,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,QAAQ,EAAE;YAC7B,IAAI,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,SAAS,CAAC;gBAAE,OAAO,IAAI,CAAA;SACzD;QACD,OAAO,KAAK,CAAA;IACd,CAAC;IAED,eAAe,CAAC,CAAO;QACrB,MAAM,QAAQ,GAAG,CAAC,CAAC,QAAQ,EAAE,GAAG,GAAG,CAAA;QACnC,MAAM,QAAQ,GAAG,CAAC,CAAC,CAAC,QAAQ,EAAE,IAAI,GAAG,CAAC,GAAG,GAAG,CAAA;QAC5C,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,gBAAgB,EAAE;YACrC,IAAI,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC;gBAAE,OAAO,IAAI,CAAA;SACnC;QACD,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,gBAAgB,EAAE;YACrC,IAAI,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC;gBAAE,IAAI,CAAA;SAC5B;QACD,OAAO,KAAK,CAAA;IACd,CAAC;CACF;AAxFD,wBAwFC","sourcesContent":["// give it a pattern, and it'll be able to tell you if\n// a given path should be ignored.\n// Ignoring a path ignores its children if the pattern ends in /**\n// Ignores are always parsed in dot:true mode\n\nimport { Minimatch } from 'minimatch'\nimport { Path } from 'path-scurry'\nimport { Pattern } from './pattern.js'\nimport { GlobWalkerOpts } from './walker.js'\n\nexport interface IgnoreLike {\n ignored?: (p: Path) => boolean\n childrenIgnored?: (p: Path) => boolean\n}\n\nconst defaultPlatform: NodeJS.Platform =\n typeof process === 'object' &&\n process &&\n typeof process.platform === 'string'\n ? 
process.platform\n : 'linux'\n\n/**\n * Class used to process ignored patterns\n */\nexport class Ignore implements IgnoreLike {\n relative: Minimatch[]\n relativeChildren: Minimatch[]\n absolute: Minimatch[]\n absoluteChildren: Minimatch[]\n\n constructor(\n ignored: string[],\n {\n nobrace,\n nocase,\n noext,\n noglobstar,\n platform = defaultPlatform,\n }: GlobWalkerOpts\n ) {\n this.relative = []\n this.absolute = []\n this.relativeChildren = []\n this.absoluteChildren = []\n const mmopts = {\n dot: true,\n nobrace,\n nocase,\n noext,\n noglobstar,\n optimizationLevel: 2,\n platform,\n nocomment: true,\n nonegate: true,\n }\n\n // this is a little weird, but it gives us a clean set of optimized\n // minimatch matchers, without getting tripped up if one of them\n // ends in /** inside a brace section, and it's only inefficient at\n // the start of the walk, not along it.\n // It'd be nice if the Pattern class just had a .test() method, but\n // handling globstars is a bit of a pita, and that code already lives\n // in minimatch anyway.\n // Another way would be if maybe Minimatch could take its set/globParts\n // as an option, and then we could at least just use Pattern to test\n // for absolute-ness.\n // Yet another way, Minimatch could take an array of glob strings, and\n // a cwd option, and do the right thing.\n for (const ign of ignored) {\n const mm = new Minimatch(ign, mmopts)\n for (let i = 0; i < mm.set.length; i++) {\n const parsed = mm.set[i]\n const globParts = mm.globParts[i]\n const p = new Pattern(parsed, globParts, 0, platform)\n const m = new Minimatch(p.globString(), mmopts)\n const children = globParts[globParts.length - 1] === '**'\n const absolute = p.isAbsolute()\n if (absolute) this.absolute.push(m)\n else this.relative.push(m)\n if (children) {\n if (absolute) this.absoluteChildren.push(m)\n else this.relativeChildren.push(m)\n }\n }\n }\n }\n\n ignored(p: Path): boolean {\n const fullpath = p.fullpath()\n const fullpaths = `${fullpath}/`\n const relative = p.relative() || '.'\n const relatives = `${relative}/`\n for (const m of this.relative) {\n if (m.match(relative) || m.match(relatives)) return true\n }\n for (const m of this.absolute) {\n if (m.match(fullpath) || m.match(fullpaths)) return true\n }\n return false\n }\n\n childrenIgnored(p: Path): boolean {\n const fullpath = p.fullpath() + '/'\n const relative = (p.relative() || '.') + '/'\n for (const m of this.relativeChildren) {\n if (m.match(relative)) return true\n }\n for (const m of this.absoluteChildren) {\n if (m.match(fullpath)) true\n }\n return false\n }\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/index.d.ts b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/index.d.ts new file mode 100644 index 00000000000000..669bf12e6d5916 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/index.d.ts @@ -0,0 +1,95 @@ +import { Minipass } from 'minipass'; +import { Path } from 'path-scurry'; +import type { GlobOptions, GlobOptionsWithFileTypesFalse, GlobOptionsWithFileTypesTrue, GlobOptionsWithFileTypesUnset } from './glob.js'; +import { Glob } from './glob.js'; +/** + * Syncronous form of {@link globStream}. Will read all the matches as fast as + * you consume them, even all in a single tick if you consume them immediately, + * but will still respond to backpressure if they're not consumed immediately. 
+ */ +export declare function globStreamSync(pattern: string | string[], options: GlobOptionsWithFileTypesTrue): Minipass; +export declare function globStreamSync(pattern: string | string[], options: GlobOptionsWithFileTypesFalse): Minipass; +export declare function globStreamSync(pattern: string | string[], options: GlobOptionsWithFileTypesUnset): Minipass; +export declare function globStreamSync(pattern: string | string[], options: GlobOptions): Minipass | Minipass; +/** + * Return a stream that emits all the strings or `Path` objects and + * then emits `end` when completed. + */ +export declare function globStream(pattern: string | string[], options: GlobOptionsWithFileTypesFalse): Minipass; +export declare function globStream(pattern: string | string[], options: GlobOptionsWithFileTypesTrue): Minipass; +export declare function globStream(pattern: string | string[], options?: GlobOptionsWithFileTypesUnset | undefined): Minipass; +export declare function globStream(pattern: string | string[], options: GlobOptions): Minipass | Minipass; +/** + * Synchronous form of {@link glob} + */ +export declare function globSync(pattern: string | string[], options: GlobOptionsWithFileTypesFalse): string[]; +export declare function globSync(pattern: string | string[], options: GlobOptionsWithFileTypesTrue): Path[]; +export declare function globSync(pattern: string | string[], options?: GlobOptionsWithFileTypesUnset | undefined): string[]; +export declare function globSync(pattern: string | string[], options: GlobOptions): Path[] | string[]; +/** + * Perform an asynchronous glob search for the pattern(s) specified. Returns + * [Path](https://isaacs.github.io/path-scurry/classes/PathBase) objects if the + * {@link withFileTypes} option is set to `true`. See {@link GlobOptions} for + * full option descriptions. + */ +declare function glob_(pattern: string | string[], options?: GlobOptionsWithFileTypesUnset | undefined): Promise; +declare function glob_(pattern: string | string[], options: GlobOptionsWithFileTypesTrue): Promise; +declare function glob_(pattern: string | string[], options: GlobOptionsWithFileTypesFalse): Promise; +declare function glob_(pattern: string | string[], options: GlobOptions): Promise; +/** + * Return a sync iterator for walking glob pattern matches. + */ +export declare function globIterateSync(pattern: string | string[], options?: GlobOptionsWithFileTypesUnset | undefined): Generator; +export declare function globIterateSync(pattern: string | string[], options: GlobOptionsWithFileTypesTrue): Generator; +export declare function globIterateSync(pattern: string | string[], options: GlobOptionsWithFileTypesFalse): Generator; +export declare function globIterateSync(pattern: string | string[], options: GlobOptions): Generator | Generator; +/** + * Return an async iterator for walking glob pattern matches. 
+ */ +export declare function globIterate(pattern: string | string[], options?: GlobOptionsWithFileTypesUnset | undefined): AsyncGenerator; +export declare function globIterate(pattern: string | string[], options: GlobOptionsWithFileTypesTrue): AsyncGenerator; +export declare function globIterate(pattern: string | string[], options: GlobOptionsWithFileTypesFalse): AsyncGenerator; +export declare function globIterate(pattern: string | string[], options: GlobOptions): AsyncGenerator | AsyncGenerator; +export declare const streamSync: typeof globStreamSync; +export declare const stream: typeof globStream & { + sync: typeof globStreamSync; +}; +export declare const iterateSync: typeof globIterateSync; +export declare const iterate: typeof globIterate & { + sync: typeof globIterateSync; +}; +export declare const sync: typeof globSync & { + stream: typeof globStreamSync; + iterate: typeof globIterateSync; +}; +export { escape, unescape } from 'minimatch'; +export { Glob } from './glob.js'; +export type { GlobOptions, GlobOptionsWithFileTypesFalse, GlobOptionsWithFileTypesTrue, GlobOptionsWithFileTypesUnset, } from './glob.js'; +export { hasMagic } from './has-magic.js'; +export type { IgnoreLike } from './ignore.js'; +export type { MatchStream } from './walker.js'; +export declare const glob: typeof glob_ & { + glob: typeof glob_; + globSync: typeof globSync; + sync: typeof globSync & { + stream: typeof globStreamSync; + iterate: typeof globIterateSync; + }; + globStream: typeof globStream; + stream: typeof globStream & { + sync: typeof globStreamSync; + }; + globStreamSync: typeof globStreamSync; + streamSync: typeof globStreamSync; + globIterate: typeof globIterate; + iterate: typeof globIterate & { + sync: typeof globIterateSync; + }; + globIterateSync: typeof globIterateSync; + iterateSync: typeof globIterateSync; + Glob: typeof Glob; + hasMagic: (pattern: string | string[], options?: GlobOptions) => boolean; + escape: (s: string, { windowsPathsNoEscape, }?: Pick | undefined) => string; + unescape: (s: string, { windowsPathsNoEscape, }?: Pick | undefined) => string; +}; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/index.d.ts.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/index.d.ts.map new file mode 100644 index 00000000000000..4e9ba085ce45b2 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/index.d.ts.map @@ -0,0 +1 @@ 
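As a reference point, a hedged sketch of the entry points declared in index.d.ts above (promise, streaming, and async-iterator forms); the patterns and option values here are hypothetical, not taken from the patch.

// Illustrative sketch of the glob v10 surface declared above.
const { glob, globStream, globIterate } = require('glob')

async function demo () {
  // Promise form: resolves to string[] by default, or to path-scurry
  // Path objects when withFileTypes: true is passed.
  const names = await glob('src/**/*.ts')
  const paths = await glob('src/**/*.ts', { withFileTypes: true })

  // Async-iterator form, useful for walking very large trees lazily.
  for await (const match of globIterate('**/*.md', { nocase: true })) {
    console.log(match)
  }

  // Minipass stream form; emits each match and then 'end' when done.
  globStream('**/*.json').on('data', f => console.log(f))

  return { names, paths }
}

The overload groups in the declaration file exist so that the string-vs-Path return type stays precise depending on whether withFileTypes is set.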
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/index.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAA;AACnC,OAAO,EAAE,IAAI,EAAE,MAAM,aAAa,CAAA;AAClC,OAAO,KAAK,EACV,WAAW,EACX,6BAA6B,EAC7B,4BAA4B,EAC5B,6BAA6B,EAC9B,MAAM,WAAW,CAAA;AAClB,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAA;AAGhC;;;;GAIG;AACH,wBAAgB,cAAc,CAC5B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,4BAA4B,GACpC,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;AACvB,wBAAgB,cAAc,CAC5B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;AAC3B,wBAAgB,cAAc,CAC5B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;AAC3B,wBAAgB,cAAc,CAC5B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,WAAW,GACnB,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;AAQlD;;;GAGG;AACH,wBAAgB,UAAU,CACxB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;AAC3B,wBAAgB,UAAU,CACxB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,4BAA4B,GACpC,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;AACvB,wBAAgB,UAAU,CACxB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,CAAC,EAAE,6BAA6B,GAAG,SAAS,GAClD,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;AAC3B,wBAAgB,UAAU,CACxB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,WAAW,GACnB,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;AAQlD;;GAEG;AACH,wBAAgB,QAAQ,CACtB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,MAAM,EAAE,CAAA;AACX,wBAAgB,QAAQ,CACtB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,4BAA4B,GACpC,IAAI,EAAE,CAAA;AACT,wBAAgB,QAAQ,CACtB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,CAAC,EAAE,6BAA6B,GAAG,SAAS,GAClD,MAAM,EAAE,CAAA;AACX,wBAAgB,QAAQ,CACtB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,WAAW,GACnB,IAAI,EAAE,GAAG,MAAM,EAAE,CAAA;AAQpB;;;;;GAKG;AACH,iBAAe,KAAK,CAClB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,CAAC,EAAE,6BAA6B,GAAG,SAAS,GAClD,OAAO,CAAC,MAAM,EAAE,CAAC,CAAA;AACpB,iBAAe,KAAK,CAClB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,4BAA4B,GACpC,OAAO,CAAC,IAAI,EAAE,CAAC,CAAA;AAClB,iBAAe,KAAK,CAClB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,OAAO,CAAC,MAAM,EAAE,CAAC,CAAA;AACpB,iBAAe,KAAK,CAClB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,WAAW,GACnB,OAAO,CAAC,IAAI,EAAE,GAAG,MAAM,EAAE,CAAC,CAAA;AAQ7B;;GAEG;AACH,wBAAgB,eAAe,CAC7B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,CAAC,EAAE,6BAA6B,GAAG,SAAS,GAClD,SAAS,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AAChC,wBAAgB,eAAe,CAC7B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,4BAA4B,GACpC,SAAS,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AAC9B,wBAAgB,eAAe,CAC7B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,SAAS,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AAChC,wBAAgB,eAAe,CAC7B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,WAAW,GACnB,SAAS,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AAQ9D;;GAEG;AACH,wBAAgB,WAAW,CACzB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,CAAC,EAAE,6BAA6B,GAAG,SAAS,GAClD,cAAc,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AACrC,wBAAgB,WAAW,CACzB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,4BAA4B,GACpC,cAAc,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AACnC,wBAAgB,WAAW,CACzB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,cAAc,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AACrC,wBAAgB,WAAW,CACzB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,WAAW,GACnB,cAAc,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,GAAG,cAAc,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AASxE,eAAO,MAAM,UAAU,uBAAiB,CAAA;AACxC,eAAO,MAAM,MAAM;;CAAsD,CAAA;AACzE,eAAO,MAAM,WAAW,wBAAkB,CAAA;AAC1C,eAAO
,MAAM,OAAO;;CAElB,CAAA;AACF,eAAO,MAAM,IAAI;;;CAGf,CAAA;AAGF,OAAO,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,WAAW,CAAA;AAC5C,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAA;AAChC,YAAY,EACV,WAAW,EACX,6BAA6B,EAC7B,4BAA4B,EAC5B,6BAA6B,GAC9B,MAAM,WAAW,CAAA;AAClB,OAAO,EAAE,QAAQ,EAAE,MAAM,gBAAgB,CAAA;AACzC,YAAY,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AAC7C,YAAY,EAAE,WAAW,EAAE,MAAM,aAAa,CAAA;AAG9C,eAAO,MAAM,IAAI;;;;;;;;;;;;;;;;;;;;;;;CAgBf,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/index.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/index.js new file mode 100644 index 00000000000000..71c31c03dd339b --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/index.js @@ -0,0 +1,68 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.glob = exports.hasMagic = exports.Glob = exports.unescape = exports.escape = exports.sync = exports.iterate = exports.iterateSync = exports.stream = exports.streamSync = exports.globIterate = exports.globIterateSync = exports.globSync = exports.globStream = exports.globStreamSync = void 0; +const minimatch_1 = require("minimatch"); +const glob_js_1 = require("./glob.js"); +const has_magic_js_1 = require("./has-magic.js"); +function globStreamSync(pattern, options = {}) { + return new glob_js_1.Glob(pattern, options).streamSync(); +} +exports.globStreamSync = globStreamSync; +function globStream(pattern, options = {}) { + return new glob_js_1.Glob(pattern, options).stream(); +} +exports.globStream = globStream; +function globSync(pattern, options = {}) { + return new glob_js_1.Glob(pattern, options).walkSync(); +} +exports.globSync = globSync; +async function glob_(pattern, options = {}) { + return new glob_js_1.Glob(pattern, options).walk(); +} +function globIterateSync(pattern, options = {}) { + return new glob_js_1.Glob(pattern, options).iterateSync(); +} +exports.globIterateSync = globIterateSync; +function globIterate(pattern, options = {}) { + return new glob_js_1.Glob(pattern, options).iterate(); +} +exports.globIterate = globIterate; +// aliases: glob.sync.stream() glob.stream.sync() glob.sync() etc +exports.streamSync = globStreamSync; +exports.stream = Object.assign(globStream, { sync: globStreamSync }); +exports.iterateSync = globIterateSync; +exports.iterate = Object.assign(globIterate, { + sync: globIterateSync, +}); +exports.sync = Object.assign(globSync, { + stream: globStreamSync, + iterate: globIterateSync, +}); +/* c8 ignore start */ +var minimatch_2 = require("minimatch"); +Object.defineProperty(exports, "escape", { enumerable: true, get: function () { return minimatch_2.escape; } }); +Object.defineProperty(exports, "unescape", { enumerable: true, get: function () { return minimatch_2.unescape; } }); +var glob_js_2 = require("./glob.js"); +Object.defineProperty(exports, "Glob", { enumerable: true, get: function () { return glob_js_2.Glob; } }); +var has_magic_js_2 = require("./has-magic.js"); +Object.defineProperty(exports, "hasMagic", { enumerable: true, get: function () { return has_magic_js_2.hasMagic; } }); +/* c8 ignore stop */ +exports.glob = Object.assign(glob_, { + glob: glob_, + globSync, + sync: exports.sync, + globStream, + stream: exports.stream, + globStreamSync, + streamSync: exports.streamSync, + globIterate, + iterate: exports.iterate, + globIterateSync, + iterateSync: exports.iterateSync, + Glob: glob_js_1.Glob, + hasMagic: has_magic_js_1.hasMagic, + escape: 
minimatch_1.escape, + unescape: minimatch_1.unescape, +}); +exports.glob.glob = exports.glob; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/index.js.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/index.js.map new file mode 100644 index 00000000000000..060338fbd1b94b --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/index.ts"],"names":[],"mappings":";;;AAAA,yCAA4C;AAS5C,uCAAgC;AAChC,iDAAyC;AAuBzC,SAAgB,cAAc,CAC5B,OAA0B,EAC1B,UAAuB,EAAE;IAEzB,OAAO,IAAI,cAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,UAAU,EAAE,CAAA;AAChD,CAAC;AALD,wCAKC;AAsBD,SAAgB,UAAU,CACxB,OAA0B,EAC1B,UAAuB,EAAE;IAEzB,OAAO,IAAI,cAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;AAC5C,CAAC;AALD,gCAKC;AAqBD,SAAgB,QAAQ,CACtB,OAA0B,EAC1B,UAAuB,EAAE;IAEzB,OAAO,IAAI,cAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,QAAQ,EAAE,CAAA;AAC9C,CAAC;AALD,4BAKC;AAwBD,KAAK,UAAU,KAAK,CAClB,OAA0B,EAC1B,UAAuB,EAAE;IAEzB,OAAO,IAAI,cAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,IAAI,EAAE,CAAA;AAC1C,CAAC;AAqBD,SAAgB,eAAe,CAC7B,OAA0B,EAC1B,UAAuB,EAAE;IAEzB,OAAO,IAAI,cAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,WAAW,EAAE,CAAA;AACjD,CAAC;AALD,0CAKC;AAqBD,SAAgB,WAAW,CACzB,OAA0B,EAC1B,UAAuB,EAAE;IAEzB,OAAO,IAAI,cAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,OAAO,EAAE,CAAA;AAC7C,CAAC;AALD,kCAKC;AAED,iEAAiE;AACpD,QAAA,UAAU,GAAG,cAAc,CAAA;AAC3B,QAAA,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,UAAU,EAAE,EAAE,IAAI,EAAE,cAAc,EAAE,CAAC,CAAA;AAC5D,QAAA,WAAW,GAAG,eAAe,CAAA;AAC7B,QAAA,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,WAAW,EAAE;IAChD,IAAI,EAAE,eAAe;CACtB,CAAC,CAAA;AACW,QAAA,IAAI,GAAG,MAAM,CAAC,MAAM,CAAC,QAAQ,EAAE;IAC1C,MAAM,EAAE,cAAc;IACtB,OAAO,EAAE,eAAe;CACzB,CAAC,CAAA;AAEF,qBAAqB;AACrB,uCAA4C;AAAnC,mGAAA,MAAM,OAAA;AAAE,qGAAA,QAAQ,OAAA;AACzB,qCAAgC;AAAvB,+FAAA,IAAI,OAAA;AAOb,+CAAyC;AAAhC,wGAAA,QAAQ,OAAA;AAGjB,oBAAoB;AAEP,QAAA,IAAI,GAAG,MAAM,CAAC,MAAM,CAAC,KAAK,EAAE;IACvC,IAAI,EAAE,KAAK;IACX,QAAQ;IACR,IAAI,EAAJ,YAAI;IACJ,UAAU;IACV,MAAM,EAAN,cAAM;IACN,cAAc;IACd,UAAU,EAAV,kBAAU;IACV,WAAW;IACX,OAAO,EAAP,eAAO;IACP,eAAe;IACf,WAAW,EAAX,mBAAW;IACX,IAAI,EAAJ,cAAI;IACJ,QAAQ,EAAR,uBAAQ;IACR,MAAM,EAAN,kBAAM;IACN,QAAQ,EAAR,oBAAQ;CACT,CAAC,CAAA;AACF,YAAI,CAAC,IAAI,GAAG,YAAI,CAAA","sourcesContent":["import { escape, unescape } from 'minimatch'\nimport { Minipass } from 'minipass'\nimport { Path } from 'path-scurry'\nimport type {\n GlobOptions,\n GlobOptionsWithFileTypesFalse,\n GlobOptionsWithFileTypesTrue,\n GlobOptionsWithFileTypesUnset,\n} from './glob.js'\nimport { Glob } from './glob.js'\nimport { hasMagic } from './has-magic.js'\n\n/**\n * Syncronous form of {@link globStream}. 
Will read all the matches as fast as\n * you consume them, even all in a single tick if you consume them immediately,\n * but will still respond to backpressure if they're not consumed immediately.\n */\nexport function globStreamSync(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesTrue\n): Minipass\nexport function globStreamSync(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesFalse\n): Minipass\nexport function globStreamSync(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesUnset\n): Minipass\nexport function globStreamSync(\n pattern: string | string[],\n options: GlobOptions\n): Minipass | Minipass\nexport function globStreamSync(\n pattern: string | string[],\n options: GlobOptions = {}\n) {\n return new Glob(pattern, options).streamSync()\n}\n\n/**\n * Return a stream that emits all the strings or `Path` objects and\n * then emits `end` when completed.\n */\nexport function globStream(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesFalse\n): Minipass\nexport function globStream(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesTrue\n): Minipass\nexport function globStream(\n pattern: string | string[],\n options?: GlobOptionsWithFileTypesUnset | undefined\n): Minipass\nexport function globStream(\n pattern: string | string[],\n options: GlobOptions\n): Minipass | Minipass\nexport function globStream(\n pattern: string | string[],\n options: GlobOptions = {}\n) {\n return new Glob(pattern, options).stream()\n}\n\n/**\n * Synchronous form of {@link glob}\n */\nexport function globSync(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesFalse\n): string[]\nexport function globSync(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesTrue\n): Path[]\nexport function globSync(\n pattern: string | string[],\n options?: GlobOptionsWithFileTypesUnset | undefined\n): string[]\nexport function globSync(\n pattern: string | string[],\n options: GlobOptions\n): Path[] | string[]\nexport function globSync(\n pattern: string | string[],\n options: GlobOptions = {}\n) {\n return new Glob(pattern, options).walkSync()\n}\n\n/**\n * Perform an asynchronous glob search for the pattern(s) specified. Returns\n * [Path](https://isaacs.github.io/path-scurry/classes/PathBase) objects if the\n * {@link withFileTypes} option is set to `true`. 
See {@link GlobOptions} for\n * full option descriptions.\n */\nasync function glob_(\n pattern: string | string[],\n options?: GlobOptionsWithFileTypesUnset | undefined\n): Promise\nasync function glob_(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesTrue\n): Promise\nasync function glob_(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesFalse\n): Promise\nasync function glob_(\n pattern: string | string[],\n options: GlobOptions\n): Promise\nasync function glob_(\n pattern: string | string[],\n options: GlobOptions = {}\n) {\n return new Glob(pattern, options).walk()\n}\n\n/**\n * Return a sync iterator for walking glob pattern matches.\n */\nexport function globIterateSync(\n pattern: string | string[],\n options?: GlobOptionsWithFileTypesUnset | undefined\n): Generator\nexport function globIterateSync(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesTrue\n): Generator\nexport function globIterateSync(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesFalse\n): Generator\nexport function globIterateSync(\n pattern: string | string[],\n options: GlobOptions\n): Generator | Generator\nexport function globIterateSync(\n pattern: string | string[],\n options: GlobOptions = {}\n) {\n return new Glob(pattern, options).iterateSync()\n}\n\n/**\n * Return an async iterator for walking glob pattern matches.\n */\nexport function globIterate(\n pattern: string | string[],\n options?: GlobOptionsWithFileTypesUnset | undefined\n): AsyncGenerator\nexport function globIterate(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesTrue\n): AsyncGenerator\nexport function globIterate(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesFalse\n): AsyncGenerator\nexport function globIterate(\n pattern: string | string[],\n options: GlobOptions\n): AsyncGenerator | AsyncGenerator\nexport function globIterate(\n pattern: string | string[],\n options: GlobOptions = {}\n) {\n return new Glob(pattern, options).iterate()\n}\n\n// aliases: glob.sync.stream() glob.stream.sync() glob.sync() etc\nexport const streamSync = globStreamSync\nexport const stream = Object.assign(globStream, { sync: globStreamSync })\nexport const iterateSync = globIterateSync\nexport const iterate = Object.assign(globIterate, {\n sync: globIterateSync,\n})\nexport const sync = Object.assign(globSync, {\n stream: globStreamSync,\n iterate: globIterateSync,\n})\n\n/* c8 ignore start */\nexport { escape, unescape } from 'minimatch'\nexport { Glob } from './glob.js'\nexport type {\n GlobOptions,\n GlobOptionsWithFileTypesFalse,\n GlobOptionsWithFileTypesTrue,\n GlobOptionsWithFileTypesUnset,\n} from './glob.js'\nexport { hasMagic } from './has-magic.js'\nexport type { IgnoreLike } from './ignore.js'\nexport type { MatchStream } from './walker.js'\n/* c8 ignore stop */\n\nexport const glob = Object.assign(glob_, {\n glob: glob_,\n globSync,\n sync,\n globStream,\n stream,\n globStreamSync,\n streamSync,\n globIterate,\n iterate,\n globIterateSync,\n iterateSync,\n Glob,\n hasMagic,\n escape,\n unescape,\n})\nglob.glob = glob\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/pattern.d.ts b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/pattern.d.ts new file mode 100644 index 00000000000000..109cc4e7a5dae3 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/pattern.d.ts @@ -0,0 +1,77 @@ +/// 
+import { GLOBSTAR } from 'minimatch'; +export type MMPattern = string | RegExp | typeof GLOBSTAR; +export type PatternList = [p: MMPattern, ...rest: MMPattern[]]; +export type UNCPatternList = [ + p0: '', + p1: '', + p2: string, + p3: string, + ...rest: MMPattern[] +]; +export type DrivePatternList = [p0: string, ...rest: MMPattern[]]; +export type AbsolutePatternList = [p0: '', ...rest: MMPattern[]]; +export type GlobList = [p: string, ...rest: string[]]; +/** + * An immutable-ish view on an array of glob parts and their parsed + * results + */ +export declare class Pattern { + #private; + readonly length: number; + constructor(patternList: MMPattern[], globList: string[], index: number, platform: NodeJS.Platform); + /** + * The first entry in the parsed list of patterns + */ + pattern(): MMPattern; + /** + * true of if pattern() returns a string + */ + isString(): boolean; + /** + * true of if pattern() returns GLOBSTAR + */ + isGlobstar(): boolean; + /** + * true if pattern() returns a regexp + */ + isRegExp(): boolean; + /** + * The /-joined set of glob parts that make up this pattern + */ + globString(): string; + /** + * true if there are more pattern parts after this one + */ + hasMore(): boolean; + /** + * The rest of the pattern after this part, or null if this is the end + */ + rest(): Pattern | null; + /** + * true if the pattern represents a //unc/path/ on windows + */ + isUNC(): boolean; + /** + * True if the pattern starts with a drive letter on Windows + */ + isDrive(): boolean; + /** + * True if the pattern is rooted on an absolute path + */ + isAbsolute(): boolean; + /** + * consume the root of the pattern, and return it + */ + root(): string; + /** + * Check to see if the current globstar pattern is allowed to follow + * a symbolic link. 
+ */ + checkFollowGlobstar(): boolean; + /** + * Mark that the current globstar pattern is following a symbolic link + */ + markFollowGlobstar(): boolean; +} +//# sourceMappingURL=pattern.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/pattern.d.ts.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/pattern.d.ts.map new file mode 100644 index 00000000000000..48430f63db0947 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/pattern.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"pattern.d.ts","sourceRoot":"","sources":["../../../src/pattern.ts"],"names":[],"mappings":";AAEA,OAAO,EAAE,QAAQ,EAAE,MAAM,WAAW,CAAA;AACpC,MAAM,MAAM,SAAS,GAAG,MAAM,GAAG,MAAM,GAAG,OAAO,QAAQ,CAAA;AAGzD,MAAM,MAAM,WAAW,GAAG,CAAC,CAAC,EAAE,SAAS,EAAE,GAAG,IAAI,EAAE,SAAS,EAAE,CAAC,CAAA;AAC9D,MAAM,MAAM,cAAc,GAAG;IAC3B,EAAE,EAAE,EAAE;IACN,EAAE,EAAE,EAAE;IACN,EAAE,EAAE,MAAM;IACV,EAAE,EAAE,MAAM;IACV,GAAG,IAAI,EAAE,SAAS,EAAE;CACrB,CAAA;AACD,MAAM,MAAM,gBAAgB,GAAG,CAAC,EAAE,EAAE,MAAM,EAAE,GAAG,IAAI,EAAE,SAAS,EAAE,CAAC,CAAA;AACjE,MAAM,MAAM,mBAAmB,GAAG,CAAC,EAAE,EAAE,EAAE,EAAE,GAAG,IAAI,EAAE,SAAS,EAAE,CAAC,CAAA;AAChE,MAAM,MAAM,QAAQ,GAAG,CAAC,CAAC,EAAE,MAAM,EAAE,GAAG,IAAI,EAAE,MAAM,EAAE,CAAC,CAAA;AAMrD;;;GAGG;AACH,qBAAa,OAAO;;IAIlB,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAA;gBAUrB,WAAW,EAAE,SAAS,EAAE,EACxB,QAAQ,EAAE,MAAM,EAAE,EAClB,KAAK,EAAE,MAAM,EACb,QAAQ,EAAE,MAAM,CAAC,QAAQ;IA6D3B;;OAEG;IACH,OAAO,IAAI,SAAS;IAIpB;;OAEG;IACH,QAAQ,IAAI,OAAO;IAGnB;;OAEG;IACH,UAAU,IAAI,OAAO;IAGrB;;OAEG;IACH,QAAQ,IAAI,OAAO;IAInB;;OAEG;IACH,UAAU,IAAI,MAAM;IAUpB;;OAEG;IACH,OAAO,IAAI,OAAO;IAIlB;;OAEG;IACH,IAAI,IAAI,OAAO,GAAG,IAAI;IAetB;;OAEG;IACH,KAAK,IAAI,OAAO;IAoBhB;;OAEG;IACH,OAAO,IAAI,OAAO;IAelB;;OAEG;IACH,UAAU,IAAI,OAAO;IAUrB;;OAEG;IACH,IAAI,IAAI,MAAM;IAOd;;;OAGG;IACH,mBAAmB,IAAI,OAAO;IAQ9B;;OAEG;IACH,kBAAkB,IAAI,OAAO;CAM9B"} \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/pattern.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/pattern.js new file mode 100644 index 00000000000000..181371293d8605 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/pattern.js @@ -0,0 +1,219 @@ +"use strict"; +// this is just a very light wrapper around 2 arrays with an offset index +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Pattern = void 0; +const minimatch_1 = require("minimatch"); +const isPatternList = (pl) => pl.length >= 1; +const isGlobList = (gl) => gl.length >= 1; +/** + * An immutable-ish view on an array of glob parts and their parsed + * results + */ +class Pattern { + #patternList; + #globList; + #index; + length; + #platform; + #rest; + #globString; + #isDrive; + #isUNC; + #isAbsolute; + #followGlobstar = true; + constructor(patternList, globList, index, platform) { + if (!isPatternList(patternList)) { + throw new TypeError('empty pattern list'); + } + if (!isGlobList(globList)) { + throw new TypeError('empty glob list'); + } + if (globList.length !== patternList.length) { + throw new TypeError('mismatched pattern list and glob list lengths'); + } + this.length = patternList.length; + if (index < 0 || index >= this.length) { + throw new TypeError('index out of range'); + } + this.#patternList = patternList; + this.#globList = globList; + this.#index = index; + this.#platform = platform; + // normalize root 
entries of absolute patterns on initial creation. + if (this.#index === 0) { + // c: => ['c:/'] + // C:/ => ['C:/'] + // C:/x => ['C:/', 'x'] + // //host/share => ['//host/share/'] + // //host/share/ => ['//host/share/'] + // //host/share/x => ['//host/share/', 'x'] + // /etc => ['/', 'etc'] + // / => ['/'] + if (this.isUNC()) { + // '' / '' / 'host' / 'share' + const [p0, p1, p2, p3, ...prest] = this.#patternList; + const [g0, g1, g2, g3, ...grest] = this.#globList; + if (prest[0] === '') { + // ends in / + prest.shift(); + grest.shift(); + } + const p = [p0, p1, p2, p3, ''].join('/'); + const g = [g0, g1, g2, g3, ''].join('/'); + this.#patternList = [p, ...prest]; + this.#globList = [g, ...grest]; + this.length = this.#patternList.length; + } + else if (this.isDrive() || this.isAbsolute()) { + const [p1, ...prest] = this.#patternList; + const [g1, ...grest] = this.#globList; + if (prest[0] === '') { + // ends in / + prest.shift(); + grest.shift(); + } + const p = p1 + '/'; + const g = g1 + '/'; + this.#patternList = [p, ...prest]; + this.#globList = [g, ...grest]; + this.length = this.#patternList.length; + } + } + } + /** + * The first entry in the parsed list of patterns + */ + pattern() { + return this.#patternList[this.#index]; + } + /** + * true of if pattern() returns a string + */ + isString() { + return typeof this.#patternList[this.#index] === 'string'; + } + /** + * true of if pattern() returns GLOBSTAR + */ + isGlobstar() { + return this.#patternList[this.#index] === minimatch_1.GLOBSTAR; + } + /** + * true if pattern() returns a regexp + */ + isRegExp() { + return this.#patternList[this.#index] instanceof RegExp; + } + /** + * The /-joined set of glob parts that make up this pattern + */ + globString() { + return (this.#globString = + this.#globString || + (this.#index === 0 + ? this.isAbsolute() + ? this.#globList[0] + this.#globList.slice(1).join('/') + : this.#globList.join('/') + : this.#globList.slice(this.#index).join('/'))); + } + /** + * true if there are more pattern parts after this one + */ + hasMore() { + return this.length > this.#index + 1; + } + /** + * The rest of the pattern after this part, or null if this is the end + */ + rest() { + if (this.#rest !== undefined) + return this.#rest; + if (!this.hasMore()) + return (this.#rest = null); + this.#rest = new Pattern(this.#patternList, this.#globList, this.#index + 1, this.#platform); + this.#rest.#isAbsolute = this.#isAbsolute; + this.#rest.#isUNC = this.#isUNC; + this.#rest.#isDrive = this.#isDrive; + return this.#rest; + } + /** + * true if the pattern represents a //unc/path/ on windows + */ + isUNC() { + const pl = this.#patternList; + return this.#isUNC !== undefined + ? this.#isUNC + : (this.#isUNC = + this.#platform === 'win32' && + this.#index === 0 && + pl[0] === '' && + pl[1] === '' && + typeof pl[2] === 'string' && + !!pl[2] && + typeof pl[3] === 'string' && + !!pl[3]); + } + // pattern like C:/... + // split = ['C:', ...] + // XXX: would be nice to handle patterns like `c:*` to test the cwd + // in c: for *, but I don't know of a way to even figure out what that + // cwd is without actually chdir'ing into it? + /** + * True if the pattern starts with a drive letter on Windows + */ + isDrive() { + const pl = this.#patternList; + return this.#isDrive !== undefined + ? this.#isDrive + : (this.#isDrive = + this.#platform === 'win32' && + this.#index === 0 && + this.length > 1 && + typeof pl[0] === 'string' && + /^[a-z]:$/i.test(pl[0])); + } + // pattern = '/' or '/...' or '/x/...' 
+ // split = ['', ''] or ['', ...] or ['', 'x', ...] + // Drive and UNC both considered absolute on windows + /** + * True if the pattern is rooted on an absolute path + */ + isAbsolute() { + const pl = this.#patternList; + return this.#isAbsolute !== undefined + ? this.#isAbsolute + : (this.#isAbsolute = + (pl[0] === '' && pl.length > 1) || + this.isDrive() || + this.isUNC()); + } + /** + * consume the root of the pattern, and return it + */ + root() { + const p = this.#patternList[0]; + return typeof p === 'string' && this.isAbsolute() && this.#index === 0 + ? p + : ''; + } + /** + * Check to see if the current globstar pattern is allowed to follow + * a symbolic link. + */ + checkFollowGlobstar() { + return !(this.#index === 0 || + !this.isGlobstar() || + !this.#followGlobstar); + } + /** + * Mark that the current globstar pattern is following a symbolic link + */ + markFollowGlobstar() { + if (this.#index === 0 || !this.isGlobstar() || !this.#followGlobstar) + return false; + this.#followGlobstar = false; + return true; + } +} +exports.Pattern = Pattern; +//# sourceMappingURL=pattern.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/pattern.js.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/pattern.js.map new file mode 100644 index 00000000000000..ba5293ff9f2489 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/pattern.js.map @@ -0,0 +1 @@ +{"version":3,"file":"pattern.js","sourceRoot":"","sources":["../../../src/pattern.ts"],"names":[],"mappings":";AAAA,yEAAyE;;;AAEzE,yCAAoC;AAgBpC,MAAM,aAAa,GAAG,CAAC,EAAe,EAAqB,EAAE,CAC3D,EAAE,CAAC,MAAM,IAAI,CAAC,CAAA;AAChB,MAAM,UAAU,GAAG,CAAC,EAAY,EAAkB,EAAE,CAAC,EAAE,CAAC,MAAM,IAAI,CAAC,CAAA;AAEnE;;;GAGG;AACH,MAAa,OAAO;IACT,YAAY,CAAa;IACzB,SAAS,CAAU;IACnB,MAAM,CAAQ;IACd,MAAM,CAAQ;IACd,SAAS,CAAiB;IACnC,KAAK,CAAiB;IACtB,WAAW,CAAS;IACpB,QAAQ,CAAU;IAClB,MAAM,CAAU;IAChB,WAAW,CAAU;IACrB,eAAe,GAAY,IAAI,CAAA;IAE/B,YACE,WAAwB,EACxB,QAAkB,EAClB,KAAa,EACb,QAAyB;QAEzB,IAAI,CAAC,aAAa,CAAC,WAAW,CAAC,EAAE;YAC/B,MAAM,IAAI,SAAS,CAAC,oBAAoB,CAAC,CAAA;SAC1C;QACD,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE;YACzB,MAAM,IAAI,SAAS,CAAC,iBAAiB,CAAC,CAAA;SACvC;QACD,IAAI,QAAQ,CAAC,MAAM,KAAK,WAAW,CAAC,MAAM,EAAE;YAC1C,MAAM,IAAI,SAAS,CAAC,+CAA+C,CAAC,CAAA;SACrE;QACD,IAAI,CAAC,MAAM,GAAG,WAAW,CAAC,MAAM,CAAA;QAChC,IAAI,KAAK,GAAG,CAAC,IAAI,KAAK,IAAI,IAAI,CAAC,MAAM,EAAE;YACrC,MAAM,IAAI,SAAS,CAAC,oBAAoB,CAAC,CAAA;SAC1C;QACD,IAAI,CAAC,YAAY,GAAG,WAAW,CAAA;QAC/B,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAA;QACzB,IAAI,CAAC,MAAM,GAAG,KAAK,CAAA;QACnB,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAA;QAEzB,mEAAmE;QACnE,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE;YACrB,gBAAgB;YAChB,iBAAiB;YACjB,uBAAuB;YACvB,oCAAoC;YACpC,qCAAqC;YACrC,2CAA2C;YAC3C,uBAAuB;YACvB,aAAa;YACb,IAAI,IAAI,CAAC,KAAK,EAAE,EAAE;gBAChB,6BAA6B;gBAC7B,MAAM,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,GAAG,KAAK,CAAC,GAAG,IAAI,CAAC,YAAY,CAAA;gBACpD,MAAM,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,GAAG,KAAK,CAAC,GAAG,IAAI,CAAC,SAAS,CAAA;gBACjD,IAAI,KAAK,CAAC,CAAC,CAAC,KAAK,EAAE,EAAE;oBACnB,YAAY;oBACZ,KAAK,CAAC,KAAK,EAAE,CAAA;oBACb,KAAK,CAAC,KAAK,EAAE,CAAA;iBACd;gBACD,MAAM,CAAC,GAAG,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;gBACxC,MAAM,CAAC,GAAG,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;gBACxC,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC,EAAE,GAAG,KAAK,CAAC,CAAA;gBACjC,IAAI,CAAC,SAAS,GAAG,CAAC,CAAC,EAAE,GAAG,KAAK,CAAC,CAAA;gBAC9B,IAAI,CAAC
,MAAM,GAAG,IAAI,CAAC,YAAY,CAAC,MAAM,CAAA;aACvC;iBAAM,IAAI,IAAI,CAAC,OAAO,EAAE,IAAI,IAAI,CAAC,UAAU,EAAE,EAAE;gBAC9C,MAAM,CAAC,EAAE,EAAE,GAAG,KAAK,CAAC,GAAG,IAAI,CAAC,YAAY,CAAA;gBACxC,MAAM,CAAC,EAAE,EAAE,GAAG,KAAK,CAAC,GAAG,IAAI,CAAC,SAAS,CAAA;gBACrC,IAAI,KAAK,CAAC,CAAC,CAAC,KAAK,EAAE,EAAE;oBACnB,YAAY;oBACZ,KAAK,CAAC,KAAK,EAAE,CAAA;oBACb,KAAK,CAAC,KAAK,EAAE,CAAA;iBACd;gBACD,MAAM,CAAC,GAAI,EAAa,GAAG,GAAG,CAAA;gBAC9B,MAAM,CAAC,GAAG,EAAE,GAAG,GAAG,CAAA;gBAClB,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC,EAAE,GAAG,KAAK,CAAC,CAAA;gBACjC,IAAI,CAAC,SAAS,GAAG,CAAC,CAAC,EAAE,GAAG,KAAK,CAAC,CAAA;gBAC9B,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,YAAY,CAAC,MAAM,CAAA;aACvC;SACF;IACH,CAAC;IAED;;OAEG;IACH,OAAO;QACL,OAAO,IAAI,CAAC,YAAY,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;IACvC,CAAC;IAED;;OAEG;IACH,QAAQ;QACN,OAAO,OAAO,IAAI,CAAC,YAAY,CAAC,IAAI,CAAC,MAAM,CAAC,KAAK,QAAQ,CAAA;IAC3D,CAAC;IACD;;OAEG;IACH,UAAU;QACR,OAAO,IAAI,CAAC,YAAY,CAAC,IAAI,CAAC,MAAM,CAAC,KAAK,oBAAQ,CAAA;IACpD,CAAC;IACD;;OAEG;IACH,QAAQ;QACN,OAAO,IAAI,CAAC,YAAY,CAAC,IAAI,CAAC,MAAM,CAAC,YAAY,MAAM,CAAA;IACzD,CAAC;IAED;;OAEG;IACH,UAAU;QACR,OAAO,CAAC,IAAI,CAAC,WAAW;YACtB,IAAI,CAAC,WAAW;gBAChB,CAAC,IAAI,CAAC,MAAM,KAAK,CAAC;oBAChB,CAAC,CAAC,IAAI,CAAC,UAAU,EAAE;wBACjB,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC;wBACvD,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,CAAC;oBAC5B,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;IACrD,CAAC;IAED;;OAEG;IACH,OAAO;QACL,OAAO,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,GAAG,CAAC,CAAA;IACtC,CAAC;IAED;;OAEG;IACH,IAAI;QACF,IAAI,IAAI,CAAC,KAAK,KAAK,SAAS;YAAE,OAAO,IAAI,CAAC,KAAK,CAAA;QAC/C,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE;YAAE,OAAO,CAAC,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,CAAA;QAC/C,IAAI,CAAC,KAAK,GAAG,IAAI,OAAO,CACtB,IAAI,CAAC,YAAY,EACjB,IAAI,CAAC,SAAS,EACd,IAAI,CAAC,MAAM,GAAG,CAAC,EACf,IAAI,CAAC,SAAS,CACf,CAAA;QACD,IAAI,CAAC,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC,WAAW,CAAA;QACzC,IAAI,CAAC,KAAK,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;QAC/B,IAAI,CAAC,KAAK,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAA;QACnC,OAAO,IAAI,CAAC,KAAK,CAAA;IACnB,CAAC;IAED;;OAEG;IACH,KAAK;QACH,MAAM,EAAE,GAAG,IAAI,CAAC,YAAY,CAAA;QAC5B,OAAO,IAAI,CAAC,MAAM,KAAK,SAAS;YAC9B,CAAC,CAAC,IAAI,CAAC,MAAM;YACb,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM;gBACV,IAAI,CAAC,SAAS,KAAK,OAAO;oBAC1B,IAAI,CAAC,MAAM,KAAK,CAAC;oBACjB,EAAE,CAAC,CAAC,CAAC,KAAK,EAAE;oBACZ,EAAE,CAAC,CAAC,CAAC,KAAK,EAAE;oBACZ,OAAO,EAAE,CAAC,CAAC,CAAC,KAAK,QAAQ;oBACzB,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;oBACP,OAAO,EAAE,CAAC,CAAC,CAAC,KAAK,QAAQ;oBACzB,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAA;IAChB,CAAC;IAED,sBAAsB;IACtB,sBAAsB;IACtB,mEAAmE;IACnE,sEAAsE;IACtE,6CAA6C;IAC7C;;OAEG;IACH,OAAO;QACL,MAAM,EAAE,GAAG,IAAI,CAAC,YAAY,CAAA;QAC5B,OAAO,IAAI,CAAC,QAAQ,KAAK,SAAS;YAChC,CAAC,CAAC,IAAI,CAAC,QAAQ;YACf,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ;gBACZ,IAAI,CAAC,SAAS,KAAK,OAAO;oBAC1B,IAAI,CAAC,MAAM,KAAK,CAAC;oBACjB,IAAI,CAAC,MAAM,GAAG,CAAC;oBACf,OAAO,EAAE,CAAC,CAAC,CAAC,KAAK,QAAQ;oBACzB,WAAW,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;IAChC,CAAC;IAED,sCAAsC;IACtC,kDAAkD;IAClD,oDAAoD;IACpD;;OAEG;IACH,UAAU;QACR,MAAM,EAAE,GAAG,IAAI,CAAC,YAAY,CAAA;QAC5B,OAAO,IAAI,CAAC,WAAW,KAAK,SAAS;YACnC,CAAC,CAAC,IAAI,CAAC,WAAW;YAClB,CAAC,CAAC,CAAC,IAAI,CAAC,WAAW;gBACf,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,EAAE,IAAI,EAAE,CAAC,MAAM,GAAG,CAAC,CAAC;oBAC/B,IAAI,CAAC,OAAO,EAAE;oBACd,IAAI,CAAC,KAAK,EAAE,CAAC,CAAA;IACrB,CAAC;IAED;;OAEG;IACH,IAAI;QACF,MAAM,CAAC,GAAG,IAAI,CAAC,YAAY,CAAC,CAAC,CAAC,CAAA;QAC9B,OAAO,OAAO,CAAC,KAAK,QAAQ,IAAI,IAAI,CAAC,UAAU,EAAE,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC;YACpE,CAAC,CAAC,CAAC;YACH,CAAC,CAAC,EAAE
,CAAA;IACR,CAAC;IAED;;;OAGG;IACH,mBAAmB;QACjB,OAAO,CAAC,CACN,IAAI,CAAC,MAAM,KAAK,CAAC;YACjB,CAAC,IAAI,CAAC,UAAU,EAAE;YAClB,CAAC,IAAI,CAAC,eAAe,CACtB,CAAA;IACH,CAAC;IAED;;OAEG;IACH,kBAAkB;QAChB,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,IAAI,CAAC,eAAe;YAClE,OAAO,KAAK,CAAA;QACd,IAAI,CAAC,eAAe,GAAG,KAAK,CAAA;QAC5B,OAAO,IAAI,CAAA;IACb,CAAC;CACF;AAnOD,0BAmOC","sourcesContent":["// this is just a very light wrapper around 2 arrays with an offset index\n\nimport { GLOBSTAR } from 'minimatch'\nexport type MMPattern = string | RegExp | typeof GLOBSTAR\n\n// an array of length >= 1\nexport type PatternList = [p: MMPattern, ...rest: MMPattern[]]\nexport type UNCPatternList = [\n p0: '',\n p1: '',\n p2: string,\n p3: string,\n ...rest: MMPattern[]\n]\nexport type DrivePatternList = [p0: string, ...rest: MMPattern[]]\nexport type AbsolutePatternList = [p0: '', ...rest: MMPattern[]]\nexport type GlobList = [p: string, ...rest: string[]]\n\nconst isPatternList = (pl: MMPattern[]): pl is PatternList =>\n pl.length >= 1\nconst isGlobList = (gl: string[]): gl is GlobList => gl.length >= 1\n\n/**\n * An immutable-ish view on an array of glob parts and their parsed\n * results\n */\nexport class Pattern {\n readonly #patternList: PatternList\n readonly #globList: GlobList\n readonly #index: number\n readonly length: number\n readonly #platform: NodeJS.Platform\n #rest?: Pattern | null\n #globString?: string\n #isDrive?: boolean\n #isUNC?: boolean\n #isAbsolute?: boolean\n #followGlobstar: boolean = true\n\n constructor(\n patternList: MMPattern[],\n globList: string[],\n index: number,\n platform: NodeJS.Platform\n ) {\n if (!isPatternList(patternList)) {\n throw new TypeError('empty pattern list')\n }\n if (!isGlobList(globList)) {\n throw new TypeError('empty glob list')\n }\n if (globList.length !== patternList.length) {\n throw new TypeError('mismatched pattern list and glob list lengths')\n }\n this.length = patternList.length\n if (index < 0 || index >= this.length) {\n throw new TypeError('index out of range')\n }\n this.#patternList = patternList\n this.#globList = globList\n this.#index = index\n this.#platform = platform\n\n // normalize root entries of absolute patterns on initial creation.\n if (this.#index === 0) {\n // c: => ['c:/']\n // C:/ => ['C:/']\n // C:/x => ['C:/', 'x']\n // //host/share => ['//host/share/']\n // //host/share/ => ['//host/share/']\n // //host/share/x => ['//host/share/', 'x']\n // /etc => ['/', 'etc']\n // / => ['/']\n if (this.isUNC()) {\n // '' / '' / 'host' / 'share'\n const [p0, p1, p2, p3, ...prest] = this.#patternList\n const [g0, g1, g2, g3, ...grest] = this.#globList\n if (prest[0] === '') {\n // ends in /\n prest.shift()\n grest.shift()\n }\n const p = [p0, p1, p2, p3, ''].join('/')\n const g = [g0, g1, g2, g3, ''].join('/')\n this.#patternList = [p, ...prest]\n this.#globList = [g, ...grest]\n this.length = this.#patternList.length\n } else if (this.isDrive() || this.isAbsolute()) {\n const [p1, ...prest] = this.#patternList\n const [g1, ...grest] = this.#globList\n if (prest[0] === '') {\n // ends in /\n prest.shift()\n grest.shift()\n }\n const p = (p1 as string) + '/'\n const g = g1 + '/'\n this.#patternList = [p, ...prest]\n this.#globList = [g, ...grest]\n this.length = this.#patternList.length\n }\n }\n }\n\n /**\n * The first entry in the parsed list of patterns\n */\n pattern(): MMPattern {\n return this.#patternList[this.#index]\n }\n\n /**\n * true of if pattern() returns a string\n */\n isString(): boolean {\n return typeof 
this.#patternList[this.#index] === 'string'\n }\n /**\n * true of if pattern() returns GLOBSTAR\n */\n isGlobstar(): boolean {\n return this.#patternList[this.#index] === GLOBSTAR\n }\n /**\n * true if pattern() returns a regexp\n */\n isRegExp(): boolean {\n return this.#patternList[this.#index] instanceof RegExp\n }\n\n /**\n * The /-joined set of glob parts that make up this pattern\n */\n globString(): string {\n return (this.#globString =\n this.#globString ||\n (this.#index === 0\n ? this.isAbsolute()\n ? this.#globList[0] + this.#globList.slice(1).join('/')\n : this.#globList.join('/')\n : this.#globList.slice(this.#index).join('/')))\n }\n\n /**\n * true if there are more pattern parts after this one\n */\n hasMore(): boolean {\n return this.length > this.#index + 1\n }\n\n /**\n * The rest of the pattern after this part, or null if this is the end\n */\n rest(): Pattern | null {\n if (this.#rest !== undefined) return this.#rest\n if (!this.hasMore()) return (this.#rest = null)\n this.#rest = new Pattern(\n this.#patternList,\n this.#globList,\n this.#index + 1,\n this.#platform\n )\n this.#rest.#isAbsolute = this.#isAbsolute\n this.#rest.#isUNC = this.#isUNC\n this.#rest.#isDrive = this.#isDrive\n return this.#rest\n }\n\n /**\n * true if the pattern represents a //unc/path/ on windows\n */\n isUNC(): boolean {\n const pl = this.#patternList\n return this.#isUNC !== undefined\n ? this.#isUNC\n : (this.#isUNC =\n this.#platform === 'win32' &&\n this.#index === 0 &&\n pl[0] === '' &&\n pl[1] === '' &&\n typeof pl[2] === 'string' &&\n !!pl[2] &&\n typeof pl[3] === 'string' &&\n !!pl[3])\n }\n\n // pattern like C:/...\n // split = ['C:', ...]\n // XXX: would be nice to handle patterns like `c:*` to test the cwd\n // in c: for *, but I don't know of a way to even figure out what that\n // cwd is without actually chdir'ing into it?\n /**\n * True if the pattern starts with a drive letter on Windows\n */\n isDrive(): boolean {\n const pl = this.#patternList\n return this.#isDrive !== undefined\n ? this.#isDrive\n : (this.#isDrive =\n this.#platform === 'win32' &&\n this.#index === 0 &&\n this.length > 1 &&\n typeof pl[0] === 'string' &&\n /^[a-z]:$/i.test(pl[0]))\n }\n\n // pattern = '/' or '/...' or '/x/...'\n // split = ['', ''] or ['', ...] or ['', 'x', ...]\n // Drive and UNC both considered absolute on windows\n /**\n * True if the pattern is rooted on an absolute path\n */\n isAbsolute(): boolean {\n const pl = this.#patternList\n return this.#isAbsolute !== undefined\n ? this.#isAbsolute\n : (this.#isAbsolute =\n (pl[0] === '' && pl.length > 1) ||\n this.isDrive() ||\n this.isUNC())\n }\n\n /**\n * consume the root of the pattern, and return it\n */\n root(): string {\n const p = this.#patternList[0]\n return typeof p === 'string' && this.isAbsolute() && this.#index === 0\n ? 
p\n : ''\n }\n\n /**\n * Check to see if the current globstar pattern is allowed to follow\n * a symbolic link.\n */\n checkFollowGlobstar(): boolean {\n return !(\n this.#index === 0 ||\n !this.isGlobstar() ||\n !this.#followGlobstar\n )\n }\n\n /**\n * Mark that the current globstar pattern is following a symbolic link\n */\n markFollowGlobstar(): boolean {\n if (this.#index === 0 || !this.isGlobstar() || !this.#followGlobstar)\n return false\n this.#followGlobstar = false\n return true\n }\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/processor.d.ts b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/processor.d.ts new file mode 100644 index 00000000000000..ccedfbf2820f7d --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/processor.d.ts @@ -0,0 +1,59 @@ +import { MMRegExp } from 'minimatch'; +import { Path } from 'path-scurry'; +import { Pattern } from './pattern.js'; +import { GlobWalkerOpts } from './walker.js'; +/** + * A cache of which patterns have been processed for a given Path + */ +export declare class HasWalkedCache { + store: Map>; + constructor(store?: Map>); + copy(): HasWalkedCache; + hasWalked(target: Path, pattern: Pattern): boolean | undefined; + storeWalked(target: Path, pattern: Pattern): void; +} +/** + * A record of which paths have been matched in a given walk step, + * and whether they only are considered a match if they are a directory, + * and whether their absolute or relative path should be returned. + */ +export declare class MatchRecord { + store: Map; + add(target: Path, absolute: boolean, ifDir: boolean): void; + entries(): [Path, boolean, boolean][]; +} +/** + * A collection of patterns that must be processed in a subsequent step + * for a given path. + */ +export declare class SubWalks { + store: Map; + add(target: Path, pattern: Pattern): void; + get(target: Path): Pattern[]; + entries(): [Path, Pattern[]][]; + keys(): Path[]; +} +/** + * The class that processes patterns for a given path. + * + * Handles child entry filtering, and determining whether a path's + * directory contents must be read. 
+ */ +export declare class Processor { + hasWalkedCache: HasWalkedCache; + matches: MatchRecord; + subwalks: SubWalks; + patterns?: Pattern[]; + follow: boolean; + dot: boolean; + opts: GlobWalkerOpts; + constructor(opts: GlobWalkerOpts, hasWalkedCache?: HasWalkedCache); + processPatterns(target: Path, patterns: Pattern[]): this; + subwalkTargets(): Path[]; + child(): Processor; + filterEntries(parent: Path, entries: Path[]): Processor; + testGlobstar(e: Path, pattern: Pattern, rest: Pattern | null, absolute: boolean): void; + testRegExp(e: Path, p: MMRegExp, rest: Pattern | null, absolute: boolean): void; + testString(e: Path, p: string, rest: Pattern | null, absolute: boolean): void; +} +//# sourceMappingURL=processor.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/processor.d.ts.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/processor.d.ts.map new file mode 100644 index 00000000000000..ca6c63ca264b27 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/processor.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"processor.d.ts","sourceRoot":"","sources":["../../../src/processor.ts"],"names":[],"mappings":"AAEA,OAAO,EAAY,QAAQ,EAAE,MAAM,WAAW,CAAA;AAC9C,OAAO,EAAE,IAAI,EAAE,MAAM,aAAa,CAAA;AAClC,OAAO,EAAa,OAAO,EAAE,MAAM,cAAc,CAAA;AACjD,OAAO,EAAE,cAAc,EAAE,MAAM,aAAa,CAAA;AAE5C;;GAEG;AACH,qBAAa,cAAc;IACzB,KAAK,EAAE,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,MAAM,CAAC,CAAC,CAAA;gBACnB,KAAK,GAAE,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,MAAM,CAAC,CAAa;IAGvD,IAAI;IAGJ,SAAS,CAAC,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO;IAGxC,WAAW,CAAC,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO;CAM3C;AAED;;;;GAIG;AACH,qBAAa,WAAW;IACtB,KAAK,EAAE,GAAG,CAAC,IAAI,EAAE,MAAM,CAAC,CAAY;IACpC,GAAG,CAAC,MAAM,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,KAAK,EAAE,OAAO;IAMnD,OAAO,IAAI,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE;CAOtC;AAED;;;GAGG;AACH,qBAAa,QAAQ;IACnB,KAAK,EAAE,GAAG,CAAC,IAAI,EAAE,OAAO,EAAE,CAAC,CAAY;IACvC,GAAG,CAAC,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO;IAWlC,GAAG,CAAC,MAAM,EAAE,IAAI,GAAG,OAAO,EAAE;IAS5B,OAAO,IAAI,CAAC,IAAI,EAAE,OAAO,EAAE,CAAC,EAAE;IAG9B,IAAI,IAAI,IAAI,EAAE;CAGf;AAED;;;;;GAKG;AACH,qBAAa,SAAS;IACpB,cAAc,EAAE,cAAc,CAAA;IAC9B,OAAO,cAAoB;IAC3B,QAAQ,WAAiB;IACzB,QAAQ,CAAC,EAAE,OAAO,EAAE,CAAA;IACpB,MAAM,EAAE,OAAO,CAAA;IACf,GAAG,EAAE,OAAO,CAAA;IACZ,IAAI,EAAE,cAAc,CAAA;gBAER,IAAI,EAAE,cAAc,EAAE,cAAc,CAAC,EAAE,cAAc;IASjE,eAAe,CAAC,MAAM,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE;IAwGjD,cAAc,IAAI,IAAI,EAAE;IAIxB,KAAK;IAQL,aAAa,CAAC,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,IAAI,EAAE,GAAG,SAAS;IAqBvD,YAAY,CACV,CAAC,EAAE,IAAI,EACP,OAAO,EAAE,OAAO,EAChB,IAAI,EAAE,OAAO,GAAG,IAAI,EACpB,QAAQ,EAAE,OAAO;IA8CnB,UAAU,CACR,CAAC,EAAE,IAAI,EACP,CAAC,EAAE,QAAQ,EACX,IAAI,EAAE,OAAO,GAAG,IAAI,EACpB,QAAQ,EAAE,OAAO;IAUnB,UAAU,CAAC,CAAC,EAAE,IAAI,EAAE,CAAC,EAAE,MAAM,EAAE,IAAI,EAAE,OAAO,GAAG,IAAI,EAAE,QAAQ,EAAE,OAAO;CASvE"} \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/processor.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/processor.js new file mode 100644 index 00000000000000..bd067e9b9033dc --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/processor.js @@ -0,0 +1,309 @@ +"use strict"; +// synchronous utility for filtering entries and calculating subwalks +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Processor = exports.SubWalks = 
exports.MatchRecord = exports.HasWalkedCache = void 0; +const minimatch_1 = require("minimatch"); +/** + * A cache of which patterns have been processed for a given Path + */ +class HasWalkedCache { + store; + constructor(store = new Map()) { + this.store = store; + } + copy() { + return new HasWalkedCache(new Map(this.store)); + } + hasWalked(target, pattern) { + return this.store.get(target.fullpath())?.has(pattern.globString()); + } + storeWalked(target, pattern) { + const fullpath = target.fullpath(); + const cached = this.store.get(fullpath); + if (cached) + cached.add(pattern.globString()); + else + this.store.set(fullpath, new Set([pattern.globString()])); + } +} +exports.HasWalkedCache = HasWalkedCache; +/** + * A record of which paths have been matched in a given walk step, + * and whether they only are considered a match if they are a directory, + * and whether their absolute or relative path should be returned. + */ +class MatchRecord { + store = new Map(); + add(target, absolute, ifDir) { + const n = (absolute ? 2 : 0) | (ifDir ? 1 : 0); + const current = this.store.get(target); + this.store.set(target, current === undefined ? n : n & current); + } + // match, absolute, ifdir + entries() { + return [...this.store.entries()].map(([path, n]) => [ + path, + !!(n & 2), + !!(n & 1), + ]); + } +} +exports.MatchRecord = MatchRecord; +/** + * A collection of patterns that must be processed in a subsequent step + * for a given path. + */ +class SubWalks { + store = new Map(); + add(target, pattern) { + if (!target.canReaddir()) { + return; + } + const subs = this.store.get(target); + if (subs) { + if (!subs.find(p => p.globString() === pattern.globString())) { + subs.push(pattern); + } + } + else + this.store.set(target, [pattern]); + } + get(target) { + const subs = this.store.get(target); + /* c8 ignore start */ + if (!subs) { + throw new Error('attempting to walk unknown path'); + } + /* c8 ignore stop */ + return subs; + } + entries() { + return this.keys().map(k => [k, this.store.get(k)]); + } + keys() { + return [...this.store.keys()].filter(t => t.canReaddir()); + } +} +exports.SubWalks = SubWalks; +/** + * The class that processes patterns for a given path. + * + * Handles child entry filtering, and determining whether a path's + * directory contents must be read. + */ +class Processor { + hasWalkedCache; + matches = new MatchRecord(); + subwalks = new SubWalks(); + patterns; + follow; + dot; + opts; + constructor(opts, hasWalkedCache) { + this.opts = opts; + this.follow = !!opts.follow; + this.dot = !!opts.dot; + this.hasWalkedCache = hasWalkedCache + ? hasWalkedCache.copy() + : new HasWalkedCache(); + } + processPatterns(target, patterns) { + this.patterns = patterns; + const processingSet = patterns.map(p => [target, p]); + // map of paths to the magic-starting subwalks they need to walk + // first item in patterns is the filter + for (let [t, pattern] of processingSet) { + this.hasWalkedCache.storeWalked(t, pattern); + const root = pattern.root(); + const absolute = pattern.isAbsolute() && this.opts.absolute !== false; + // start absolute patterns at root + if (root) { + t = t.resolve(root === '/' && this.opts.root !== undefined + ? 
this.opts.root + : root); + const rest = pattern.rest(); + if (!rest) { + this.matches.add(t, true, false); + continue; + } + else { + pattern = rest; + } + } + if (t.isENOENT()) + continue; + let p; + let rest; + let changed = false; + while (typeof (p = pattern.pattern()) === 'string' && + (rest = pattern.rest())) { + const c = t.resolve(p); + // we can be reasonably sure that .. is a readable dir + if (c.isUnknown() && p !== '..') + break; + t = c; + pattern = rest; + changed = true; + } + p = pattern.pattern(); + rest = pattern.rest(); + if (changed) { + if (this.hasWalkedCache.hasWalked(t, pattern)) + continue; + this.hasWalkedCache.storeWalked(t, pattern); + } + // now we have either a final string for a known entry, + // more strings for an unknown entry, + // or a pattern starting with magic, mounted on t. + if (typeof p === 'string') { + // must be final entry + if (!rest) { + const ifDir = p === '..' || p === '' || p === '.'; + this.matches.add(t.resolve(p), absolute, ifDir); + } + else { + this.subwalks.add(t, pattern); + } + continue; + } + else if (p === minimatch_1.GLOBSTAR) { + // if no rest, match and subwalk pattern + // if rest, process rest and subwalk pattern + // if it's a symlink, but we didn't get here by way of a + // globstar match (meaning it's the first time THIS globstar + // has traversed a symlink), then we follow it. Otherwise, stop. + if (!t.isSymbolicLink() || + this.follow || + pattern.checkFollowGlobstar()) { + this.subwalks.add(t, pattern); + } + const rp = rest?.pattern(); + const rrest = rest?.rest(); + if (!rest || ((rp === '' || rp === '.') && !rrest)) { + // only HAS to be a dir if it ends in **/ or **/. + // but ending in ** will match files as well. + this.matches.add(t, absolute, rp === '' || rp === '.'); + } + else { + if (rp === '..') { + // this would mean you're matching **/.. at the fs root, + // and no thanks, I'm not gonna test that specific case. + /* c8 ignore start */ + const tp = t.parent || t; + /* c8 ignore stop */ + if (!rrest) + this.matches.add(tp, absolute, true); + else if (!this.hasWalkedCache.hasWalked(tp, rrest)) { + this.subwalks.add(tp, rrest); + } + } + } + } + else if (p instanceof RegExp) { + this.subwalks.add(t, pattern); + } + } + return this; + } + subwalkTargets() { + return this.subwalks.keys(); + } + child() { + return new Processor(this.opts, this.hasWalkedCache); + } + // return a new Processor containing the subwalks for each + // child entry, and a set of matches, and + // a hasWalkedCache that's a copy of this one + // then we're going to call + filterEntries(parent, entries) { + const patterns = this.subwalks.get(parent); + // put matches and entry walks into the results processor + const results = this.child(); + for (const e of entries) { + for (const pattern of patterns) { + const absolute = pattern.isAbsolute(); + const p = pattern.pattern(); + const rest = pattern.rest(); + if (p === minimatch_1.GLOBSTAR) { + results.testGlobstar(e, pattern, rest, absolute); + } + else if (p instanceof RegExp) { + results.testRegExp(e, p, rest, absolute); + } + else { + results.testString(e, p, rest, absolute); + } + } + } + return results; + } + testGlobstar(e, pattern, rest, absolute) { + if (this.dot || !e.name.startsWith('.')) { + if (!pattern.hasMore()) { + this.matches.add(e, absolute, false); + } + if (e.canReaddir()) { + // if we're in follow mode or it's not a symlink, just keep + // testing the same pattern. If there's more after the globstar, + // then this symlink consumes the globstar. 
If not, then we can + // follow at most ONE symlink along the way, so we mark it, which + // also checks to ensure that it wasn't already marked. + if (this.follow || !e.isSymbolicLink()) { + this.subwalks.add(e, pattern); + } + else if (e.isSymbolicLink()) { + if (rest && pattern.checkFollowGlobstar()) { + this.subwalks.add(e, rest); + } + else if (pattern.markFollowGlobstar()) { + this.subwalks.add(e, pattern); + } + } + } + } + // if the NEXT thing matches this entry, then also add + // the rest. + if (rest) { + const rp = rest.pattern(); + if (typeof rp === 'string' && + // dots and empty were handled already + rp !== '..' && + rp !== '' && + rp !== '.') { + this.testString(e, rp, rest.rest(), absolute); + } + else if (rp === '..') { + /* c8 ignore start */ + const ep = e.parent || e; + /* c8 ignore stop */ + this.subwalks.add(ep, rest); + } + else if (rp instanceof RegExp) { + this.testRegExp(e, rp, rest.rest(), absolute); + } + } + } + testRegExp(e, p, rest, absolute) { + if (!p.test(e.name)) + return; + if (!rest) { + this.matches.add(e, absolute, false); + } + else { + this.subwalks.add(e, rest); + } + } + testString(e, p, rest, absolute) { + // should never happen? + if (!e.isNamed(p)) + return; + if (!rest) { + this.matches.add(e, absolute, false); + } + else { + this.subwalks.add(e, rest); + } + } +} +exports.Processor = Processor; +//# sourceMappingURL=processor.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/processor.js.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/processor.js.map new file mode 100644 index 00000000000000..bcbac1f723f983 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/processor.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"processor.js","sourceRoot":"","sources":["../../../src/processor.ts"],"names":[],"mappings":";AAAA,qEAAqE;;;AAErE,yCAA8C;AAK9C;;GAEG;AACH,MAAa,cAAc;IACzB,KAAK,CAA0B;IAC/B,YAAY,QAAkC,IAAI,GAAG,EAAE;QACrD,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;IACpB,CAAC;IACD,IAAI;QACF,OAAO,IAAI,cAAc,CAAC,IAAI,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAA;IAChD,CAAC;IACD,SAAS,CAAC,MAAY,EAAE,OAAgB;QACtC,OAAO,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC,EAAE,GAAG,CAAC,OAAO,CAAC,UAAU,EAAE,CAAC,CAAA;IACrE,CAAC;IACD,WAAW,CAAC,MAAY,EAAE,OAAgB;QACxC,MAAM,QAAQ,GAAG,MAAM,CAAC,QAAQ,EAAE,CAAA;QAClC,MAAM,MAAM,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAA;QACvC,IAAI,MAAM;YAAE,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC,UAAU,EAAE,CAAC,CAAA;;YACvC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,QAAQ,EAAE,IAAI,GAAG,CAAC,CAAC,OAAO,CAAC,UAAU,EAAE,CAAC,CAAC,CAAC,CAAA;IAChE,CAAC;CACF;AAjBD,wCAiBC;AAED;;;;GAIG;AACH,MAAa,WAAW;IACtB,KAAK,GAAsB,IAAI,GAAG,EAAE,CAAA;IACpC,GAAG,CAAC,MAAY,EAAE,QAAiB,EAAE,KAAc;QACjD,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;QAC9C,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,CAAA;QACtC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,EAAE,OAAO,KAAK,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,CAAA;IACjE,CAAC;IACD,yBAAyB;IACzB,OAAO;QACL,OAAO,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,OAAO,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC;YAClD,IAAI;YACJ,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;YACT,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;SACV,CAAC,CAAA;IACJ,CAAC;CACF;AAfD,kCAeC;AAED;;;GAGG;AACH,MAAa,QAAQ;IACnB,KAAK,GAAyB,IAAI,GAAG,EAAE,CAAA;IACvC,GAAG,CAAC,MAAY,EAAE,OAAgB;QAChC,IAAI,CAAC,MAAM,CAAC,UAAU,EAAE,EAAE;YACxB,OAAM;SACP;QACD,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,CAAA;QACnC,IAAI,IAAI,EAAE;YACR,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,UAAU,EAAE,KAAK,OAAO,CAAC,UAAU,EAAE,CAAC,EAAE;gBAC5D,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;aACnB;SACF;;YAAM,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,EAAE,CAAC,OAAO,CAAC,CAAC,CAAA;IAC1C,CAAC;IACD,GAAG,CAAC,MAAY;QACd,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,CAAA;QACnC,qBAAqB;QACrB,IAAI,CAAC,IAAI,EAAE;YACT,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAA;SACnD;QACD,oBAAoB;QACpB,OAAO,IAAI,CAAA;IACb,CAAC;IACD,OAAO;QACL,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAc,CAAC,CAAC,CAAA;IAClE,CAAC;IACD,IAAI;QACF,OAAO,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,UAAU,EAAE,CAAC,CAAA;IAC3D,CAAC;CACF;AA5BD,4BA4BC;AAED;;;;;GAKG;AACH,MAAa,SAAS;IACpB,cAAc,CAAgB;IAC9B,OAAO,GAAG,IAAI,WAAW,EAAE,CAAA;IAC3B,QAAQ,GAAG,IAAI,QAAQ,EAAE,CAAA;IACzB,QAAQ,CAAY;IACpB,MAAM,CAAS;IACf,GAAG,CAAS;IACZ,IAAI,CAAgB;IAEpB,YAAY,IAAoB,EAAE,cAA+B;QAC/D,IAAI,CAAC,IAAI,GAAG,IAAI,CAAA;QAChB,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,IAAI,CAAC,MAAM,CAAA;QAC3B,IAAI,CAAC,GAAG,GAAG,CAAC,CAAC,IAAI,CAAC,GAAG,CAAA;QACrB,IAAI,CAAC,cAAc,GAAG,cAAc;YAClC,CAAC,CAAC,cAAc,CAAC,IAAI,EAAE;YACvB,CAAC,CAAC,IAAI,cAAc,EAAE,CAAA;IAC1B,CAAC;IAED,eAAe,CAAC,MAAY,EAAE,QAAmB;QAC/C,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAA;QACxB,MAAM,aAAa,GAAsB,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,CAAA;QAEvE,gEAAgE;QAChE,uCAAuC;QAEvC,KAAK,IAAI,CAAC,CAAC,EAAE,OAAO,CAAC,IAAI,aAAa,EAAE;YACtC,IAAI,CAAC,cAAc,CAAC,WAAW,CAAC,CAAC,EAAE,OAAO,CAAC,CAAA;YAE3C,MAAM,IAAI,GAAG,OAAO,CAAC,IAAI,EAAE,CAAA;YAC3B,MAAM,QAAQ,GAAG,OAAO,CAAC,UAAU,EAAE,IAAI,IAAI,CAAC,IAAI,CAAC,QAAQ,KAAK,KAAK,CAAA;YAErE,kCAAkC;YAClC,IAAI,IAAI,EAAE;gBACR,CAAC,GAAG,CAAC,
CAAC,OAAO,CACX,IAAI,KAAK,GAAG,IAAI,IAAI,CAAC,IAAI,CAAC,IAAI,KAAK,SAAS;oBAC1C,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI;oBAChB,CAAC,CAAC,IAAI,CACT,CAAA;gBACD,MAAM,IAAI,GAAG,OAAO,CAAC,IAAI,EAAE,CAAA;gBAC3B,IAAI,CAAC,IAAI,EAAE;oBACT,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,EAAE,KAAK,CAAC,CAAA;oBAChC,SAAQ;iBACT;qBAAM;oBACL,OAAO,GAAG,IAAI,CAAA;iBACf;aACF;YAED,IAAI,CAAC,CAAC,QAAQ,EAAE;gBAAE,SAAQ;YAE1B,IAAI,CAAY,CAAA;YAChB,IAAI,IAAoB,CAAA;YACxB,IAAI,OAAO,GAAG,KAAK,CAAA;YACnB,OACE,OAAO,CAAC,CAAC,GAAG,OAAO,CAAC,OAAO,EAAE,CAAC,KAAK,QAAQ;gBAC3C,CAAC,IAAI,GAAG,OAAO,CAAC,IAAI,EAAE,CAAC,EACvB;gBACA,MAAM,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAA;gBACtB,sDAAsD;gBACtD,IAAI,CAAC,CAAC,SAAS,EAAE,IAAI,CAAC,KAAK,IAAI;oBAAE,MAAK;gBACtC,CAAC,GAAG,CAAC,CAAA;gBACL,OAAO,GAAG,IAAI,CAAA;gBACd,OAAO,GAAG,IAAI,CAAA;aACf;YACD,CAAC,GAAG,OAAO,CAAC,OAAO,EAAE,CAAA;YACrB,IAAI,GAAG,OAAO,CAAC,IAAI,EAAE,CAAA;YACrB,IAAI,OAAO,EAAE;gBACX,IAAI,IAAI,CAAC,cAAc,CAAC,SAAS,CAAC,CAAC,EAAE,OAAO,CAAC;oBAAE,SAAQ;gBACvD,IAAI,CAAC,cAAc,CAAC,WAAW,CAAC,CAAC,EAAE,OAAO,CAAC,CAAA;aAC5C;YAED,uDAAuD;YACvD,qCAAqC;YACrC,kDAAkD;YAClD,IAAI,OAAO,CAAC,KAAK,QAAQ,EAAE;gBACzB,sBAAsB;gBACtB,IAAI,CAAC,IAAI,EAAE;oBACT,MAAM,KAAK,GAAG,CAAC,KAAK,IAAI,IAAI,CAAC,KAAK,EAAE,IAAI,CAAC,KAAK,GAAG,CAAA;oBACjD,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAA;iBAChD;qBAAM;oBACL,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,OAAO,CAAC,CAAA;iBAC9B;gBACD,SAAQ;aACT;iBAAM,IAAI,CAAC,KAAK,oBAAQ,EAAE;gBACzB,wCAAwC;gBACxC,4CAA4C;gBAC5C,wDAAwD;gBACxD,4DAA4D;gBAC5D,gEAAgE;gBAChE,IACE,CAAC,CAAC,CAAC,cAAc,EAAE;oBACnB,IAAI,CAAC,MAAM;oBACX,OAAO,CAAC,mBAAmB,EAAE,EAC7B;oBACA,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,OAAO,CAAC,CAAA;iBAC9B;gBACD,MAAM,EAAE,GAAG,IAAI,EAAE,OAAO,EAAE,CAAA;gBAC1B,MAAM,KAAK,GAAG,IAAI,EAAE,IAAI,EAAE,CAAA;gBAC1B,IAAI,CAAC,IAAI,IAAI,CAAC,CAAC,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE;oBAClD,iDAAiD;oBACjD,6CAA6C;oBAC7C,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,EAAE,QAAQ,EAAE,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,GAAG,CAAC,CAAA;iBACvD;qBAAM;oBACL,IAAI,EAAE,KAAK,IAAI,EAAE;wBACf,wDAAwD;wBACxD,wDAAwD;wBACxD,qBAAqB;wBACrB,MAAM,EAAE,GAAG,CAAC,CAAC,MAAM,IAAI,CAAC,CAAA;wBACxB,oBAAoB;wBACpB,IAAI,CAAC,KAAK;4BAAE,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE,EAAE,QAAQ,EAAE,IAAI,CAAC,CAAA;6BAC3C,IAAI,CAAC,IAAI,CAAC,cAAc,CAAC,SAAS,CAAC,EAAE,EAAE,KAAK,CAAC,EAAE;4BAClD,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE,EAAE,KAAK,CAAC,CAAA;yBAC7B;qBACF;iBACF;aACF;iBAAM,IAAI,CAAC,YAAY,MAAM,EAAE;gBAC9B,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,OAAO,CAAC,CAAA;aAC9B;SACF;QAED,OAAO,IAAI,CAAA;IACb,CAAC;IAED,cAAc;QACZ,OAAO,IAAI,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAA;IAC7B,CAAC;IAED,KAAK;QACH,OAAO,IAAI,SAAS,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,cAAc,CAAC,CAAA;IACtD,CAAC;IAED,0DAA0D;IAC1D,yCAAyC;IACzC,6CAA6C;IAC7C,2BAA2B;IAC3B,aAAa,CAAC,MAAY,EAAE,OAAe;QACzC,MAAM,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,MAAM,CAAC,CAAA;QAC1C,yDAAyD;QACzD,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,EAAE,CAAA;QAC5B,KAAK,MAAM,CAAC,IAAI,OAAO,EAAE;YACvB,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;gBAC9B,MAAM,QAAQ,GAAG,OAAO,CAAC,UAAU,EAAE,CAAA;gBACrC,MAAM,CAAC,GAAG,OAAO,CAAC,OAAO,EAAE,CAAA;gBAC3B,MAAM,IAAI,GAAG,OAAO,CAAC,IAAI,EAAE,CAAA;gBAC3B,IAAI,CAAC,KAAK,oBAAQ,EAAE;oBAClB,OAAO,CAAC,YAAY,CAAC,CAAC,EAAE,OAAO,EAAE,IAAI,EAAE,QAAQ,CAAC,CAAA;iBACjD;qBAAM,IAAI,CAAC,YAAY,MAAM,EAAE;oBAC9B,OAAO,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,EAAE,IAAI,EAAE,QAAQ,CAAC,CAAA;iBACzC;qBAAM;oBACL,OAAO,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,EAAE,IAAI,EAAE,QAAQ,CAAC,CAAA;iBACzC;aACF;SACF;QACD,OAAO,OAAO,CAAA;IAChB,CAAC;IAED,YAAY,CACV,CAAO,EACP,OAAgB,EAChB,IAAoB,EACpB,QAAiB;QAEjB,IAAI,IAAI,CAAC
,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;YACvC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,EAAE;gBACtB,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAA;aACrC;YACD,IAAI,CAAC,CAAC,UAAU,EAAE,EAAE;gBAClB,2DAA2D;gBAC3D,gEAAgE;gBAChE,+DAA+D;gBAC/D,iEAAiE;gBACjE,uDAAuD;gBACvD,IAAI,IAAI,CAAC,MAAM,IAAI,CAAC,CAAC,CAAC,cAAc,EAAE,EAAE;oBACtC,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,OAAO,CAAC,CAAA;iBAC9B;qBAAM,IAAI,CAAC,CAAC,cAAc,EAAE,EAAE;oBAC7B,IAAI,IAAI,IAAI,OAAO,CAAC,mBAAmB,EAAE,EAAE;wBACzC,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,CAAA;qBAC3B;yBAAM,IAAI,OAAO,CAAC,kBAAkB,EAAE,EAAE;wBACvC,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,OAAO,CAAC,CAAA;qBAC9B;iBACF;aACF;SACF;QACD,sDAAsD;QACtD,YAAY;QACZ,IAAI,IAAI,EAAE;YACR,MAAM,EAAE,GAAG,IAAI,CAAC,OAAO,EAAE,CAAA;YACzB,IACE,OAAO,EAAE,KAAK,QAAQ;gBACtB,sCAAsC;gBACtC,EAAE,KAAK,IAAI;gBACX,EAAE,KAAK,EAAE;gBACT,EAAE,KAAK,GAAG,EACV;gBACA,IAAI,CAAC,UAAU,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,IAAI,EAAE,EAAE,QAAQ,CAAC,CAAA;aAC9C;iBAAM,IAAI,EAAE,KAAK,IAAI,EAAE;gBACtB,qBAAqB;gBACrB,MAAM,EAAE,GAAG,CAAC,CAAC,MAAM,IAAI,CAAC,CAAA;gBACxB,oBAAoB;gBACpB,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE,EAAE,IAAI,CAAC,CAAA;aAC5B;iBAAM,IAAI,EAAE,YAAY,MAAM,EAAE;gBAC/B,IAAI,CAAC,UAAU,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,IAAI,EAAE,EAAE,QAAQ,CAAC,CAAA;aAC9C;SACF;IACH,CAAC;IAED,UAAU,CACR,CAAO,EACP,CAAW,EACX,IAAoB,EACpB,QAAiB;QAEjB,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC;YAAE,OAAM;QAC3B,IAAI,CAAC,IAAI,EAAE;YACT,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAA;SACrC;aAAM;YACL,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,CAAA;SAC3B;IACH,CAAC;IAED,UAAU,CAAC,CAAO,EAAE,CAAS,EAAE,IAAoB,EAAE,QAAiB;QACpE,uBAAuB;QACvB,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC;YAAE,OAAM;QACzB,IAAI,CAAC,IAAI,EAAE;YACT,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAA;SACrC;aAAM;YACL,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,CAAA;SAC3B;IACH,CAAC;CACF;AApOD,8BAoOC","sourcesContent":["// synchronous utility for filtering entries and calculating subwalks\n\nimport { GLOBSTAR, MMRegExp } from 'minimatch'\nimport { Path } from 'path-scurry'\nimport { MMPattern, Pattern } from './pattern.js'\nimport { GlobWalkerOpts } from './walker.js'\n\n/**\n * A cache of which patterns have been processed for a given Path\n */\nexport class HasWalkedCache {\n store: Map>\n constructor(store: Map> = new Map()) {\n this.store = store\n }\n copy() {\n return new HasWalkedCache(new Map(this.store))\n }\n hasWalked(target: Path, pattern: Pattern) {\n return this.store.get(target.fullpath())?.has(pattern.globString())\n }\n storeWalked(target: Path, pattern: Pattern) {\n const fullpath = target.fullpath()\n const cached = this.store.get(fullpath)\n if (cached) cached.add(pattern.globString())\n else this.store.set(fullpath, new Set([pattern.globString()]))\n }\n}\n\n/**\n * A record of which paths have been matched in a given walk step,\n * and whether they only are considered a match if they are a directory,\n * and whether their absolute or relative path should be returned.\n */\nexport class MatchRecord {\n store: Map = new Map()\n add(target: Path, absolute: boolean, ifDir: boolean) {\n const n = (absolute ? 2 : 0) | (ifDir ? 1 : 0)\n const current = this.store.get(target)\n this.store.set(target, current === undefined ? 
n : n & current)\n }\n // match, absolute, ifdir\n entries(): [Path, boolean, boolean][] {\n return [...this.store.entries()].map(([path, n]) => [\n path,\n !!(n & 2),\n !!(n & 1),\n ])\n }\n}\n\n/**\n * A collection of patterns that must be processed in a subsequent step\n * for a given path.\n */\nexport class SubWalks {\n store: Map = new Map()\n add(target: Path, pattern: Pattern) {\n if (!target.canReaddir()) {\n return\n }\n const subs = this.store.get(target)\n if (subs) {\n if (!subs.find(p => p.globString() === pattern.globString())) {\n subs.push(pattern)\n }\n } else this.store.set(target, [pattern])\n }\n get(target: Path): Pattern[] {\n const subs = this.store.get(target)\n /* c8 ignore start */\n if (!subs) {\n throw new Error('attempting to walk unknown path')\n }\n /* c8 ignore stop */\n return subs\n }\n entries(): [Path, Pattern[]][] {\n return this.keys().map(k => [k, this.store.get(k) as Pattern[]])\n }\n keys(): Path[] {\n return [...this.store.keys()].filter(t => t.canReaddir())\n }\n}\n\n/**\n * The class that processes patterns for a given path.\n *\n * Handles child entry filtering, and determining whether a path's\n * directory contents must be read.\n */\nexport class Processor {\n hasWalkedCache: HasWalkedCache\n matches = new MatchRecord()\n subwalks = new SubWalks()\n patterns?: Pattern[]\n follow: boolean\n dot: boolean\n opts: GlobWalkerOpts\n\n constructor(opts: GlobWalkerOpts, hasWalkedCache?: HasWalkedCache) {\n this.opts = opts\n this.follow = !!opts.follow\n this.dot = !!opts.dot\n this.hasWalkedCache = hasWalkedCache\n ? hasWalkedCache.copy()\n : new HasWalkedCache()\n }\n\n processPatterns(target: Path, patterns: Pattern[]) {\n this.patterns = patterns\n const processingSet: [Path, Pattern][] = patterns.map(p => [target, p])\n\n // map of paths to the magic-starting subwalks they need to walk\n // first item in patterns is the filter\n\n for (let [t, pattern] of processingSet) {\n this.hasWalkedCache.storeWalked(t, pattern)\n\n const root = pattern.root()\n const absolute = pattern.isAbsolute() && this.opts.absolute !== false\n\n // start absolute patterns at root\n if (root) {\n t = t.resolve(\n root === '/' && this.opts.root !== undefined\n ? this.opts.root\n : root\n )\n const rest = pattern.rest()\n if (!rest) {\n this.matches.add(t, true, false)\n continue\n } else {\n pattern = rest\n }\n }\n\n if (t.isENOENT()) continue\n\n let p: MMPattern\n let rest: Pattern | null\n let changed = false\n while (\n typeof (p = pattern.pattern()) === 'string' &&\n (rest = pattern.rest())\n ) {\n const c = t.resolve(p)\n // we can be reasonably sure that .. is a readable dir\n if (c.isUnknown() && p !== '..') break\n t = c\n pattern = rest\n changed = true\n }\n p = pattern.pattern()\n rest = pattern.rest()\n if (changed) {\n if (this.hasWalkedCache.hasWalked(t, pattern)) continue\n this.hasWalkedCache.storeWalked(t, pattern)\n }\n\n // now we have either a final string for a known entry,\n // more strings for an unknown entry,\n // or a pattern starting with magic, mounted on t.\n if (typeof p === 'string') {\n // must be final entry\n if (!rest) {\n const ifDir = p === '..' 
|| p === '' || p === '.'\n this.matches.add(t.resolve(p), absolute, ifDir)\n } else {\n this.subwalks.add(t, pattern)\n }\n continue\n } else if (p === GLOBSTAR) {\n // if no rest, match and subwalk pattern\n // if rest, process rest and subwalk pattern\n // if it's a symlink, but we didn't get here by way of a\n // globstar match (meaning it's the first time THIS globstar\n // has traversed a symlink), then we follow it. Otherwise, stop.\n if (\n !t.isSymbolicLink() ||\n this.follow ||\n pattern.checkFollowGlobstar()\n ) {\n this.subwalks.add(t, pattern)\n }\n const rp = rest?.pattern()\n const rrest = rest?.rest()\n if (!rest || ((rp === '' || rp === '.') && !rrest)) {\n // only HAS to be a dir if it ends in **/ or **/.\n // but ending in ** will match files as well.\n this.matches.add(t, absolute, rp === '' || rp === '.')\n } else {\n if (rp === '..') {\n // this would mean you're matching **/.. at the fs root,\n // and no thanks, I'm not gonna test that specific case.\n /* c8 ignore start */\n const tp = t.parent || t\n /* c8 ignore stop */\n if (!rrest) this.matches.add(tp, absolute, true)\n else if (!this.hasWalkedCache.hasWalked(tp, rrest)) {\n this.subwalks.add(tp, rrest)\n }\n }\n }\n } else if (p instanceof RegExp) {\n this.subwalks.add(t, pattern)\n }\n }\n\n return this\n }\n\n subwalkTargets(): Path[] {\n return this.subwalks.keys()\n }\n\n child() {\n return new Processor(this.opts, this.hasWalkedCache)\n }\n\n // return a new Processor containing the subwalks for each\n // child entry, and a set of matches, and\n // a hasWalkedCache that's a copy of this one\n // then we're going to call\n filterEntries(parent: Path, entries: Path[]): Processor {\n const patterns = this.subwalks.get(parent)\n // put matches and entry walks into the results processor\n const results = this.child()\n for (const e of entries) {\n for (const pattern of patterns) {\n const absolute = pattern.isAbsolute()\n const p = pattern.pattern()\n const rest = pattern.rest()\n if (p === GLOBSTAR) {\n results.testGlobstar(e, pattern, rest, absolute)\n } else if (p instanceof RegExp) {\n results.testRegExp(e, p, rest, absolute)\n } else {\n results.testString(e, p, rest, absolute)\n }\n }\n }\n return results\n }\n\n testGlobstar(\n e: Path,\n pattern: Pattern,\n rest: Pattern | null,\n absolute: boolean\n ) {\n if (this.dot || !e.name.startsWith('.')) {\n if (!pattern.hasMore()) {\n this.matches.add(e, absolute, false)\n }\n if (e.canReaddir()) {\n // if we're in follow mode or it's not a symlink, just keep\n // testing the same pattern. If there's more after the globstar,\n // then this symlink consumes the globstar. If not, then we can\n // follow at most ONE symlink along the way, so we mark it, which\n // also checks to ensure that it wasn't already marked.\n if (this.follow || !e.isSymbolicLink()) {\n this.subwalks.add(e, pattern)\n } else if (e.isSymbolicLink()) {\n if (rest && pattern.checkFollowGlobstar()) {\n this.subwalks.add(e, rest)\n } else if (pattern.markFollowGlobstar()) {\n this.subwalks.add(e, pattern)\n }\n }\n }\n }\n // if the NEXT thing matches this entry, then also add\n // the rest.\n if (rest) {\n const rp = rest.pattern()\n if (\n typeof rp === 'string' &&\n // dots and empty were handled already\n rp !== '..' 
&&\n rp !== '' &&\n rp !== '.'\n ) {\n this.testString(e, rp, rest.rest(), absolute)\n } else if (rp === '..') {\n /* c8 ignore start */\n const ep = e.parent || e\n /* c8 ignore stop */\n this.subwalks.add(ep, rest)\n } else if (rp instanceof RegExp) {\n this.testRegExp(e, rp, rest.rest(), absolute)\n }\n }\n }\n\n testRegExp(\n e: Path,\n p: MMRegExp,\n rest: Pattern | null,\n absolute: boolean\n ) {\n if (!p.test(e.name)) return\n if (!rest) {\n this.matches.add(e, absolute, false)\n } else {\n this.subwalks.add(e, rest)\n }\n }\n\n testString(e: Path, p: string, rest: Pattern | null, absolute: boolean) {\n // should never happen?\n if (!e.isNamed(p)) return\n if (!rest) {\n this.matches.add(e, absolute, false)\n } else {\n this.subwalks.add(e, rest)\n }\n }\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/walker.d.ts b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/walker.d.ts new file mode 100644 index 00000000000000..5c1a0414971b3a --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/walker.d.ts @@ -0,0 +1,96 @@ +/// +/** + * Single-use utility classes to provide functionality to the {@link Glob} + * methods. + * + * @module + */ +import { Minipass } from 'minipass'; +import { Path } from 'path-scurry'; +import { IgnoreLike } from './ignore.js'; +import { Pattern } from './pattern.js'; +import { Processor } from './processor.js'; +export interface GlobWalkerOpts { + absolute?: boolean; + allowWindowsEscape?: boolean; + cwd?: string | URL; + dot?: boolean; + dotRelative?: boolean; + follow?: boolean; + ignore?: string | string[] | IgnoreLike; + mark?: boolean; + matchBase?: boolean; + maxDepth?: number; + nobrace?: boolean; + nocase?: boolean; + nodir?: boolean; + noext?: boolean; + noglobstar?: boolean; + platform?: NodeJS.Platform; + posix?: boolean; + realpath?: boolean; + root?: string; + stat?: boolean; + signal?: AbortSignal; + windowsPathsNoEscape?: boolean; + withFileTypes?: boolean; +} +export type GWOFileTypesTrue = GlobWalkerOpts & { + withFileTypes: true; +}; +export type GWOFileTypesFalse = GlobWalkerOpts & { + withFileTypes: false; +}; +export type GWOFileTypesUnset = GlobWalkerOpts & { + withFileTypes?: undefined; +}; +export type Result = O extends GWOFileTypesTrue ? Path : O extends GWOFileTypesFalse ? string : O extends GWOFileTypesUnset ? string : Path | string; +export type Matches = O extends GWOFileTypesTrue ? Set : O extends GWOFileTypesFalse ? Set : O extends GWOFileTypesUnset ? Set : Set; +export type MatchStream = O extends GWOFileTypesTrue ? Minipass : O extends GWOFileTypesFalse ? Minipass : O extends GWOFileTypesUnset ? 
Minipass : Minipass; +/** + * basic walking utilities that all the glob walker types use + */ +export declare abstract class GlobUtil { + #private; + path: Path; + patterns: Pattern[]; + opts: O; + seen: Set; + paused: boolean; + aborted: boolean; + signal?: AbortSignal; + maxDepth: number; + constructor(patterns: Pattern[], path: Path, opts: O); + pause(): void; + resume(): void; + onResume(fn: () => any): void; + matchCheck(e: Path, ifDir: boolean): Promise; + matchCheckTest(e: Path | undefined, ifDir: boolean): Path | undefined; + matchCheckSync(e: Path, ifDir: boolean): Path | undefined; + abstract matchEmit(p: Result): void; + abstract matchEmit(p: string | Path): void; + matchFinish(e: Path, absolute: boolean): void; + match(e: Path, absolute: boolean, ifDir: boolean): Promise; + matchSync(e: Path, absolute: boolean, ifDir: boolean): void; + walkCB(target: Path, patterns: Pattern[], cb: () => any): void; + walkCB2(target: Path, patterns: Pattern[], processor: Processor, cb: () => any): any; + walkCB3(target: Path, entries: Path[], processor: Processor, cb: () => any): void; + walkCBSync(target: Path, patterns: Pattern[], cb: () => any): void; + walkCB2Sync(target: Path, patterns: Pattern[], processor: Processor, cb: () => any): any; + walkCB3Sync(target: Path, entries: Path[], processor: Processor, cb: () => any): void; +} +export declare class GlobWalker extends GlobUtil { + matches: O extends GWOFileTypesTrue ? Set : O extends GWOFileTypesFalse ? Set : O extends GWOFileTypesUnset ? Set : Set; + constructor(patterns: Pattern[], path: Path, opts: O); + matchEmit(e: Result): void; + walk(): Promise>; + walkSync(): Matches; +} +export declare class GlobStream extends GlobUtil { + results: O extends GWOFileTypesTrue ? Minipass : O extends GWOFileTypesFalse ? Minipass : O extends GWOFileTypesUnset ? 
Minipass : Minipass; + constructor(patterns: Pattern[], path: Path, opts: O); + matchEmit(e: Result): void; + stream(): MatchStream; + streamSync(): MatchStream; +} +//# sourceMappingURL=walker.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/walker.d.ts.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/walker.d.ts.map new file mode 100644 index 00000000000000..dda062358f1998 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/walker.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"walker.d.ts","sourceRoot":"","sources":["../../../src/walker.ts"],"names":[],"mappings":";AAAA;;;;;GAKG;AACH,OAAO,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAA;AACnC,OAAO,EAAE,IAAI,EAAE,MAAM,aAAa,CAAA;AAClC,OAAO,EAAU,UAAU,EAAE,MAAM,aAAa,CAAA;AAOhD,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAA;AACtC,OAAO,EAAE,SAAS,EAAE,MAAM,gBAAgB,CAAA;AAE1C,MAAM,WAAW,cAAc;IAC7B,QAAQ,CAAC,EAAE,OAAO,CAAA;IAClB,kBAAkB,CAAC,EAAE,OAAO,CAAA;IAC5B,GAAG,CAAC,EAAE,MAAM,GAAG,GAAG,CAAA;IAClB,GAAG,CAAC,EAAE,OAAO,CAAA;IACb,WAAW,CAAC,EAAE,OAAO,CAAA;IACrB,MAAM,CAAC,EAAE,OAAO,CAAA;IAChB,MAAM,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,UAAU,CAAA;IACvC,IAAI,CAAC,EAAE,OAAO,CAAA;IACd,SAAS,CAAC,EAAE,OAAO,CAAA;IAGnB,QAAQ,CAAC,EAAE,MAAM,CAAA;IACjB,OAAO,CAAC,EAAE,OAAO,CAAA;IACjB,MAAM,CAAC,EAAE,OAAO,CAAA;IAChB,KAAK,CAAC,EAAE,OAAO,CAAA;IACf,KAAK,CAAC,EAAE,OAAO,CAAA;IACf,UAAU,CAAC,EAAE,OAAO,CAAA;IACpB,QAAQ,CAAC,EAAE,MAAM,CAAC,QAAQ,CAAA;IAC1B,KAAK,CAAC,EAAE,OAAO,CAAA;IACf,QAAQ,CAAC,EAAE,OAAO,CAAA;IAClB,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,IAAI,CAAC,EAAE,OAAO,CAAA;IACd,MAAM,CAAC,EAAE,WAAW,CAAA;IACpB,oBAAoB,CAAC,EAAE,OAAO,CAAA;IAC9B,aAAa,CAAC,EAAE,OAAO,CAAA;CACxB;AAED,MAAM,MAAM,gBAAgB,GAAG,cAAc,GAAG;IAC9C,aAAa,EAAE,IAAI,CAAA;CACpB,CAAA;AACD,MAAM,MAAM,iBAAiB,GAAG,cAAc,GAAG;IAC/C,aAAa,EAAE,KAAK,CAAA;CACrB,CAAA;AACD,MAAM,MAAM,iBAAiB,GAAG,cAAc,GAAG;IAC/C,aAAa,CAAC,EAAE,SAAS,CAAA;CAC1B,CAAA;AAED,MAAM,MAAM,MAAM,CAAC,CAAC,SAAS,cAAc,IAAI,CAAC,SAAS,gBAAgB,GACrE,IAAI,GACJ,CAAC,SAAS,iBAAiB,GAC3B,MAAM,GACN,CAAC,SAAS,iBAAiB,GAC3B,MAAM,GACN,IAAI,GAAG,MAAM,CAAA;AAEjB,MAAM,MAAM,OAAO,CAAC,CAAC,SAAS,cAAc,IAAI,CAAC,SAAS,gBAAgB,GACtE,GAAG,CAAC,IAAI,CAAC,GACT,CAAC,SAAS,iBAAiB,GAC3B,GAAG,CAAC,MAAM,CAAC,GACX,CAAC,SAAS,iBAAiB,GAC3B,GAAG,CAAC,MAAM,CAAC,GACX,GAAG,CAAC,IAAI,GAAG,MAAM,CAAC,CAAA;AAEtB,MAAM,MAAM,WAAW,CAAC,CAAC,SAAS,cAAc,IAC9C,CAAC,SAAS,gBAAgB,GACtB,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,GACpB,CAAC,SAAS,iBAAiB,GAC3B,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,GACxB,CAAC,SAAS,iBAAiB,GAC3B,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,GACxB,QAAQ,CAAC,IAAI,GAAG,MAAM,EAAE,IAAI,GAAG,MAAM,CAAC,CAAA;AAY5C;;GAEG;AACH,8BAAsB,QAAQ,CAAC,CAAC,SAAS,cAAc,GAAG,cAAc;;IACtE,IAAI,EAAE,IAAI,CAAA;IACV,QAAQ,EAAE,OAAO,EAAE,CAAA;IACnB,IAAI,EAAE,CAAC,CAAA;IACP,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,CAAkB;IACjC,MAAM,EAAE,OAAO,CAAQ;IACvB,OAAO,EAAE,OAAO,CAAQ;IAIxB,MAAM,CAAC,EAAE,WAAW,CAAA;IACpB,QAAQ,EAAE,MAAM,CAAA;gBAEJ,QAAQ,EAAE,OAAO,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC;IA8BpD,KAAK;IAGL,MAAM;IAUN,QAAQ,CAAC,EAAE,EAAE,MAAM,GAAG;IAahB,UAAU,CAAC,CAAC,EAAE,IAAI,EAAE,KAAK,EAAE,OAAO,GAAG,OAAO,CAAC,IAAI,GAAG,SAAS,CAAC;IAYpE,cAAc,CAAC,CAAC,EAAE,IAAI,GAAG,SAAS,EAAE,KAAK,EAAE,OAAO,GAAG,IAAI,GAAG,SAAS;IAUrE,cAAc,CAAC,CAAC,EAAE,IAAI,EAAE,KAAK,EAAE,OAAO,GAAG,IAAI,GAAG,SAAS;IAYzD,QAAQ,CAAC,SAAS,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,GAAG,IAAI;IACtC,QAAQ,CAAC,SAAS,CAAC,CAAC,EAAE,MAAM,GAAG,IAAI,GAAG,IAAI;IAE1C,WAAW,CAAC,CAAC,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO;IAsBhC,KAAK,CAAC,CAAC,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,KAAK,EAAE,OAAO,GAAG,OAA
O,CAAC,IAAI,CAAC;IAKtE,SAAS,CAAC,CAAC,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,KAAK,EAAE,OAAO,GAAG,IAAI;IAK3D,MAAM,CAAC,MAAM,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,EAAE,EAAE,EAAE,MAAM,GAAG;IAOvD,OAAO,CACL,MAAM,EAAE,IAAI,EACZ,QAAQ,EAAE,OAAO,EAAE,EACnB,SAAS,EAAE,SAAS,EACpB,EAAE,EAAE,MAAM,GAAG;IA2Cf,OAAO,CACL,MAAM,EAAE,IAAI,EACZ,OAAO,EAAE,IAAI,EAAE,EACf,SAAS,EAAE,SAAS,EACpB,EAAE,EAAE,MAAM,GAAG;IAsBf,UAAU,CAAC,MAAM,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,EAAE,EAAE,EAAE,MAAM,GAAG;IAO3D,WAAW,CACT,MAAM,EAAE,IAAI,EACZ,QAAQ,EAAE,OAAO,EAAE,EACnB,SAAS,EAAE,SAAS,EACpB,EAAE,EAAE,MAAM,GAAG;IAqCf,WAAW,CACT,MAAM,EAAE,IAAI,EACZ,OAAO,EAAE,IAAI,EAAE,EACf,SAAS,EAAE,SAAS,EACpB,EAAE,EAAE,MAAM,GAAG;CAoBhB;AAED,qBAAa,UAAU,CACrB,CAAC,SAAS,cAAc,GAAG,cAAc,CACzC,SAAQ,QAAQ,CAAC,CAAC,CAAC;IACnB,OAAO,EAAE,CAAC,SAAS,gBAAgB,GAC/B,GAAG,CAAC,IAAI,CAAC,GACT,CAAC,SAAS,iBAAiB,GAC3B,GAAG,CAAC,MAAM,CAAC,GACX,CAAC,SAAS,iBAAiB,GAC3B,GAAG,CAAC,MAAM,CAAC,GACX,GAAG,CAAC,IAAI,GAAG,MAAM,CAAC,CAAA;gBAEV,QAAQ,EAAE,OAAO,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC;IAKpD,SAAS,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,GAAG,IAAI;IAKvB,IAAI,IAAI,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;IAiBjC,QAAQ,IAAI,OAAO,CAAC,CAAC,CAAC;CAWvB;AAED,qBAAa,UAAU,CACrB,CAAC,SAAS,cAAc,GAAG,cAAc,CACzC,SAAQ,QAAQ,CAAC,CAAC,CAAC;IACnB,OAAO,EAAE,CAAC,SAAS,gBAAgB,GAC/B,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,GACpB,CAAC,SAAS,iBAAiB,GAC3B,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,GACxB,CAAC,SAAS,iBAAiB,GAC3B,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,GACxB,QAAQ,CAAC,IAAI,GAAG,MAAM,EAAE,IAAI,GAAG,MAAM,CAAC,CAAA;gBAE9B,QAAQ,EAAE,OAAO,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC;IAUpD,SAAS,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,GAAG,IAAI;IAM7B,MAAM,IAAI,WAAW,CAAC,CAAC,CAAC;IAYxB,UAAU,IAAI,WAAW,CAAC,CAAC,CAAC;CAO7B"} \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/walker.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/walker.js new file mode 100644 index 00000000000000..9651ce1164016c --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/walker.js @@ -0,0 +1,358 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.GlobStream = exports.GlobWalker = exports.GlobUtil = void 0; +/** + * Single-use utility classes to provide functionality to the {@link Glob} + * methods. + * + * @module + */ +const minipass_1 = require("minipass"); +const ignore_js_1 = require("./ignore.js"); +const processor_js_1 = require("./processor.js"); +const makeIgnore = (ignore, opts) => typeof ignore === 'string' + ? new ignore_js_1.Ignore([ignore], opts) + : Array.isArray(ignore) + ? new ignore_js_1.Ignore(ignore, opts) + : ignore; +/** + * basic walking utilities that all the glob walker types use + */ +class GlobUtil { + path; + patterns; + opts; + seen = new Set(); + paused = false; + aborted = false; + #onResume = []; + #ignore; + #sep; + signal; + maxDepth; + constructor(patterns, path, opts) { + this.patterns = patterns; + this.path = path; + this.opts = opts; + this.#sep = !opts.posix && opts.platform === 'win32' ? 
'\\' : '/'; + if (opts.ignore) { + this.#ignore = makeIgnore(opts.ignore, opts); + } + // ignore, always set with maxDepth, but it's optional on the + // GlobOptions type + /* c8 ignore start */ + this.maxDepth = opts.maxDepth || Infinity; + /* c8 ignore stop */ + if (opts.signal) { + this.signal = opts.signal; + this.signal.addEventListener('abort', () => { + this.#onResume.length = 0; + }); + } + } + #ignored(path) { + return this.seen.has(path) || !!this.#ignore?.ignored?.(path); + } + #childrenIgnored(path) { + return !!this.#ignore?.childrenIgnored?.(path); + } + // backpressure mechanism + pause() { + this.paused = true; + } + resume() { + /* c8 ignore start */ + if (this.signal?.aborted) + return; + /* c8 ignore stop */ + this.paused = false; + let fn = undefined; + while (!this.paused && (fn = this.#onResume.shift())) { + fn(); + } + } + onResume(fn) { + if (this.signal?.aborted) + return; + /* c8 ignore start */ + if (!this.paused) { + fn(); + } + else { + /* c8 ignore stop */ + this.#onResume.push(fn); + } + } + // do the requisite realpath/stat checking, and return the path + // to add or undefined to filter it out. + async matchCheck(e, ifDir) { + if (ifDir && this.opts.nodir) + return undefined; + let rpc; + if (this.opts.realpath) { + rpc = e.realpathCached() || (await e.realpath()); + if (!rpc) + return undefined; + e = rpc; + } + const needStat = e.isUnknown() || this.opts.stat; + return this.matchCheckTest(needStat ? await e.lstat() : e, ifDir); + } + matchCheckTest(e, ifDir) { + return e && + (this.maxDepth === Infinity || e.depth() <= this.maxDepth) && + (!ifDir || e.canReaddir()) && + (!this.opts.nodir || !e.isDirectory()) && + !this.#ignored(e) + ? e + : undefined; + } + matchCheckSync(e, ifDir) { + if (ifDir && this.opts.nodir) + return undefined; + let rpc; + if (this.opts.realpath) { + rpc = e.realpathCached() || e.realpathSync(); + if (!rpc) + return undefined; + e = rpc; + } + const needStat = e.isUnknown() || this.opts.stat; + return this.matchCheckTest(needStat ? e.lstatSync() : e, ifDir); + } + matchFinish(e, absolute) { + if (this.#ignored(e)) + return; + const abs = this.opts.absolute === undefined ? absolute : this.opts.absolute; + this.seen.add(e); + const mark = this.opts.mark && e.isDirectory() ? this.#sep : ''; + // ok, we have what we need! + if (this.opts.withFileTypes) { + this.matchEmit(e); + } + else if (abs) { + const abs = this.opts.posix ? e.fullpathPosix() : e.fullpath(); + this.matchEmit(abs + mark); + } + else { + const rel = this.opts.posix ? e.relativePosix() : e.relative(); + const pre = this.opts.dotRelative && !rel.startsWith('..' + this.#sep) + ? '.' + this.#sep + : ''; + this.matchEmit(!rel ? '.' + mark : pre + rel + mark); + } + } + async match(e, absolute, ifDir) { + const p = await this.matchCheck(e, ifDir); + if (p) + this.matchFinish(p, absolute); + } + matchSync(e, absolute, ifDir) { + const p = this.matchCheckSync(e, ifDir); + if (p) + this.matchFinish(p, absolute); + } + walkCB(target, patterns, cb) { + /* c8 ignore start */ + if (this.signal?.aborted) + cb(); + /* c8 ignore stop */ + this.walkCB2(target, patterns, new processor_js_1.Processor(this.opts), cb); + } + walkCB2(target, patterns, processor, cb) { + if (this.#childrenIgnored(target)) + return cb(); + if (this.signal?.aborted) + cb(); + if (this.paused) { + this.onResume(() => this.walkCB2(target, patterns, processor, cb)); + return; + } + processor.processPatterns(target, patterns); + // done processing. all of the above is sync, can be abstracted out. 
+ // subwalks is a map of paths to the entry filters they need + // matches is a map of paths to [absolute, ifDir] tuples. + let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + tasks++; + this.match(m, absolute, ifDir).then(() => next()); + } + for (const t of processor.subwalkTargets()) { + if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) { + continue; + } + tasks++; + const childrenCached = t.readdirCached(); + if (t.calledReaddir()) + this.walkCB3(t, childrenCached, processor, next); + else { + t.readdirCB((_, entries) => this.walkCB3(t, entries, processor, next), true); + } + } + next(); + } + walkCB3(target, entries, processor, cb) { + processor = processor.filterEntries(target, entries); + let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + tasks++; + this.match(m, absolute, ifDir).then(() => next()); + } + for (const [target, patterns] of processor.subwalks.entries()) { + tasks++; + this.walkCB2(target, patterns, processor.child(), next); + } + next(); + } + walkCBSync(target, patterns, cb) { + /* c8 ignore start */ + if (this.signal?.aborted) + cb(); + /* c8 ignore stop */ + this.walkCB2Sync(target, patterns, new processor_js_1.Processor(this.opts), cb); + } + walkCB2Sync(target, patterns, processor, cb) { + if (this.#childrenIgnored(target)) + return cb(); + if (this.signal?.aborted) + cb(); + if (this.paused) { + this.onResume(() => this.walkCB2Sync(target, patterns, processor, cb)); + return; + } + processor.processPatterns(target, patterns); + // done processing. all of the above is sync, can be abstracted out. + // subwalks is a map of paths to the entry filters they need + // matches is a map of paths to [absolute, ifDir] tuples. 
+ let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + this.matchSync(m, absolute, ifDir); + } + for (const t of processor.subwalkTargets()) { + if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) { + continue; + } + tasks++; + const children = t.readdirSync(); + this.walkCB3Sync(t, children, processor, next); + } + next(); + } + walkCB3Sync(target, entries, processor, cb) { + processor = processor.filterEntries(target, entries); + let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + this.matchSync(m, absolute, ifDir); + } + for (const [target, patterns] of processor.subwalks.entries()) { + tasks++; + this.walkCB2Sync(target, patterns, processor.child(), next); + } + next(); + } +} +exports.GlobUtil = GlobUtil; +class GlobWalker extends GlobUtil { + matches; + constructor(patterns, path, opts) { + super(patterns, path, opts); + this.matches = new Set(); + } + matchEmit(e) { + this.matches.add(e); + } + async walk() { + if (this.signal?.aborted) + throw this.signal.reason; + if (this.path.isUnknown()) { + await this.path.lstat(); + } + await new Promise((res, rej) => { + this.walkCB(this.path, this.patterns, () => { + if (this.signal?.aborted) { + rej(this.signal.reason); + } + else { + res(this.matches); + } + }); + }); + return this.matches; + } + walkSync() { + if (this.signal?.aborted) + throw this.signal.reason; + if (this.path.isUnknown()) { + this.path.lstatSync(); + } + // nothing for the callback to do, because this never pauses + this.walkCBSync(this.path, this.patterns, () => { + if (this.signal?.aborted) + throw this.signal.reason; + }); + return this.matches; + } +} +exports.GlobWalker = GlobWalker; +class GlobStream extends GlobUtil { + results; + constructor(patterns, path, opts) { + super(patterns, path, opts); + this.results = new minipass_1.Minipass({ + signal: this.signal, + objectMode: true, + }); + this.results.on('drain', () => this.resume()); + this.results.on('resume', () => this.resume()); + } + matchEmit(e) { + this.results.write(e); + if (!this.results.flowing) + this.pause(); + } + stream() { + const target = this.path; + if (target.isUnknown()) { + target.lstat().then(() => { + this.walkCB(target, this.patterns, () => this.results.end()); + }); + } + else { + this.walkCB(target, this.patterns, () => this.results.end()); + } + return this.results; + } + streamSync() { + if (this.path.isUnknown()) { + this.path.lstatSync(); + } + this.walkCBSync(this.path, this.patterns, () => this.results.end()); + return this.results; + } +} +exports.GlobStream = GlobStream; +//# sourceMappingURL=walker.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/walker.js.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/walker.js.map new file mode 100644 index 00000000000000..a7af398939ae48 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/walker.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"walker.js","sourceRoot":"","sources":["../../../src/walker.ts"],"names":[],"mappings":";;;AAAA;;;;;GAKG;AACH,uCAAmC;AAEnC,2CAAgD;AAQhD,iDAA0C;AAiE1C,MAAM,UAAU,GAAG,CACjB,MAAsC,EACtC,IAAoB,EACR,EAAE,CACd,OAAO,MAAM,KAAK,QAAQ;IACxB,CAAC,CAAC,IAAI,kBAAM,CAAC,CAAC,MAAM,CAAC,EAAE,IAAI,CAAC;IAC5B,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC;QACvB,CAAC,CAAC,IAAI,kBAAM,CAAC,MAAM,EAAE,IAAI,CAAC;QAC1B,CAAC,CAAC,MAAM,CAAA;AAEZ;;GAEG;AACH,MAAsB,QAAQ;IAC5B,IAAI,CAAM;IACV,QAAQ,CAAW;IACnB,IAAI,CAAG;IACP,IAAI,GAAc,IAAI,GAAG,EAAQ,CAAA;IACjC,MAAM,GAAY,KAAK,CAAA;IACvB,OAAO,GAAY,KAAK,CAAA;IACxB,SAAS,GAAkB,EAAE,CAAA;IAC7B,OAAO,CAAa;IACpB,IAAI,CAAY;IAChB,MAAM,CAAc;IACpB,QAAQ,CAAQ;IAGhB,YAAY,QAAmB,EAAE,IAAU,EAAE,IAAO;QAClD,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAA;QACxB,IAAI,CAAC,IAAI,GAAG,IAAI,CAAA;QAChB,IAAI,CAAC,IAAI,GAAG,IAAI,CAAA;QAChB,IAAI,CAAC,IAAI,GAAG,CAAC,IAAI,CAAC,KAAK,IAAI,IAAI,CAAC,QAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAA;QACjE,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,IAAI,CAAC,OAAO,GAAG,UAAU,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,CAAA;SAC7C;QACD,6DAA6D;QAC7D,mBAAmB;QACnB,qBAAqB;QACrB,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,IAAI,QAAQ,CAAA;QACzC,oBAAoB;QACpB,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;YACzB,IAAI,CAAC,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE;gBACzC,IAAI,CAAC,SAAS,CAAC,MAAM,GAAG,CAAC,CAAA;YAC3B,CAAC,CAAC,CAAA;SACH;IACH,CAAC;IAED,QAAQ,CAAC,IAAU;QACjB,OAAO,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,EAAE,CAAC,IAAI,CAAC,CAAA;IAC/D,CAAC;IACD,gBAAgB,CAAC,IAAU;QACzB,OAAO,CAAC,CAAC,IAAI,CAAC,OAAO,EAAE,eAAe,EAAE,CAAC,IAAI,CAAC,CAAA;IAChD,CAAC;IAED,yBAAyB;IACzB,KAAK;QACH,IAAI,CAAC,MAAM,GAAG,IAAI,CAAA;IACpB,CAAC;IACD,MAAM;QACJ,qBAAqB;QACrB,IAAI,IAAI,CAAC,MAAM,EAAE,OAAO;YAAE,OAAM;QAChC,oBAAoB;QACpB,IAAI,CAAC,MAAM,GAAG,KAAK,CAAA;QACnB,IAAI,EAAE,GAA4B,SAAS,CAAA;QAC3C,OAAO,CAAC,IAAI,CAAC,MAAM,IAAI,CAAC,EAAE,GAAG,IAAI,CAAC,SAAS,CAAC,KAAK,EAAE,CAAC,EAAE;YACpD,EAAE,EAAE,CAAA;SACL;IACH,CAAC;IACD,QAAQ,CAAC,EAAa;QACpB,IAAI,IAAI,CAAC,MAAM,EAAE,OAAO;YAAE,OAAM;QAChC,qBAAqB;QACrB,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;YAChB,EAAE,EAAE,CAAA;SACL;aAAM;YACL,oBAAoB;YACpB,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE,CAAC,CAAA;SACxB;IACH,CAAC;IAED,+DAA+D;IAC/D,wCAAwC;IACxC,KAAK,CAAC,UAAU,CAAC,CAAO,EAAE,KAAc;QACtC,IAAI,KAAK,IAAI,IAAI,CAAC,IAAI,CAAC,KAAK;YAAE,OAAO,SAAS,CAAA;QAC9C,IAAI,GAAqB,CAAA;QACzB,IAAI,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE;YACtB,GAAG,GAAG,CAAC,CAAC,cAAc,EAAE,IAAI,CAAC,MAAM,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAA;YAChD,IAAI,CAAC,GAAG;gBAAE,OAAO,SAAS,CAAA;YAC1B,CAAC,GAAG,GAAG,CAAA;SACR;QACD,MAAM,QAAQ,GAAG,CAAC,CAAC,SAAS,EAAE,IAAI,IAAI,CAAC,IAAI,CAAC,IAAI,CAAA;QAChD,OAAO,IAAI,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,CAAA;IACnE,CAAC;IAED,cAAc,CAAC,CAAmB,EAAE,KAAc;QAChD,OAAO,CAAC;YACN,CAAC,IAAI,CAAC,QAAQ,KAAK,QAAQ,IAAI,CAAC,CAAC,KAAK,EAAE,IAAI,IAAI,CAAC,QAAQ,CAAC;YAC1D,CAAC,CAAC,KAAK,IAAI,CAAC,CAAC,UAAU,EAAE,CAAC;YAC1B,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,IAAI,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC;YACtC,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC;YACjB,CAAC,CAAC,CAAC;YACH,CAAC,CAAC,SAAS,CAAA;IACf,CAAC;IAED,cAAc,CAAC,CAAO,EAAE,KAAc;QACpC,IAAI,KAAK,IAAI,IAAI,CAAC,IAAI,CAAC,KAAK;YAAE,OAAO,SAAS,CAAA;QAC9C,IAAI,GAAqB,CAAA;QACzB,IAAI,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE;YACtB,GAAG,GAAG,CAAC,CAAC,cAAc,EAAE,IAAI,CAAC,CAAC,YAAY,EAAE,CAAA;YAC5C,IAAI,CAAC,GAAG;gBAAE,OAAO,SAAS,CAAA;YAC1B,CAAC,GAAG,GAAG,CAAA;SACR;QACD,MAAM,QAAQ,GAAG,CAAC,CAAC,SAAS,EAAE,IAAI,IAAI,CAAC,IAAI,CAAC,IAAI,CAAA;QAChD,OAAO,IAAI,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,EAAE,CAA
C,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,CAAA;IACjE,CAAC;IAKD,WAAW,CAAC,CAAO,EAAE,QAAiB;QACpC,IAAI,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC;YAAE,OAAM;QAC5B,MAAM,GAAG,GACP,IAAI,CAAC,IAAI,CAAC,QAAQ,KAAK,SAAS,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAA;QAClE,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;QAChB,MAAM,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,IAAI,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAA;QAC/D,4BAA4B;QAC5B,IAAI,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE;YAC3B,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAA;SAClB;aAAM,IAAI,GAAG,EAAE;YACd,MAAM,GAAG,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,aAAa,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAA;YAC9D,IAAI,CAAC,SAAS,CAAC,GAAG,GAAG,IAAI,CAAC,CAAA;SAC3B;aAAM;YACL,MAAM,GAAG,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,aAAa,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAA;YAC9D,MAAM,GAAG,GACP,IAAI,CAAC,IAAI,CAAC,WAAW,IAAI,CAAC,GAAG,CAAC,UAAU,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC;gBACxD,CAAC,CAAC,GAAG,GAAG,IAAI,CAAC,IAAI;gBACjB,CAAC,CAAC,EAAE,CAAA;YACR,IAAI,CAAC,SAAS,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,GAAG,IAAI,CAAC,CAAC,CAAC,GAAG,GAAG,GAAG,GAAG,IAAI,CAAC,CAAA;SACrD;IACH,CAAC;IAED,KAAK,CAAC,KAAK,CAAC,CAAO,EAAE,QAAiB,EAAE,KAAc;QACpD,MAAM,CAAC,GAAG,MAAM,IAAI,CAAC,UAAU,CAAC,CAAC,EAAE,KAAK,CAAC,CAAA;QACzC,IAAI,CAAC;YAAE,IAAI,CAAC,WAAW,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAA;IACtC,CAAC;IAED,SAAS,CAAC,CAAO,EAAE,QAAiB,EAAE,KAAc;QAClD,MAAM,CAAC,GAAG,IAAI,CAAC,cAAc,CAAC,CAAC,EAAE,KAAK,CAAC,CAAA;QACvC,IAAI,CAAC;YAAE,IAAI,CAAC,WAAW,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAA;IACtC,CAAC;IAED,MAAM,CAAC,MAAY,EAAE,QAAmB,EAAE,EAAa;QACrD,qBAAqB;QACrB,IAAI,IAAI,CAAC,MAAM,EAAE,OAAO;YAAE,EAAE,EAAE,CAAA;QAC9B,oBAAoB;QACpB,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,QAAQ,EAAE,IAAI,wBAAS,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,EAAE,CAAC,CAAA;IAC9D,CAAC;IAED,OAAO,CACL,MAAY,EACZ,QAAmB,EACnB,SAAoB,EACpB,EAAa;QAEb,IAAI,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC;YAAE,OAAO,EAAE,EAAE,CAAA;QAC9C,IAAI,IAAI,CAAC,MAAM,EAAE,OAAO;YAAE,EAAE,EAAE,CAAA;QAC9B,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,IAAI,CAAC,QAAQ,CAAC,GAAG,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,QAAQ,EAAE,SAAS,EAAE,EAAE,CAAC,CAAC,CAAA;YAClE,OAAM;SACP;QACD,SAAS,CAAC,eAAe,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QAE3C,qEAAqE;QACrE,4DAA4D;QAC5D,yDAAyD;QACzD,IAAI,KAAK,GAAG,CAAC,CAAA;QACb,MAAM,IAAI,GAAG,GAAG,EAAE;YAChB,IAAI,EAAE,KAAK,KAAK,CAAC;gBAAE,EAAE,EAAE,CAAA;QACzB,CAAC,CAAA;QAED,KAAK,MAAM,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,IAAI,SAAS,CAAC,OAAO,CAAC,OAAO,EAAE,EAAE;YAC9D,IAAI,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC;gBAAE,SAAQ;YAC9B,KAAK,EAAE,CAAA;YACP,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC,IAAI,EAAE,CAAC,CAAA;SAClD;QAED,KAAK,MAAM,CAAC,IAAI,SAAS,CAAC,cAAc,EAAE,EAAE;YAC1C,IAAI,IAAI,CAAC,QAAQ,KAAK,QAAQ,IAAI,CAAC,CAAC,KAAK,EAAE,IAAI,IAAI,CAAC,QAAQ,EAAE;gBAC5D,SAAQ;aACT;YACD,KAAK,EAAE,CAAA;YACP,MAAM,cAAc,GAAG,CAAC,CAAC,aAAa,EAAE,CAAA;YACxC,IAAI,CAAC,CAAC,aAAa,EAAE;gBACnB,IAAI,CAAC,OAAO,CAAC,CAAC,EAAE,cAAc,EAAE,SAAS,EAAE,IAAI,CAAC,CAAA;iBAC7C;gBACH,CAAC,CAAC,SAAS,CACT,CAAC,CAAC,EAAE,OAAO,EAAE,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,EAAE,OAAO,EAAE,SAAS,EAAE,IAAI,CAAC,EACzD,IAAI,CACL,CAAA;aACF;SACF;QAED,IAAI,EAAE,CAAA;IACR,CAAC;IAED,OAAO,CACL,MAAY,EACZ,OAAe,EACf,SAAoB,EACpB,EAAa;QAEb,SAAS,GAAG,SAAS,CAAC,aAAa,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA;QAEpD,IAAI,KAAK,GAAG,CAAC,CAAA;QACb,MAAM,IAAI,GAAG,GAAG,EAAE;YAChB,IAAI,EAAE,KAAK,KAAK,CAAC;gBAAE,EAAE,EAAE,CAAA;QACzB,CAAC,CAAA;QAED,KAAK,MAAM,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,IAAI,SAAS,CAAC,OAAO,CAAC,OAAO,EAAE,EAAE;YAC9D,IAAI,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC;gBAAE,SAAQ;YAC9B,KAAK,EAAE,CAAA;YACP,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,QAAQ
,EAAE,KAAK,CAAC,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC,IAAI,EAAE,CAAC,CAAA;SAClD;QACD,KAAK,MAAM,CAAC,MAAM,EAAE,QAAQ,CAAC,IAAI,SAAS,CAAC,QAAQ,CAAC,OAAO,EAAE,EAAE;YAC7D,KAAK,EAAE,CAAA;YACP,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,QAAQ,EAAE,SAAS,CAAC,KAAK,EAAE,EAAE,IAAI,CAAC,CAAA;SACxD;QAED,IAAI,EAAE,CAAA;IACR,CAAC;IAED,UAAU,CAAC,MAAY,EAAE,QAAmB,EAAE,EAAa;QACzD,qBAAqB;QACrB,IAAI,IAAI,CAAC,MAAM,EAAE,OAAO;YAAE,EAAE,EAAE,CAAA;QAC9B,oBAAoB;QACpB,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,QAAQ,EAAE,IAAI,wBAAS,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,EAAE,CAAC,CAAA;IAClE,CAAC;IAED,WAAW,CACT,MAAY,EACZ,QAAmB,EACnB,SAAoB,EACpB,EAAa;QAEb,IAAI,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC;YAAE,OAAO,EAAE,EAAE,CAAA;QAC9C,IAAI,IAAI,CAAC,MAAM,EAAE,OAAO;YAAE,EAAE,EAAE,CAAA;QAC9B,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,IAAI,CAAC,QAAQ,CAAC,GAAG,EAAE,CACjB,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,QAAQ,EAAE,SAAS,EAAE,EAAE,CAAC,CAClD,CAAA;YACD,OAAM;SACP;QACD,SAAS,CAAC,eAAe,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QAE3C,qEAAqE;QACrE,4DAA4D;QAC5D,yDAAyD;QACzD,IAAI,KAAK,GAAG,CAAC,CAAA;QACb,MAAM,IAAI,GAAG,GAAG,EAAE;YAChB,IAAI,EAAE,KAAK,KAAK,CAAC;gBAAE,EAAE,EAAE,CAAA;QACzB,CAAC,CAAA;QAED,KAAK,MAAM,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,IAAI,SAAS,CAAC,OAAO,CAAC,OAAO,EAAE,EAAE;YAC9D,IAAI,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC;gBAAE,SAAQ;YAC9B,IAAI,CAAC,SAAS,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAA;SACnC;QAED,KAAK,MAAM,CAAC,IAAI,SAAS,CAAC,cAAc,EAAE,EAAE;YAC1C,IAAI,IAAI,CAAC,QAAQ,KAAK,QAAQ,IAAI,CAAC,CAAC,KAAK,EAAE,IAAI,IAAI,CAAC,QAAQ,EAAE;gBAC5D,SAAQ;aACT;YACD,KAAK,EAAE,CAAA;YACP,MAAM,QAAQ,GAAG,CAAC,CAAC,WAAW,EAAE,CAAA;YAChC,IAAI,CAAC,WAAW,CAAC,CAAC,EAAE,QAAQ,EAAE,SAAS,EAAE,IAAI,CAAC,CAAA;SAC/C;QAED,IAAI,EAAE,CAAA;IACR,CAAC;IAED,WAAW,CACT,MAAY,EACZ,OAAe,EACf,SAAoB,EACpB,EAAa;QAEb,SAAS,GAAG,SAAS,CAAC,aAAa,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA;QAEpD,IAAI,KAAK,GAAG,CAAC,CAAA;QACb,MAAM,IAAI,GAAG,GAAG,EAAE;YAChB,IAAI,EAAE,KAAK,KAAK,CAAC;gBAAE,EAAE,EAAE,CAAA;QACzB,CAAC,CAAA;QAED,KAAK,MAAM,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,IAAI,SAAS,CAAC,OAAO,CAAC,OAAO,EAAE,EAAE;YAC9D,IAAI,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC;gBAAE,SAAQ;YAC9B,IAAI,CAAC,SAAS,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAA;SACnC;QACD,KAAK,MAAM,CAAC,MAAM,EAAE,QAAQ,CAAC,IAAI,SAAS,CAAC,QAAQ,CAAC,OAAO,EAAE,EAAE;YAC7D,KAAK,EAAE,CAAA;YACP,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,QAAQ,EAAE,SAAS,CAAC,KAAK,EAAE,EAAE,IAAI,CAAC,CAAA;SAC5D;QAED,IAAI,EAAE,CAAA;IACR,CAAC;CACF;AAlSD,4BAkSC;AAED,MAAa,UAEX,SAAQ,QAAW;IACnB,OAAO,CAMe;IAEtB,YAAY,QAAmB,EAAE,IAAU,EAAE,IAAO;QAClD,KAAK,CAAC,QAAQ,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;QAC3B,IAAI,CAAC,OAAO,GAAG,IAAI,GAAG,EAAgB,CAAA;IACxC,CAAC;IAGD,SAAS,CAAC,CAAgB;QACxB,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;IACrB,CAAC;IAED,KAAK,CAAC,IAAI;QACR,IAAI,IAAI,CAAC,MAAM,EAAE,OAAO;YAAE,MAAM,IAAI,CAAC,MAAM,CAAC,MAAM,CAAA;QAClD,IAAI,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,EAAE;YACzB,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,CAAA;SACxB;QACD,MAAM,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE;YAC7B,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,QAAQ,EAAE,GAAG,EAAE;gBACzC,IAAI,IAAI,CAAC,MAAM,EAAE,OAAO,EAAE;oBACxB,GAAG,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,CAAA;iBACxB;qBAAM;oBACL,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;iBAClB;YACH,CAAC,CAAC,CAAA;QACJ,CAAC,CAAC,CAAA;QACF,OAAO,IAAI,CAAC,OAAO,CAAA;IACrB,CAAC;IAED,QAAQ;QACN,IAAI,IAAI,CAAC,MAAM,EAAE,OAAO;YAAE,MAAM,IAAI,CAAC,MAAM,CAAC,MAAM,CAAA;QAClD,IAAI,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,EAAE;YACzB,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,CAAA;SACtB;QACD,4DAA4D;QAC5D,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,QAAQ,EAAE,GAAG,EAAE;YAC7C,IAAI,IAAI,CAAC,MAAM,EAAE,OAAO;gBAAE,MAAM,IAAI,CAAC,MAAM,CAAC,MAAM,CAAA;QACpD,CAAC,CAAC,CAAA;QACF,OAAO,IAAI,CAAC,OAAO,CAAA;IACrB,CAAC;CACF;AAjDD,gC
AiDC;AAED,MAAa,UAEX,SAAQ,QAAW;IACnB,OAAO,CAMmC;IAE1C,YAAY,QAAmB,EAAE,IAAU,EAAE,IAAO;QAClD,KAAK,CAAC,QAAQ,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;QAC3B,IAAI,CAAC,OAAO,GAAG,IAAI,mBAAQ,CAAC;YAC1B,MAAM,EAAE,IAAI,CAAC,MAAM;YACnB,UAAU,EAAE,IAAI;SACjB,CAAmB,CAAA;QACpB,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,OAAO,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAA;QAC7C,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,QAAQ,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAA;IAChD,CAAC;IAGD,SAAS,CAAC,CAAgB;QACxB,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,CAAA;QACrB,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO;YAAE,IAAI,CAAC,KAAK,EAAE,CAAA;IACzC,CAAC;IAED,MAAM;QACJ,MAAM,MAAM,GAAG,IAAI,CAAC,IAAI,CAAA;QACxB,IAAI,MAAM,CAAC,SAAS,EAAE,EAAE;YACtB,MAAM,CAAC,KAAK,EAAE,CAAC,IAAI,CAAC,GAAG,EAAE;gBACvB,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,IAAI,CAAC,QAAQ,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,CAAC,CAAA;YAC9D,CAAC,CAAC,CAAA;SACH;aAAM;YACL,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,IAAI,CAAC,QAAQ,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,CAAC,CAAA;SAC7D;QACD,OAAO,IAAI,CAAC,OAAO,CAAA;IACrB,CAAC;IAED,UAAU;QACR,IAAI,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,EAAE;YACzB,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,CAAA;SACtB;QACD,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,QAAQ,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,CAAC,CAAA;QACnE,OAAO,IAAI,CAAC,OAAO,CAAA;IACrB,CAAC;CACF;AA9CD,gCA8CC","sourcesContent":["/**\n * Single-use utility classes to provide functionality to the {@link Glob}\n * methods.\n *\n * @module\n */\nimport { Minipass } from 'minipass'\nimport { Path } from 'path-scurry'\nimport { Ignore, IgnoreLike } from './ignore.js'\n\n// XXX can we somehow make it so that it NEVER processes a given path more than\n// once, enough that the match set tracking is no longer needed? that'd speed\n// things up a lot. Or maybe bring back nounique, and skip it in that case?\n\n// a single minimatch set entry with 1 or more parts\nimport { Pattern } from './pattern.js'\nimport { Processor } from './processor.js'\n\nexport interface GlobWalkerOpts {\n absolute?: boolean\n allowWindowsEscape?: boolean\n cwd?: string | URL\n dot?: boolean\n dotRelative?: boolean\n follow?: boolean\n ignore?: string | string[] | IgnoreLike\n mark?: boolean\n matchBase?: boolean\n // Note: maxDepth here means \"maximum actual Path.depth()\",\n // not \"maximum depth beyond cwd\"\n maxDepth?: number\n nobrace?: boolean\n nocase?: boolean\n nodir?: boolean\n noext?: boolean\n noglobstar?: boolean\n platform?: NodeJS.Platform\n posix?: boolean\n realpath?: boolean\n root?: string\n stat?: boolean\n signal?: AbortSignal\n windowsPathsNoEscape?: boolean\n withFileTypes?: boolean\n}\n\nexport type GWOFileTypesTrue = GlobWalkerOpts & {\n withFileTypes: true\n}\nexport type GWOFileTypesFalse = GlobWalkerOpts & {\n withFileTypes: false\n}\nexport type GWOFileTypesUnset = GlobWalkerOpts & {\n withFileTypes?: undefined\n}\n\nexport type Result = O extends GWOFileTypesTrue\n ? Path\n : O extends GWOFileTypesFalse\n ? string\n : O extends GWOFileTypesUnset\n ? string\n : Path | string\n\nexport type Matches = O extends GWOFileTypesTrue\n ? Set\n : O extends GWOFileTypesFalse\n ? Set\n : O extends GWOFileTypesUnset\n ? Set\n : Set\n\nexport type MatchStream =\n O extends GWOFileTypesTrue\n ? Minipass\n : O extends GWOFileTypesFalse\n ? Minipass\n : O extends GWOFileTypesUnset\n ? Minipass\n : Minipass\n\nconst makeIgnore = (\n ignore: string | string[] | IgnoreLike,\n opts: GlobWalkerOpts\n): IgnoreLike =>\n typeof ignore === 'string'\n ? new Ignore([ignore], opts)\n : Array.isArray(ignore)\n ? 
new Ignore(ignore, opts)\n : ignore\n\n/**\n * basic walking utilities that all the glob walker types use\n */\nexport abstract class GlobUtil {\n path: Path\n patterns: Pattern[]\n opts: O\n seen: Set = new Set()\n paused: boolean = false\n aborted: boolean = false\n #onResume: (() => any)[] = []\n #ignore?: IgnoreLike\n #sep: '\\\\' | '/'\n signal?: AbortSignal\n maxDepth: number\n\n constructor(patterns: Pattern[], path: Path, opts: O)\n constructor(patterns: Pattern[], path: Path, opts: O) {\n this.patterns = patterns\n this.path = path\n this.opts = opts\n this.#sep = !opts.posix && opts.platform === 'win32' ? '\\\\' : '/'\n if (opts.ignore) {\n this.#ignore = makeIgnore(opts.ignore, opts)\n }\n // ignore, always set with maxDepth, but it's optional on the\n // GlobOptions type\n /* c8 ignore start */\n this.maxDepth = opts.maxDepth || Infinity\n /* c8 ignore stop */\n if (opts.signal) {\n this.signal = opts.signal\n this.signal.addEventListener('abort', () => {\n this.#onResume.length = 0\n })\n }\n }\n\n #ignored(path: Path): boolean {\n return this.seen.has(path) || !!this.#ignore?.ignored?.(path)\n }\n #childrenIgnored(path: Path): boolean {\n return !!this.#ignore?.childrenIgnored?.(path)\n }\n\n // backpressure mechanism\n pause() {\n this.paused = true\n }\n resume() {\n /* c8 ignore start */\n if (this.signal?.aborted) return\n /* c8 ignore stop */\n this.paused = false\n let fn: (() => any) | undefined = undefined\n while (!this.paused && (fn = this.#onResume.shift())) {\n fn()\n }\n }\n onResume(fn: () => any) {\n if (this.signal?.aborted) return\n /* c8 ignore start */\n if (!this.paused) {\n fn()\n } else {\n /* c8 ignore stop */\n this.#onResume.push(fn)\n }\n }\n\n // do the requisite realpath/stat checking, and return the path\n // to add or undefined to filter it out.\n async matchCheck(e: Path, ifDir: boolean): Promise {\n if (ifDir && this.opts.nodir) return undefined\n let rpc: Path | undefined\n if (this.opts.realpath) {\n rpc = e.realpathCached() || (await e.realpath())\n if (!rpc) return undefined\n e = rpc\n }\n const needStat = e.isUnknown() || this.opts.stat\n return this.matchCheckTest(needStat ? await e.lstat() : e, ifDir)\n }\n\n matchCheckTest(e: Path | undefined, ifDir: boolean): Path | undefined {\n return e &&\n (this.maxDepth === Infinity || e.depth() <= this.maxDepth) &&\n (!ifDir || e.canReaddir()) &&\n (!this.opts.nodir || !e.isDirectory()) &&\n !this.#ignored(e)\n ? e\n : undefined\n }\n\n matchCheckSync(e: Path, ifDir: boolean): Path | undefined {\n if (ifDir && this.opts.nodir) return undefined\n let rpc: Path | undefined\n if (this.opts.realpath) {\n rpc = e.realpathCached() || e.realpathSync()\n if (!rpc) return undefined\n e = rpc\n }\n const needStat = e.isUnknown() || this.opts.stat\n return this.matchCheckTest(needStat ? e.lstatSync() : e, ifDir)\n }\n\n abstract matchEmit(p: Result): void\n abstract matchEmit(p: string | Path): void\n\n matchFinish(e: Path, absolute: boolean) {\n if (this.#ignored(e)) return\n const abs =\n this.opts.absolute === undefined ? absolute : this.opts.absolute\n this.seen.add(e)\n const mark = this.opts.mark && e.isDirectory() ? this.#sep : ''\n // ok, we have what we need!\n if (this.opts.withFileTypes) {\n this.matchEmit(e)\n } else if (abs) {\n const abs = this.opts.posix ? e.fullpathPosix() : e.fullpath()\n this.matchEmit(abs + mark)\n } else {\n const rel = this.opts.posix ? e.relativePosix() : e.relative()\n const pre =\n this.opts.dotRelative && !rel.startsWith('..' + this.#sep)\n ? '.' 
+ this.#sep\n : ''\n this.matchEmit(!rel ? '.' + mark : pre + rel + mark)\n }\n }\n\n async match(e: Path, absolute: boolean, ifDir: boolean): Promise {\n const p = await this.matchCheck(e, ifDir)\n if (p) this.matchFinish(p, absolute)\n }\n\n matchSync(e: Path, absolute: boolean, ifDir: boolean): void {\n const p = this.matchCheckSync(e, ifDir)\n if (p) this.matchFinish(p, absolute)\n }\n\n walkCB(target: Path, patterns: Pattern[], cb: () => any) {\n /* c8 ignore start */\n if (this.signal?.aborted) cb()\n /* c8 ignore stop */\n this.walkCB2(target, patterns, new Processor(this.opts), cb)\n }\n\n walkCB2(\n target: Path,\n patterns: Pattern[],\n processor: Processor,\n cb: () => any\n ) {\n if (this.#childrenIgnored(target)) return cb()\n if (this.signal?.aborted) cb()\n if (this.paused) {\n this.onResume(() => this.walkCB2(target, patterns, processor, cb))\n return\n }\n processor.processPatterns(target, patterns)\n\n // done processing. all of the above is sync, can be abstracted out.\n // subwalks is a map of paths to the entry filters they need\n // matches is a map of paths to [absolute, ifDir] tuples.\n let tasks = 1\n const next = () => {\n if (--tasks === 0) cb()\n }\n\n for (const [m, absolute, ifDir] of processor.matches.entries()) {\n if (this.#ignored(m)) continue\n tasks++\n this.match(m, absolute, ifDir).then(() => next())\n }\n\n for (const t of processor.subwalkTargets()) {\n if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) {\n continue\n }\n tasks++\n const childrenCached = t.readdirCached()\n if (t.calledReaddir())\n this.walkCB3(t, childrenCached, processor, next)\n else {\n t.readdirCB(\n (_, entries) => this.walkCB3(t, entries, processor, next),\n true\n )\n }\n }\n\n next()\n }\n\n walkCB3(\n target: Path,\n entries: Path[],\n processor: Processor,\n cb: () => any\n ) {\n processor = processor.filterEntries(target, entries)\n\n let tasks = 1\n const next = () => {\n if (--tasks === 0) cb()\n }\n\n for (const [m, absolute, ifDir] of processor.matches.entries()) {\n if (this.#ignored(m)) continue\n tasks++\n this.match(m, absolute, ifDir).then(() => next())\n }\n for (const [target, patterns] of processor.subwalks.entries()) {\n tasks++\n this.walkCB2(target, patterns, processor.child(), next)\n }\n\n next()\n }\n\n walkCBSync(target: Path, patterns: Pattern[], cb: () => any) {\n /* c8 ignore start */\n if (this.signal?.aborted) cb()\n /* c8 ignore stop */\n this.walkCB2Sync(target, patterns, new Processor(this.opts), cb)\n }\n\n walkCB2Sync(\n target: Path,\n patterns: Pattern[],\n processor: Processor,\n cb: () => any\n ) {\n if (this.#childrenIgnored(target)) return cb()\n if (this.signal?.aborted) cb()\n if (this.paused) {\n this.onResume(() =>\n this.walkCB2Sync(target, patterns, processor, cb)\n )\n return\n }\n processor.processPatterns(target, patterns)\n\n // done processing. 
all of the above is sync, can be abstracted out.\n // subwalks is a map of paths to the entry filters they need\n // matches is a map of paths to [absolute, ifDir] tuples.\n let tasks = 1\n const next = () => {\n if (--tasks === 0) cb()\n }\n\n for (const [m, absolute, ifDir] of processor.matches.entries()) {\n if (this.#ignored(m)) continue\n this.matchSync(m, absolute, ifDir)\n }\n\n for (const t of processor.subwalkTargets()) {\n if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) {\n continue\n }\n tasks++\n const children = t.readdirSync()\n this.walkCB3Sync(t, children, processor, next)\n }\n\n next()\n }\n\n walkCB3Sync(\n target: Path,\n entries: Path[],\n processor: Processor,\n cb: () => any\n ) {\n processor = processor.filterEntries(target, entries)\n\n let tasks = 1\n const next = () => {\n if (--tasks === 0) cb()\n }\n\n for (const [m, absolute, ifDir] of processor.matches.entries()) {\n if (this.#ignored(m)) continue\n this.matchSync(m, absolute, ifDir)\n }\n for (const [target, patterns] of processor.subwalks.entries()) {\n tasks++\n this.walkCB2Sync(target, patterns, processor.child(), next)\n }\n\n next()\n }\n}\n\nexport class GlobWalker<\n O extends GlobWalkerOpts = GlobWalkerOpts\n> extends GlobUtil {\n matches: O extends GWOFileTypesTrue\n ? Set\n : O extends GWOFileTypesFalse\n ? Set\n : O extends GWOFileTypesUnset\n ? Set\n : Set\n\n constructor(patterns: Pattern[], path: Path, opts: O) {\n super(patterns, path, opts)\n this.matches = new Set() as Matches\n }\n\n matchEmit(e: Result): void\n matchEmit(e: Path | string): void {\n this.matches.add(e)\n }\n\n async walk(): Promise> {\n if (this.signal?.aborted) throw this.signal.reason\n if (this.path.isUnknown()) {\n await this.path.lstat()\n }\n await new Promise((res, rej) => {\n this.walkCB(this.path, this.patterns, () => {\n if (this.signal?.aborted) {\n rej(this.signal.reason)\n } else {\n res(this.matches)\n }\n })\n })\n return this.matches\n }\n\n walkSync(): Matches {\n if (this.signal?.aborted) throw this.signal.reason\n if (this.path.isUnknown()) {\n this.path.lstatSync()\n }\n // nothing for the callback to do, because this never pauses\n this.walkCBSync(this.path, this.patterns, () => {\n if (this.signal?.aborted) throw this.signal.reason\n })\n return this.matches\n }\n}\n\nexport class GlobStream<\n O extends GlobWalkerOpts = GlobWalkerOpts\n> extends GlobUtil {\n results: O extends GWOFileTypesTrue\n ? Minipass\n : O extends GWOFileTypesFalse\n ? Minipass\n : O extends GWOFileTypesUnset\n ? 
Minipass\n : Minipass\n\n constructor(patterns: Pattern[], path: Path, opts: O) {\n super(patterns, path, opts)\n this.results = new Minipass({\n signal: this.signal,\n objectMode: true,\n }) as MatchStream\n this.results.on('drain', () => this.resume())\n this.results.on('resume', () => this.resume())\n }\n\n matchEmit(e: Result): void\n matchEmit(e: Path | string): void {\n this.results.write(e)\n if (!this.results.flowing) this.pause()\n }\n\n stream(): MatchStream {\n const target = this.path\n if (target.isUnknown()) {\n target.lstat().then(() => {\n this.walkCB(target, this.patterns, () => this.results.end())\n })\n } else {\n this.walkCB(target, this.patterns, () => this.results.end())\n }\n return this.results\n }\n\n streamSync(): MatchStream {\n if (this.path.isUnknown()) {\n this.path.lstatSync()\n }\n this.walkCBSync(this.path, this.patterns, () => this.results.end())\n return this.results\n }\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/glob.d.ts b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/glob.d.ts new file mode 100644 index 00000000000000..a8b3da7722b652 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/glob.d.ts @@ -0,0 +1,344 @@ +/// +import { Minimatch } from 'minimatch'; +import { Minipass } from 'minipass'; +import { FSOption, Path, PathScurry } from 'path-scurry'; +import { IgnoreLike } from './ignore.js'; +import { Pattern } from './pattern.js'; +export type MatchSet = Minimatch['set']; +export type GlobParts = Exclude; +/** + * A `GlobOptions` object may be provided to any of the exported methods, and + * must be provided to the `Glob` constructor. + * + * All options are optional, boolean, and false by default, unless otherwise + * noted. + * + * All resolved options are added to the Glob object as properties. + * + * If you are running many `glob` operations, you can pass a Glob object as the + * `options` argument to a subsequent operation to share the previously loaded + * cache. + */ +export interface GlobOptions { + /** + * Set to `true` to always receive absolute paths for + * matched files. Set to `false` to always return relative paths. + * + * When this option is not set, absolute paths are returned for patterns + * that are absolute, and otherwise paths are returned that are relative + * to the `cwd` setting. + * + * This does _not_ make an extra system call to get + * the realpath, it only does string path resolution. + * + * Conflicts with {@link withFileTypes} + */ + absolute?: boolean; + /** + * Set to false to enable {@link windowsPathsNoEscape} + * + * @deprecated + */ + allowWindowsEscape?: boolean; + /** + * The current working directory in which to search. Defaults to + * `process.cwd()`. + * + * May be eiher a string path or a `file://` URL object or string. + */ + cwd?: string | URL; + /** + * Include `.dot` files in normal matches and `globstar` + * matches. Note that an explicit dot in a portion of the pattern + * will always match dot files. + */ + dot?: boolean; + /** + * Prepend all relative path strings with `./` (or `.\` on Windows). + * + * Without this option, returned relative paths are "bare", so instead of + * returning `'./foo/bar'`, they are returned as `'foo/bar'`. + * + * Relative patterns starting with `'../'` are not prepended with `./`, even + * if this option is set. 
+ */ + dotRelative?: boolean; + /** + * Follow symlinked directories when expanding `**` + * patterns. This can result in a lot of duplicate references in + * the presence of cyclic links, and make performance quite bad. + * + * By default, a `**` in a pattern will follow 1 symbolic link if + * it is not the first item in the pattern, or none if it is the + * first item in the pattern, following the same behavior as Bash. + */ + follow?: boolean; + /** + * string or string[], or an object with `ignore` and `ignoreChildren` + * methods. + * + * If a string or string[] is provided, then this is treated as a glob + * pattern or array of glob patterns to exclude from matches. To ignore all + * children within a directory, as well as the entry itself, append `'/**'` + * to the ignore pattern. + * + * **Note** `ignore` patterns are _always_ in `dot:true` mode, regardless of + * any other settings. + * + * If an object is provided that has `ignored(path)` and/or + * `childrenIgnored(path)` methods, then these methods will be called to + * determine whether any Path is a match or if its children should be + * traversed, respectively. + */ + ignore?: string | string[] | IgnoreLike; + /** + * Treat brace expansion like `{a,b}` as a "magic" pattern. Has no + * effect if {@link nobrace} is set. + * + * Only has effect on the {@link hasMagic} function. + */ + magicalBraces?: boolean; + /** + * Add a `/` character to directory matches. Note that this requires + * additional stat calls in some cases. + */ + mark?: boolean; + /** + * Perform a basename-only match if the pattern does not contain any slash + * characters. That is, `*.js` would be treated as equivalent to + * `**\/*.js`, matching all js files in all directories. + */ + matchBase?: boolean; + /** + * Limit the directory traversal to a given depth below the cwd. + * Note that this does NOT prevent traversal to sibling folders, + * root patterns, and so on. It only limits the maximum folder depth + * that the walk will descend, relative to the cwd. + */ + maxDepth?: number; + /** + * Do not expand `{a,b}` and `{1..3}` brace sets. + */ + nobrace?: boolean; + /** + * Perform a case-insensitive match. This defaults to `true` on macOS and + * Windows systems, and `false` on all others. + * + * **Note** `nocase` should only be explicitly set when it is + * known that the filesystem's case sensitivity differs from the + * platform default. If set `true` on case-sensitive file + * systems, or `false` on case-insensitive file systems, then the + * walk may return more or less results than expected. + */ + nocase?: boolean; + /** + * Do not match directories, only files. (Note: to match + * _only_ directories, put a `/` at the end of the pattern.) + */ + nodir?: boolean; + /** + * Do not match "extglob" patterns such as `+(a|b)`. + */ + noext?: boolean; + /** + * Do not match `**` against multiple filenames. (Ie, treat it as a normal + * `*` instead.) + * + * Conflicts with {@link matchBase} + */ + noglobstar?: boolean; + /** + * Defaults to value of `process.platform` if available, or `'linux'` if + * not. Setting `platform:'win32'` on non-Windows systems may cause strange + * behavior. + */ + platform?: NodeJS.Platform; + /** + * Set to true to call `fs.realpath` on all of the + * results. In the case of an entry that cannot be resolved, the + * entry is omitted. This incurs a slight performance penalty, of + * course, because of the added system calls. 
+ */ + realpath?: boolean; + /** + * + * A string path resolved against the `cwd` option, which + * is used as the starting point for absolute patterns that start + * with `/`, (but not drive letters or UNC paths on Windows). + * + * Note that this _doesn't_ necessarily limit the walk to the + * `root` directory, and doesn't affect the cwd starting point for + * non-absolute patterns. A pattern containing `..` will still be + * able to traverse out of the root directory, if it is not an + * actual root directory on the filesystem, and any non-absolute + * patterns will be matched in the `cwd`. For example, the + * pattern `/../*` with `{root:'/some/path'}` will return all + * files in `/some`, not all files in `/some/path`. The pattern + * `*` with `{root:'/some/path'}` will return all the entries in + * the cwd, not the entries in `/some/path`. + * + * To start absolute and non-absolute patterns in the same + * path, you can use `{root:''}`. However, be aware that on + * Windows systems, a pattern like `x:/*` or `//host/share/*` will + * _always_ start in the `x:/` or `//host/share` directory, + * regardless of the `root` setting. + */ + root?: string; + /** + * A [PathScurry](http://npm.im/path-scurry) object used + * to traverse the file system. If the `nocase` option is set + * explicitly, then any provided `scurry` object must match this + * setting. + */ + scurry?: PathScurry; + /** + * Call `lstat()` on all entries, whether required or not to determine + * if it's a valid match. When used with {@link withFileTypes}, this means + * that matches will include data such as modified time, permissions, and + * so on. Note that this will incur a performance cost due to the added + * system calls. + */ + stat?: boolean; + /** + * An AbortSignal which will cancel the Glob walk when + * triggered. + */ + signal?: AbortSignal; + /** + * Use `\\` as a path separator _only_, and + * _never_ as an escape character. If set, all `\\` characters are + * replaced with `/` in the pattern. + * + * Note that this makes it **impossible** to match against paths + * containing literal glob pattern characters, but allows matching + * with patterns constructed using `path.join()` and + * `path.resolve()` on Windows platforms, mimicking the (buggy!) + * behavior of Glob v7 and before on Windows. Please use with + * caution, and be mindful of [the caveat below about Windows + * paths](#windows). (For legacy reasons, this is also set if + * `allowWindowsEscape` is set to the exact value `false`.) + */ + windowsPathsNoEscape?: boolean; + /** + * Return [PathScurry](http://npm.im/path-scurry) + * `Path` objects instead of strings. These are similar to a + * NodeJS `Dirent` object, but with additional methods and + * properties. + * + * Conflicts with {@link absolute} + */ + withFileTypes?: boolean; + /** + * An fs implementation to override some or all of the defaults. See + * http://npm.im/path-scurry for details about what can be overridden. + */ + fs?: FSOption; + /** + * Just passed along to Minimatch. Note that this makes all pattern + * matching operations slower and *extremely* noisy. + */ + debug?: boolean; + /** + * Return `/` delimited paths, even on Windows. + * + * On posix systems, this has no effect. 
But, on Windows, it means that + * paths will be `/` delimited, and absolute paths will be their full + * resolved UNC forms, eg instead of `'C:\\foo\\bar'`, it would return + * `'//?/C:/foo/bar'` + */ + posix?: boolean; +} +export type GlobOptionsWithFileTypesTrue = GlobOptions & { + withFileTypes: true; + absolute?: undefined; + mark?: undefined; + posix?: undefined; +}; +export type GlobOptionsWithFileTypesFalse = GlobOptions & { + withFileTypes?: false; +}; +export type GlobOptionsWithFileTypesUnset = GlobOptions & { + withFileTypes?: undefined; +}; +export type Result = Opts extends GlobOptionsWithFileTypesTrue ? Path : Opts extends GlobOptionsWithFileTypesFalse ? string : Opts extends GlobOptionsWithFileTypesUnset ? string : string | Path; +export type Results = Result[]; +export type FileTypes = Opts extends GlobOptionsWithFileTypesTrue ? true : Opts extends GlobOptionsWithFileTypesFalse ? false : Opts extends GlobOptionsWithFileTypesUnset ? false : boolean; +/** + * An object that can perform glob pattern traversals. + */ +export declare class Glob implements GlobOptions { + absolute?: boolean; + cwd: string; + root?: string; + dot: boolean; + dotRelative: boolean; + follow: boolean; + ignore?: string | string[] | IgnoreLike; + magicalBraces: boolean; + mark?: boolean; + matchBase: boolean; + maxDepth: number; + nobrace: boolean; + nocase: boolean; + nodir: boolean; + noext: boolean; + noglobstar: boolean; + pattern: string[]; + platform: NodeJS.Platform; + realpath: boolean; + scurry: PathScurry; + stat: boolean; + signal?: AbortSignal; + windowsPathsNoEscape: boolean; + withFileTypes: FileTypes; + /** + * The options provided to the constructor. + */ + opts: Opts; + /** + * An array of parsed immutable {@link Pattern} objects. + */ + patterns: Pattern[]; + /** + * All options are stored as properties on the `Glob` object. + * + * See {@link GlobOptions} for full options descriptions. + * + * Note that a previous `Glob` object can be passed as the + * `GlobOptions` to another `Glob` instantiation to re-use settings + * and caches with a new pattern. + * + * Traversal functions can be called multiple times to run the walk + * again. + */ + constructor(pattern: string | string[], opts: Opts); + /** + * Returns a Promise that resolves to the results array. + */ + walk(): Promise>; + /** + * synchronous {@link Glob.walk} + */ + walkSync(): Results; + /** + * Stream results asynchronously. + */ + stream(): Minipass, Result>; + /** + * Stream results synchronously. + */ + streamSync(): Minipass, Result>; + /** + * Default sync iteration function. Returns a Generator that + * iterates over the results. + */ + iterateSync(): Generator, void, void>; + [Symbol.iterator](): Generator, void, void>; + /** + * Default async iteration function. Returns an AsyncGenerator that + * iterates over the results. 
+ */ + iterate(): AsyncGenerator, void, void>; + [Symbol.asyncIterator](): AsyncGenerator, void, void>; +} +//# sourceMappingURL=glob.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/glob.d.ts.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/glob.d.ts.map new file mode 100644 index 00000000000000..d45258ac24a580 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/glob.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"glob.d.ts","sourceRoot":"","sources":["../../src/glob.ts"],"names":[],"mappings":";AAAA,OAAO,EAAE,SAAS,EAAoB,MAAM,WAAW,CAAA;AACvD,OAAO,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAA;AACnC,OAAO,EACL,QAAQ,EACR,IAAI,EACJ,UAAU,EAIX,MAAM,aAAa,CAAA;AAEpB,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AACxC,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAA;AAGtC,MAAM,MAAM,QAAQ,GAAG,SAAS,CAAC,KAAK,CAAC,CAAA;AACvC,MAAM,MAAM,SAAS,GAAG,OAAO,CAAC,SAAS,CAAC,WAAW,CAAC,EAAE,SAAS,CAAC,CAAA;AAWlE;;;;;;;;;;;;GAYG;AACH,MAAM,WAAW,WAAW;IAC1B;;;;;;;;;;;;OAYG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAA;IAElB;;;;OAIG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAA;IAE5B;;;;;OAKG;IACH,GAAG,CAAC,EAAE,MAAM,GAAG,GAAG,CAAA;IAElB;;;;OAIG;IACH,GAAG,CAAC,EAAE,OAAO,CAAA;IAEb;;;;;;;;OAQG;IACH,WAAW,CAAC,EAAE,OAAO,CAAA;IAErB;;;;;;;;OAQG;IACH,MAAM,CAAC,EAAE,OAAO,CAAA;IAEhB;;;;;;;;;;;;;;;;OAgBG;IACH,MAAM,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,UAAU,CAAA;IAEvC;;;;;OAKG;IACH,aAAa,CAAC,EAAE,OAAO,CAAA;IAEvB;;;OAGG;IACH,IAAI,CAAC,EAAE,OAAO,CAAA;IAEd;;;;OAIG;IACH,SAAS,CAAC,EAAE,OAAO,CAAA;IAEnB;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAA;IAEjB;;OAEG;IACH,OAAO,CAAC,EAAE,OAAO,CAAA;IAEjB;;;;;;;;;OASG;IACH,MAAM,CAAC,EAAE,OAAO,CAAA;IAEhB;;;OAGG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;OAEG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;;;;OAKG;IACH,UAAU,CAAC,EAAE,OAAO,CAAA;IAEpB;;;;OAIG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC,QAAQ,CAAA;IAE1B;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAA;IAElB;;;;;;;;;;;;;;;;;;;;;;OAsBG;IACH,IAAI,CAAC,EAAE,MAAM,CAAA;IAEb;;;;;OAKG;IACH,MAAM,CAAC,EAAE,UAAU,CAAA;IAEnB;;;;;;OAMG;IACH,IAAI,CAAC,EAAE,OAAO,CAAA;IAEd;;;OAGG;IACH,MAAM,CAAC,EAAE,WAAW,CAAA;IAEpB;;;;;;;;;;;;;OAaG;IACH,oBAAoB,CAAC,EAAE,OAAO,CAAA;IAE9B;;;;;;;OAOG;IACH,aAAa,CAAC,EAAE,OAAO,CAAA;IAEvB;;;OAGG;IACH,EAAE,CAAC,EAAE,QAAQ,CAAA;IAEb;;;OAGG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;;;;;;OAOG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;CAChB;AAED,MAAM,MAAM,4BAA4B,GAAG,WAAW,GAAG;IACvD,aAAa,EAAE,IAAI,CAAA;IAEnB,QAAQ,CAAC,EAAE,SAAS,CAAA;IACpB,IAAI,CAAC,EAAE,SAAS,CAAA;IAChB,KAAK,CAAC,EAAE,SAAS,CAAA;CAClB,CAAA;AAED,MAAM,MAAM,6BAA6B,GAAG,WAAW,GAAG;IACxD,aAAa,CAAC,EAAE,KAAK,CAAA;CACtB,CAAA;AAED,MAAM,MAAM,6BAA6B,GAAG,WAAW,GAAG;IACxD,aAAa,CAAC,EAAE,SAAS,CAAA;CAC1B,CAAA;AAED,MAAM,MAAM,MAAM,CAAC,IAAI,IAAI,IAAI,SAAS,4BAA4B,GAChE,IAAI,GACJ,IAAI,SAAS,6BAA6B,GAC1C,MAAM,GACN,IAAI,SAAS,6BAA6B,GAC1C,MAAM,GACN,MAAM,GAAG,IAAI,CAAA;AACjB,MAAM,MAAM,OAAO,CAAC,IAAI,IAAI,MAAM,CAAC,IAAI,CAAC,EAAE,CAAA;AAE1C,MAAM,MAAM,SAAS,CAAC,IAAI,IAAI,IAAI,SAAS,4BAA4B,GACnE,IAAI,GACJ,IAAI,SAAS,6BAA6B,GAC1C,KAAK,GACL,IAAI,SAAS,6BAA6B,GAC1C,KAAK,GACL,OAAO,CAAA;AAEX;;GAEG;AACH,qBAAa,IAAI,CAAC,IAAI,SAAS,WAAW,CAAE,YAAW,WAAW;IAChE,QAAQ,CAAC,EAAE,OAAO,CAAA;IAClB,GAAG,EAAE,MAAM,CAAA;IACX,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,GAAG,EAAE,OAAO,CAAA;IACZ,WAAW,EAAE,OAAO,CAAA;IACpB,MAAM,EAAE,OAAO,CAAA;IACf,MAAM,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,UAAU,CAAA;IACvC,aAAa,EAAE,OAAO,CAAA;IACtB,IAAI,CAAC,EAAE,OAAO,CAAA;IACd,SAAS,EAAE,OAAO,CAAA;IAClB,QAAQ,EAAE,MAAM,CAAA;IAChB,OAAO,EAAE,OAAO,CAAA;IAChB,MAAM,EAAE,OAAO,CAAA;IACf,KAAK,EAAE,OAAO,CAAA;IACd,KAAK,EAAE,OAAO,CAAA;IACd,UAAU,EAAE,OA
AO,CAAA;IACnB,OAAO,EAAE,MAAM,EAAE,CAAA;IACjB,QAAQ,EAAE,MAAM,CAAC,QAAQ,CAAA;IACzB,QAAQ,EAAE,OAAO,CAAA;IACjB,MAAM,EAAE,UAAU,CAAA;IAClB,IAAI,EAAE,OAAO,CAAA;IACb,MAAM,CAAC,EAAE,WAAW,CAAA;IACpB,oBAAoB,EAAE,OAAO,CAAA;IAC7B,aAAa,EAAE,SAAS,CAAC,IAAI,CAAC,CAAA;IAE9B;;OAEG;IACH,IAAI,EAAE,IAAI,CAAA;IAEV;;OAEG;IACH,QAAQ,EAAE,OAAO,EAAE,CAAA;IAEnB;;;;;;;;;;;OAWG;gBACS,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI;IAwHlD;;OAEG;IACG,IAAI,IAAI,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;IAmBpC;;OAEG;IACH,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;IAezB;;OAEG;IACH,MAAM,IAAI,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC;IAa9C;;OAEG;IACH,UAAU,IAAI,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC;IAalD;;;OAGG;IACH,WAAW,IAAI,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,IAAI,CAAC;IAGlD,CAAC,MAAM,CAAC,QAAQ,CAAC;IAIjB;;;OAGG;IACH,OAAO,IAAI,cAAc,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,IAAI,CAAC;IAGnD,CAAC,MAAM,CAAC,aAAa,CAAC;CAGvB"} \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/glob.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/glob.js new file mode 100644 index 00000000000000..8ff26154427be9 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/glob.js @@ -0,0 +1,234 @@ +import { Minimatch } from 'minimatch'; +import { PathScurry, PathScurryDarwin, PathScurryPosix, PathScurryWin32, } from 'path-scurry'; +import { fileURLToPath } from 'url'; +import { Pattern } from './pattern.js'; +import { GlobStream, GlobWalker } from './walker.js'; +// if no process global, just call it linux. +// so we default to case-sensitive, / separators +const defaultPlatform = typeof process === 'object' && + process && + typeof process.platform === 'string' + ? process.platform + : 'linux'; +/** + * An object that can perform glob pattern traversals. + */ +export class Glob { + absolute; + cwd; + root; + dot; + dotRelative; + follow; + ignore; + magicalBraces; + mark; + matchBase; + maxDepth; + nobrace; + nocase; + nodir; + noext; + noglobstar; + pattern; + platform; + realpath; + scurry; + stat; + signal; + windowsPathsNoEscape; + withFileTypes; + /** + * The options provided to the constructor. + */ + opts; + /** + * An array of parsed immutable {@link Pattern} objects. + */ + patterns; + /** + * All options are stored as properties on the `Glob` object. + * + * See {@link GlobOptions} for full options descriptions. + * + * Note that a previous `Glob` object can be passed as the + * `GlobOptions` to another `Glob` instantiation to re-use settings + * and caches with a new pattern. + * + * Traversal functions can be called multiple times to run the walk + * again. 
+ */ + constructor(pattern, opts) { + /* c8 ignore start */ + if (!opts) + throw new TypeError('glob options required'); + /* c8 ignore stop */ + this.withFileTypes = !!opts.withFileTypes; + this.signal = opts.signal; + this.follow = !!opts.follow; + this.dot = !!opts.dot; + this.dotRelative = !!opts.dotRelative; + this.nodir = !!opts.nodir; + this.mark = !!opts.mark; + if (!opts.cwd) { + this.cwd = ''; + } + else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) { + opts.cwd = fileURLToPath(opts.cwd); + } + this.cwd = opts.cwd || ''; + this.root = opts.root; + this.magicalBraces = !!opts.magicalBraces; + this.nobrace = !!opts.nobrace; + this.noext = !!opts.noext; + this.realpath = !!opts.realpath; + this.absolute = opts.absolute; + this.noglobstar = !!opts.noglobstar; + this.matchBase = !!opts.matchBase; + this.maxDepth = + typeof opts.maxDepth === 'number' ? opts.maxDepth : Infinity; + this.stat = !!opts.stat; + this.ignore = opts.ignore; + if (this.withFileTypes && this.absolute !== undefined) { + throw new Error('cannot set absolute and withFileTypes:true'); + } + if (typeof pattern === 'string') { + pattern = [pattern]; + } + this.windowsPathsNoEscape = + !!opts.windowsPathsNoEscape || + opts.allowWindowsEscape === false; + if (this.windowsPathsNoEscape) { + pattern = pattern.map(p => p.replace(/\\/g, '/')); + } + if (this.matchBase) { + if (opts.noglobstar) { + throw new TypeError('base matching requires globstar'); + } + pattern = pattern.map(p => (p.includes('/') ? p : `./**/${p}`)); + } + this.pattern = pattern; + this.platform = opts.platform || defaultPlatform; + this.opts = { ...opts, platform: this.platform }; + if (opts.scurry) { + this.scurry = opts.scurry; + if (opts.nocase !== undefined && + opts.nocase !== opts.scurry.nocase) { + throw new Error('nocase option contradicts provided scurry option'); + } + } + else { + const Scurry = opts.platform === 'win32' + ? PathScurryWin32 + : opts.platform === 'darwin' + ? PathScurryDarwin + : opts.platform + ? PathScurryPosix + : PathScurry; + this.scurry = new Scurry(this.cwd, { + nocase: opts.nocase, + fs: opts.fs, + }); + } + this.nocase = this.scurry.nocase; + // If you do nocase:true on a case-sensitive file system, then + // we need to use regexps instead of strings for non-magic + // path portions, because statting `aBc` won't return results + // for the file `AbC` for example. + const nocaseMagicOnly = this.platform === 'darwin' || this.platform === 'win32'; + const mmo = { + // default nocase based on platform + ...opts, + dot: this.dot, + matchBase: this.matchBase, + nobrace: this.nobrace, + nocase: this.nocase, + nocaseMagicOnly, + nocomment: true, + noext: this.noext, + nonegate: true, + optimizationLevel: 2, + platform: this.platform, + windowsPathsNoEscape: this.windowsPathsNoEscape, + debug: !!this.opts.debug, + }; + const mms = this.pattern.map(p => new Minimatch(p, mmo)); + const [matchSet, globParts] = mms.reduce((set, m) => { + set[0].push(...m.set); + set[1].push(...m.globParts); + return set; + }, [[], []]); + this.patterns = matchSet.map((set, i) => { + return new Pattern(set, globParts[i], 0, this.platform); + }); + } + async walk() { + // Walkers always return array of Path objects, so we just have to + // coerce them into the right shape. It will have already called + // realpath() if the option was set to do so, so we know that's cached. 
+ // start out knowing the cwd, at least + return [ + ...(await new GlobWalker(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity + ? this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + }).walk()), + ]; + } + walkSync() { + return [ + ...new GlobWalker(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity + ? this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + }).walkSync(), + ]; + } + stream() { + return new GlobStream(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity + ? this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + }).stream(); + } + streamSync() { + return new GlobStream(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity + ? this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + }).streamSync(); + } + /** + * Default sync iteration function. Returns a Generator that + * iterates over the results. + */ + iterateSync() { + return this.streamSync()[Symbol.iterator](); + } + [Symbol.iterator]() { + return this.iterateSync(); + } + /** + * Default async iteration function. Returns an AsyncGenerator that + * iterates over the results. + */ + iterate() { + return this.stream()[Symbol.asyncIterator](); + } + [Symbol.asyncIterator]() { + return this.iterate(); + } +} +//# sourceMappingURL=glob.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/glob.js.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/glob.js.map new file mode 100644 index 00000000000000..94558c1d2c66a4 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/glob.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"glob.js","sourceRoot":"","sources":["../../src/glob.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAoB,MAAM,WAAW,CAAA;AAEvD,OAAO,EAGL,UAAU,EACV,gBAAgB,EAChB,eAAe,EACf,eAAe,GAChB,MAAM,aAAa,CAAA;AACpB,OAAO,EAAE,aAAa,EAAE,MAAM,KAAK,CAAA;AAEnC,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAA;AACtC,OAAO,EAAE,UAAU,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AAKpD,4CAA4C;AAC5C,gDAAgD;AAChD,MAAM,eAAe,GACnB,OAAO,OAAO,KAAK,QAAQ;IAC3B,OAAO;IACP,OAAO,OAAO,CAAC,QAAQ,KAAK,QAAQ;IAClC,CAAC,CAAC,OAAO,CAAC,QAAQ;IAClB,CAAC,CAAC,OAAO,CAAA;AAgTb;;GAEG;AACH,MAAM,OAAO,IAAI;IACf,QAAQ,CAAU;IAClB,GAAG,CAAQ;IACX,IAAI,CAAS;IACb,GAAG,CAAS;IACZ,WAAW,CAAS;IACpB,MAAM,CAAS;IACf,MAAM,CAAiC;IACvC,aAAa,CAAS;IACtB,IAAI,CAAU;IACd,SAAS,CAAS;IAClB,QAAQ,CAAQ;IAChB,OAAO,CAAS;IAChB,MAAM,CAAS;IACf,KAAK,CAAS;IACd,KAAK,CAAS;IACd,UAAU,CAAS;IACnB,OAAO,CAAU;IACjB,QAAQ,CAAiB;IACzB,QAAQ,CAAS;IACjB,MAAM,CAAY;IAClB,IAAI,CAAS;IACb,MAAM,CAAc;IACpB,oBAAoB,CAAS;IAC7B,aAAa,CAAiB;IAE9B;;OAEG;IACH,IAAI,CAAM;IAEV;;OAEG;IACH,QAAQ,CAAW;IAEnB;;;;;;;;;;;OAWG;IACH,YAAY,OAA0B,EAAE,IAAU;QAChD,qBAAqB;QACrB,IAAI,CAAC,IAAI;YAAE,MAAM,IAAI,SAAS,CAAC,uBAAuB,CAAC,CAAA;QACvD,oBAAoB;QACpB,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC,IAAI,CAAC,aAAgC,CAAA;QAC5D,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;QACzB,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,IAAI,CAAC,MAAM,CAAA;QAC3B,IAAI,CAAC,GAAG,GAAG,CAAC,CAAC,IAAI,CAAC,GAAG,CAAA;QACrB,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC,IAAI,CAAC,WAAW,CAAA;QACrC,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,IAAI,CAAC,KAAK,CAAA;QACzB,IAAI,CAAC,IAAI,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,CAAA;QACvB,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE;YACb,IAAI,CAAC,GAAG,GAAG,EAAE,CAAA;SACd;aAAM,IAAI,IAAI,CAAC,GAAG,YAAY,GAAG,IAAI,IAAI,CAAC,GAAG,CAAC,UAAU,CAAC,SAAS,CAAC,EAAE;YACpE,IAAI,CAAC,GAAG,GAAG,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;SACnC;QACD,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,IAAI,EAAE,CAAA;QACzB,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,CAAA;QACrB,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC,IAAI,CAAC,aAAa,CAAA;QACzC,IAAI,CAAC,OAAO,GAAG,CAAC,CAAC,IAAI,CAAC,OAAO,CAAA;QAC7B,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,IAAI,CAAC,KAAK,CAAA;QACzB,IAAI,CAAC,QAAQ,GAAG,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAA;QAC/B,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAA;QAE7B,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC,IAAI,CAAC,UAAU,CAAA;QACnC,IAAI,CAAC,SAAS,GAAG,CAAC,CAAC,IAAI,CAAC,SAAS,CAAA;QACjC,IAAI,CAAC,QAAQ;YACX,OAAO,IAAI,CAAC,QAAQ,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAA;QAC9D,IAAI,CAAC,IAAI,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,CAAA;QACvB,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;QAEzB,IAAI,IAAI,CAAC,aAAa,IAAI,IAAI,CAAC,QAAQ,KAAK,SAAS,EAAE;YACrD,MAAM,IAAI,KAAK,CAAC,4CAA4C,CAAC,CAAA;SAC9D;QAED,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;YAC/B,OAAO,GAAG,CAAC,OAAO,CAAC,CAAA;SACpB;QAED,IAAI,CAAC,oBAAoB;YACvB,CAAC,CAAC,IAAI,CAAC,oBAAoB;gBAC1B,IAAoB,CAAC,kBAAkB,KAAK,KAAK,CAAA;QAEpD,IAAI,IAAI,CAAC,oBAAoB,EAAE;YAC7B,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAA;SAClD;QAED,IAAI,IAAI,CAAC,SAAS,EAAE;YAClB,IAAI,IAAI,CAAC,UAAU,EAAE;gBACnB,MAAM,IAAI,SAAS,CAAC,iCAAiC,CAAC,CAAA;aACvD;YACD,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAA;SAChE;QAED,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;QAEtB,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,IAAI,eAAe,CAAA;QAChD,IAAI,CAAC,IAAI,GAAG,EAAE,GAAG,IAAI,EAAE,QAAQ,EAAE,IAAI,CAAC,QAAQ,EAAE,CAAA;QAChD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;YACzB,IACE,IAAI,CAAC,MAAM,KAAK,SAAS;gBACzB,IAAI,CAAC,MAAM,KAAK,IAAI,CAAC,MAAM,CAAC,MAAM,EAClC;gBACA,MAAM,IAAI,KAAK,CAAC,kDAAkD,CAAC,CAAA;aACpE;SACF;aAAM;YACL,MAAM,MAAM,GACV,IAAI,C
AAC,QAAQ,KAAK,OAAO;gBACvB,CAAC,CAAC,eAAe;gBACjB,CAAC,CAAC,IAAI,CAAC,QAAQ,KAAK,QAAQ;oBAC5B,CAAC,CAAC,gBAAgB;oBAClB,CAAC,CAAC,IAAI,CAAC,QAAQ;wBACf,CAAC,CAAC,eAAe;wBACjB,CAAC,CAAC,UAAU,CAAA;YAChB,IAAI,CAAC,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE;gBACjC,MAAM,EAAE,IAAI,CAAC,MAAM;gBACnB,EAAE,EAAE,IAAI,CAAC,EAAE;aACZ,CAAC,CAAA;SACH;QACD,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,CAAA;QAEhC,8DAA8D;QAC9D,0DAA0D;QAC1D,6DAA6D;QAC7D,kCAAkC;QAClC,MAAM,eAAe,GACnB,IAAI,CAAC,QAAQ,KAAK,QAAQ,IAAI,IAAI,CAAC,QAAQ,KAAK,OAAO,CAAA;QAEzD,MAAM,GAAG,GAAqB;YAC5B,mCAAmC;YACnC,GAAG,IAAI;YACP,GAAG,EAAE,IAAI,CAAC,GAAG;YACb,SAAS,EAAE,IAAI,CAAC,SAAS;YACzB,OAAO,EAAE,IAAI,CAAC,OAAO;YACrB,MAAM,EAAE,IAAI,CAAC,MAAM;YACnB,eAAe;YACf,SAAS,EAAE,IAAI;YACf,KAAK,EAAE,IAAI,CAAC,KAAK;YACjB,QAAQ,EAAE,IAAI;YACd,iBAAiB,EAAE,CAAC;YACpB,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,oBAAoB,EAAE,IAAI,CAAC,oBAAoB;YAC/C,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK;SACzB,CAAA;QAED,MAAM,GAAG,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,SAAS,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAA;QACxD,MAAM,CAAC,QAAQ,EAAE,SAAS,CAAC,GAAG,GAAG,CAAC,MAAM,CACtC,CAAC,GAA0B,EAAE,CAAC,EAAE,EAAE;YAChC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAA;YACrB,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,SAAS,CAAC,CAAA;YAC3B,OAAO,GAAG,CAAA;QACZ,CAAC,EACD,CAAC,EAAE,EAAE,EAAE,CAAC,CACT,CAAA;QACD,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,CAAC,EAAE,EAAE;YACtC,OAAO,IAAI,OAAO,CAAC,GAAG,EAAE,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAA;QACzD,CAAC,CAAC,CAAA;IACJ,CAAC;IAMD,KAAK,CAAC,IAAI;QACR,kEAAkE;QAClE,iEAAiE;QACjE,uEAAuE;QACvE,sCAAsC;QACtC,OAAO;YACL,GAAG,CAAC,MAAM,IAAI,UAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;gBACvD,GAAG,IAAI,CAAC,IAAI;gBACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;oBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;oBACzC,CAAC,CAAC,QAAQ;gBACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;gBACvB,MAAM,EAAE,IAAI,CAAC,MAAM;aACpB,CAAC,CAAC,IAAI,EAAE,CAAC;SACX,CAAA;IACH,CAAC;IAMD,QAAQ;QACN,OAAO;YACL,GAAG,IAAI,UAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;gBAChD,GAAG,IAAI,CAAC,IAAI;gBACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;oBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;oBACzC,CAAC,CAAC,QAAQ;gBACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;gBACvB,MAAM,EAAE,IAAI,CAAC,MAAM;aACpB,CAAC,CAAC,QAAQ,EAAE;SACd,CAAA;IACH,CAAC;IAMD,MAAM;QACJ,OAAO,IAAI,UAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;YACpD,GAAG,IAAI,CAAC,IAAI;YACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;gBACzC,CAAC,CAAC,QAAQ;YACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,MAAM,EAAE,IAAI,CAAC,MAAM;SACpB,CAAC,CAAC,MAAM,EAAE,CAAA;IACb,CAAC;IAMD,UAAU;QACR,OAAO,IAAI,UAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;YACpD,GAAG,IAAI,CAAC,IAAI;YACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;gBACzC,CAAC,CAAC,QAAQ;YACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,MAAM,EAAE,IAAI,CAAC,MAAM;SACpB,CAAC,CAAC,UAAU,EAAE,CAAA;IACjB,CAAC;IAED;;;OAGG;IACH,WAAW;QACT,OAAO,IAAI,CAAC,UAAU,EAAE,CAAC,MAAM,CAAC,QAAQ,CAAC,EAAE,CAAA;IAC7C,CAAC;IACD,CAAC,MAAM,CAAC,QAAQ,CAAC;QACf,OAAO,IAAI,CAAC,WAAW,EAAE,CAAA;IAC3B,CAAC;IAED;;;OAGG;IACH,OAAO;QACL,OAAO,IAAI,CAAC,MAAM,EAAE,CAAC,MAAM,CAAC,aAAa,CAAC,EAAE,CAAA;IAC9C,CAAC;IACD,CAAC,MAAM,CAAC,aAAa,CAAC;QACpB,OAAO,IAAI,CAAC,OAAO,EAAE,CAAA;IACvB,CAAC;CACF","sourcesContent":["import { Minimatch, MinimatchOptions } from 'minimatch'\nimport { Minipass } from 'minipass'\nimport {\n 
FSOption,\n Path,\n PathScurry,\n PathScurryDarwin,\n PathScurryPosix,\n PathScurryWin32,\n} from 'path-scurry'\nimport { fileURLToPath } from 'url'\nimport { IgnoreLike } from './ignore.js'\nimport { Pattern } from './pattern.js'\nimport { GlobStream, GlobWalker } from './walker.js'\n\nexport type MatchSet = Minimatch['set']\nexport type GlobParts = Exclude\n\n// if no process global, just call it linux.\n// so we default to case-sensitive, / separators\nconst defaultPlatform: NodeJS.Platform =\n typeof process === 'object' &&\n process &&\n typeof process.platform === 'string'\n ? process.platform\n : 'linux'\n\n/**\n * A `GlobOptions` object may be provided to any of the exported methods, and\n * must be provided to the `Glob` constructor.\n *\n * All options are optional, boolean, and false by default, unless otherwise\n * noted.\n *\n * All resolved options are added to the Glob object as properties.\n *\n * If you are running many `glob` operations, you can pass a Glob object as the\n * `options` argument to a subsequent operation to share the previously loaded\n * cache.\n */\nexport interface GlobOptions {\n /**\n * Set to `true` to always receive absolute paths for\n * matched files. Set to `false` to always return relative paths.\n *\n * When this option is not set, absolute paths are returned for patterns\n * that are absolute, and otherwise paths are returned that are relative\n * to the `cwd` setting.\n *\n * This does _not_ make an extra system call to get\n * the realpath, it only does string path resolution.\n *\n * Conflicts with {@link withFileTypes}\n */\n absolute?: boolean\n\n /**\n * Set to false to enable {@link windowsPathsNoEscape}\n *\n * @deprecated\n */\n allowWindowsEscape?: boolean\n\n /**\n * The current working directory in which to search. Defaults to\n * `process.cwd()`.\n *\n * May be eiher a string path or a `file://` URL object or string.\n */\n cwd?: string | URL\n\n /**\n * Include `.dot` files in normal matches and `globstar`\n * matches. Note that an explicit dot in a portion of the pattern\n * will always match dot files.\n */\n dot?: boolean\n\n /**\n * Prepend all relative path strings with `./` (or `.\\` on Windows).\n *\n * Without this option, returned relative paths are \"bare\", so instead of\n * returning `'./foo/bar'`, they are returned as `'foo/bar'`.\n *\n * Relative patterns starting with `'../'` are not prepended with `./`, even\n * if this option is set.\n */\n dotRelative?: boolean\n\n /**\n * Follow symlinked directories when expanding `**`\n * patterns. This can result in a lot of duplicate references in\n * the presence of cyclic links, and make performance quite bad.\n *\n * By default, a `**` in a pattern will follow 1 symbolic link if\n * it is not the first item in the pattern, or none if it is the\n * first item in the pattern, following the same behavior as Bash.\n */\n follow?: boolean\n\n /**\n * string or string[], or an object with `ignore` and `ignoreChildren`\n * methods.\n *\n * If a string or string[] is provided, then this is treated as a glob\n * pattern or array of glob patterns to exclude from matches. 
To ignore all\n * children within a directory, as well as the entry itself, append `'/**'`\n * to the ignore pattern.\n *\n * **Note** `ignore` patterns are _always_ in `dot:true` mode, regardless of\n * any other settings.\n *\n * If an object is provided that has `ignored(path)` and/or\n * `childrenIgnored(path)` methods, then these methods will be called to\n * determine whether any Path is a match or if its children should be\n * traversed, respectively.\n */\n ignore?: string | string[] | IgnoreLike\n\n /**\n * Treat brace expansion like `{a,b}` as a \"magic\" pattern. Has no\n * effect if {@link nobrace} is set.\n *\n * Only has effect on the {@link hasMagic} function.\n */\n magicalBraces?: boolean\n\n /**\n * Add a `/` character to directory matches. Note that this requires\n * additional stat calls in some cases.\n */\n mark?: boolean\n\n /**\n * Perform a basename-only match if the pattern does not contain any slash\n * characters. That is, `*.js` would be treated as equivalent to\n * `**\\/*.js`, matching all js files in all directories.\n */\n matchBase?: boolean\n\n /**\n * Limit the directory traversal to a given depth below the cwd.\n * Note that this does NOT prevent traversal to sibling folders,\n * root patterns, and so on. It only limits the maximum folder depth\n * that the walk will descend, relative to the cwd.\n */\n maxDepth?: number\n\n /**\n * Do not expand `{a,b}` and `{1..3}` brace sets.\n */\n nobrace?: boolean\n\n /**\n * Perform a case-insensitive match. This defaults to `true` on macOS and\n * Windows systems, and `false` on all others.\n *\n * **Note** `nocase` should only be explicitly set when it is\n * known that the filesystem's case sensitivity differs from the\n * platform default. If set `true` on case-sensitive file\n * systems, or `false` on case-insensitive file systems, then the\n * walk may return more or less results than expected.\n */\n nocase?: boolean\n\n /**\n * Do not match directories, only files. (Note: to match\n * _only_ directories, put a `/` at the end of the pattern.)\n */\n nodir?: boolean\n\n /**\n * Do not match \"extglob\" patterns such as `+(a|b)`.\n */\n noext?: boolean\n\n /**\n * Do not match `**` against multiple filenames. (Ie, treat it as a normal\n * `*` instead.)\n *\n * Conflicts with {@link matchBase}\n */\n noglobstar?: boolean\n\n /**\n * Defaults to value of `process.platform` if available, or `'linux'` if\n * not. Setting `platform:'win32'` on non-Windows systems may cause strange\n * behavior.\n */\n platform?: NodeJS.Platform\n\n /**\n * Set to true to call `fs.realpath` on all of the\n * results. In the case of an entry that cannot be resolved, the\n * entry is omitted. This incurs a slight performance penalty, of\n * course, because of the added system calls.\n */\n realpath?: boolean\n\n /**\n *\n * A string path resolved against the `cwd` option, which\n * is used as the starting point for absolute patterns that start\n * with `/`, (but not drive letters or UNC paths on Windows).\n *\n * Note that this _doesn't_ necessarily limit the walk to the\n * `root` directory, and doesn't affect the cwd starting point for\n * non-absolute patterns. A pattern containing `..` will still be\n * able to traverse out of the root directory, if it is not an\n * actual root directory on the filesystem, and any non-absolute\n * patterns will be matched in the `cwd`. For example, the\n * pattern `/../*` with `{root:'/some/path'}` will return all\n * files in `/some`, not all files in `/some/path`. 
The pattern\n * `*` with `{root:'/some/path'}` will return all the entries in\n * the cwd, not the entries in `/some/path`.\n *\n * To start absolute and non-absolute patterns in the same\n * path, you can use `{root:''}`. However, be aware that on\n * Windows systems, a pattern like `x:/*` or `//host/share/*` will\n * _always_ start in the `x:/` or `//host/share` directory,\n * regardless of the `root` setting.\n */\n root?: string\n\n /**\n * A [PathScurry](http://npm.im/path-scurry) object used\n * to traverse the file system. If the `nocase` option is set\n * explicitly, then any provided `scurry` object must match this\n * setting.\n */\n scurry?: PathScurry\n\n /**\n * Call `lstat()` on all entries, whether required or not to determine\n * if it's a valid match. When used with {@link withFileTypes}, this means\n * that matches will include data such as modified time, permissions, and\n * so on. Note that this will incur a performance cost due to the added\n * system calls.\n */\n stat?: boolean\n\n /**\n * An AbortSignal which will cancel the Glob walk when\n * triggered.\n */\n signal?: AbortSignal\n\n /**\n * Use `\\\\` as a path separator _only_, and\n * _never_ as an escape character. If set, all `\\\\` characters are\n * replaced with `/` in the pattern.\n *\n * Note that this makes it **impossible** to match against paths\n * containing literal glob pattern characters, but allows matching\n * with patterns constructed using `path.join()` and\n * `path.resolve()` on Windows platforms, mimicking the (buggy!)\n * behavior of Glob v7 and before on Windows. Please use with\n * caution, and be mindful of [the caveat below about Windows\n * paths](#windows). (For legacy reasons, this is also set if\n * `allowWindowsEscape` is set to the exact value `false`.)\n */\n windowsPathsNoEscape?: boolean\n\n /**\n * Return [PathScurry](http://npm.im/path-scurry)\n * `Path` objects instead of strings. These are similar to a\n * NodeJS `Dirent` object, but with additional methods and\n * properties.\n *\n * Conflicts with {@link absolute}\n */\n withFileTypes?: boolean\n\n /**\n * An fs implementation to override some or all of the defaults. See\n * http://npm.im/path-scurry for details about what can be overridden.\n */\n fs?: FSOption\n\n /**\n * Just passed along to Minimatch. Note that this makes all pattern\n * matching operations slower and *extremely* noisy.\n */\n debug?: boolean\n\n /**\n * Return `/` delimited paths, even on Windows.\n *\n * On posix systems, this has no effect. But, on Windows, it means that\n * paths will be `/` delimited, and absolute paths will be their full\n * resolved UNC forms, eg instead of `'C:\\\\foo\\\\bar'`, it would return\n * `'//?/C:/foo/bar'`\n */\n posix?: boolean\n}\n\nexport type GlobOptionsWithFileTypesTrue = GlobOptions & {\n withFileTypes: true\n // string options not relevant if returning Path objects.\n absolute?: undefined\n mark?: undefined\n posix?: undefined\n}\n\nexport type GlobOptionsWithFileTypesFalse = GlobOptions & {\n withFileTypes?: false\n}\n\nexport type GlobOptionsWithFileTypesUnset = GlobOptions & {\n withFileTypes?: undefined\n}\n\nexport type Result = Opts extends GlobOptionsWithFileTypesTrue\n ? Path\n : Opts extends GlobOptionsWithFileTypesFalse\n ? string\n : Opts extends GlobOptionsWithFileTypesUnset\n ? string\n : string | Path\nexport type Results = Result[]\n\nexport type FileTypes = Opts extends GlobOptionsWithFileTypesTrue\n ? true\n : Opts extends GlobOptionsWithFileTypesFalse\n ? 
false\n : Opts extends GlobOptionsWithFileTypesUnset\n ? false\n : boolean\n\n/**\n * An object that can perform glob pattern traversals.\n */\nexport class Glob implements GlobOptions {\n absolute?: boolean\n cwd: string\n root?: string\n dot: boolean\n dotRelative: boolean\n follow: boolean\n ignore?: string | string[] | IgnoreLike\n magicalBraces: boolean\n mark?: boolean\n matchBase: boolean\n maxDepth: number\n nobrace: boolean\n nocase: boolean\n nodir: boolean\n noext: boolean\n noglobstar: boolean\n pattern: string[]\n platform: NodeJS.Platform\n realpath: boolean\n scurry: PathScurry\n stat: boolean\n signal?: AbortSignal\n windowsPathsNoEscape: boolean\n withFileTypes: FileTypes\n\n /**\n * The options provided to the constructor.\n */\n opts: Opts\n\n /**\n * An array of parsed immutable {@link Pattern} objects.\n */\n patterns: Pattern[]\n\n /**\n * All options are stored as properties on the `Glob` object.\n *\n * See {@link GlobOptions} for full options descriptions.\n *\n * Note that a previous `Glob` object can be passed as the\n * `GlobOptions` to another `Glob` instantiation to re-use settings\n * and caches with a new pattern.\n *\n * Traversal functions can be called multiple times to run the walk\n * again.\n */\n constructor(pattern: string | string[], opts: Opts) {\n /* c8 ignore start */\n if (!opts) throw new TypeError('glob options required')\n /* c8 ignore stop */\n this.withFileTypes = !!opts.withFileTypes as FileTypes\n this.signal = opts.signal\n this.follow = !!opts.follow\n this.dot = !!opts.dot\n this.dotRelative = !!opts.dotRelative\n this.nodir = !!opts.nodir\n this.mark = !!opts.mark\n if (!opts.cwd) {\n this.cwd = ''\n } else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) {\n opts.cwd = fileURLToPath(opts.cwd)\n }\n this.cwd = opts.cwd || ''\n this.root = opts.root\n this.magicalBraces = !!opts.magicalBraces\n this.nobrace = !!opts.nobrace\n this.noext = !!opts.noext\n this.realpath = !!opts.realpath\n this.absolute = opts.absolute\n\n this.noglobstar = !!opts.noglobstar\n this.matchBase = !!opts.matchBase\n this.maxDepth =\n typeof opts.maxDepth === 'number' ? opts.maxDepth : Infinity\n this.stat = !!opts.stat\n this.ignore = opts.ignore\n\n if (this.withFileTypes && this.absolute !== undefined) {\n throw new Error('cannot set absolute and withFileTypes:true')\n }\n\n if (typeof pattern === 'string') {\n pattern = [pattern]\n }\n\n this.windowsPathsNoEscape =\n !!opts.windowsPathsNoEscape ||\n (opts as GlobOptions).allowWindowsEscape === false\n\n if (this.windowsPathsNoEscape) {\n pattern = pattern.map(p => p.replace(/\\\\/g, '/'))\n }\n\n if (this.matchBase) {\n if (opts.noglobstar) {\n throw new TypeError('base matching requires globstar')\n }\n pattern = pattern.map(p => (p.includes('/') ? p : `./**/${p}`))\n }\n\n this.pattern = pattern\n\n this.platform = opts.platform || defaultPlatform\n this.opts = { ...opts, platform: this.platform }\n if (opts.scurry) {\n this.scurry = opts.scurry\n if (\n opts.nocase !== undefined &&\n opts.nocase !== opts.scurry.nocase\n ) {\n throw new Error('nocase option contradicts provided scurry option')\n }\n } else {\n const Scurry =\n opts.platform === 'win32'\n ? PathScurryWin32\n : opts.platform === 'darwin'\n ? PathScurryDarwin\n : opts.platform\n ? 
PathScurryPosix\n : PathScurry\n this.scurry = new Scurry(this.cwd, {\n nocase: opts.nocase,\n fs: opts.fs,\n })\n }\n this.nocase = this.scurry.nocase\n\n // If you do nocase:true on a case-sensitive file system, then\n // we need to use regexps instead of strings for non-magic\n // path portions, because statting `aBc` won't return results\n // for the file `AbC` for example.\n const nocaseMagicOnly =\n this.platform === 'darwin' || this.platform === 'win32'\n\n const mmo: MinimatchOptions = {\n // default nocase based on platform\n ...opts,\n dot: this.dot,\n matchBase: this.matchBase,\n nobrace: this.nobrace,\n nocase: this.nocase,\n nocaseMagicOnly,\n nocomment: true,\n noext: this.noext,\n nonegate: true,\n optimizationLevel: 2,\n platform: this.platform,\n windowsPathsNoEscape: this.windowsPathsNoEscape,\n debug: !!this.opts.debug,\n }\n\n const mms = this.pattern.map(p => new Minimatch(p, mmo))\n const [matchSet, globParts] = mms.reduce(\n (set: [MatchSet, GlobParts], m) => {\n set[0].push(...m.set)\n set[1].push(...m.globParts)\n return set\n },\n [[], []]\n )\n this.patterns = matchSet.map((set, i) => {\n return new Pattern(set, globParts[i], 0, this.platform)\n })\n }\n\n /**\n * Returns a Promise that resolves to the results array.\n */\n async walk(): Promise>\n async walk(): Promise<(string | Path)[]> {\n // Walkers always return array of Path objects, so we just have to\n // coerce them into the right shape. It will have already called\n // realpath() if the option was set to do so, so we know that's cached.\n // start out knowing the cwd, at least\n return [\n ...(await new GlobWalker(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).walk()),\n ]\n }\n\n /**\n * synchronous {@link Glob.walk}\n */\n walkSync(): Results\n walkSync(): (string | Path)[] {\n return [\n ...new GlobWalker(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).walkSync(),\n ]\n }\n\n /**\n * Stream results asynchronously.\n */\n stream(): Minipass, Result>\n stream(): Minipass {\n return new GlobStream(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).stream()\n }\n\n /**\n * Stream results synchronously.\n */\n streamSync(): Minipass, Result>\n streamSync(): Minipass {\n return new GlobStream(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).streamSync()\n }\n\n /**\n * Default sync iteration function. Returns a Generator that\n * iterates over the results.\n */\n iterateSync(): Generator, void, void> {\n return this.streamSync()[Symbol.iterator]()\n }\n [Symbol.iterator]() {\n return this.iterateSync()\n }\n\n /**\n * Default async iteration function. 
Returns an AsyncGenerator that\n * iterates over the results.\n */\n iterate(): AsyncGenerator, void, void> {\n return this.stream()[Symbol.asyncIterator]()\n }\n [Symbol.asyncIterator]() {\n return this.iterate()\n }\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/has-magic.d.ts b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/has-magic.d.ts new file mode 100644 index 00000000000000..8aec3bd9725175 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/has-magic.d.ts @@ -0,0 +1,14 @@ +import { GlobOptions } from './glob.js'; +/** + * Return true if the patterns provided contain any magic glob characters, + * given the options provided. + * + * Brace expansion is not considered "magic" unless the `magicalBraces` option + * is set, as brace expansion just turns one string into an array of strings. + * So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and + * `'xby'` both do not contain any magic glob characters, and it's treated the + * same as if you had called it on `['xay', 'xby']`. When `magicalBraces:true` + * is in the options, brace expansion _is_ treated as a pattern having magic. + */ +export declare const hasMagic: (pattern: string | string[], options?: GlobOptions) => boolean; +//# sourceMappingURL=has-magic.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/has-magic.d.ts.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/has-magic.d.ts.map new file mode 100644 index 00000000000000..b24dd4ec47e0bb --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/has-magic.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"has-magic.d.ts","sourceRoot":"","sources":["../../src/has-magic.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,WAAW,EAAE,MAAM,WAAW,CAAA;AAEvC;;;;;;;;;;GAUG;AACH,eAAO,MAAM,QAAQ,YACV,MAAM,GAAG,MAAM,EAAE,YACjB,WAAW,KACnB,OAQF,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/has-magic.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/has-magic.js new file mode 100644 index 00000000000000..ba2321ab868d02 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/has-magic.js @@ -0,0 +1,23 @@ +import { Minimatch } from 'minimatch'; +/** + * Return true if the patterns provided contain any magic glob characters, + * given the options provided. + * + * Brace expansion is not considered "magic" unless the `magicalBraces` option + * is set, as brace expansion just turns one string into an array of strings. + * So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and + * `'xby'` both do not contain any magic glob characters, and it's treated the + * same as if you had called it on `['xay', 'xby']`. When `magicalBraces:true` + * is in the options, brace expansion _is_ treated as a pattern having magic. 
+ */ +export const hasMagic = (pattern, options = {}) => { + if (!Array.isArray(pattern)) { + pattern = [pattern]; + } + for (const p of pattern) { + if (new Minimatch(p, options).hasMagic()) + return true; + } + return false; +}; +//# sourceMappingURL=has-magic.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/has-magic.js.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/has-magic.js.map new file mode 100644 index 00000000000000..27fd78dbae62cf --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/has-magic.js.map @@ -0,0 +1 @@ +{"version":3,"file":"has-magic.js","sourceRoot":"","sources":["../../src/has-magic.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAE,MAAM,WAAW,CAAA;AAGrC;;;;;;;;;;GAUG;AACH,MAAM,CAAC,MAAM,QAAQ,GAAG,CACtB,OAA0B,EAC1B,UAAuB,EAAE,EAChB,EAAE;IACX,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;QAC3B,OAAO,GAAG,CAAC,OAAO,CAAC,CAAA;KACpB;IACD,KAAK,MAAM,CAAC,IAAI,OAAO,EAAE;QACvB,IAAI,IAAI,SAAS,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,QAAQ,EAAE;YAAE,OAAO,IAAI,CAAA;KACtD;IACD,OAAO,KAAK,CAAA;AACd,CAAC,CAAA","sourcesContent":["import { Minimatch } from 'minimatch'\nimport { GlobOptions } from './glob.js'\n\n/**\n * Return true if the patterns provided contain any magic glob characters,\n * given the options provided.\n *\n * Brace expansion is not considered \"magic\" unless the `magicalBraces` option\n * is set, as brace expansion just turns one string into an array of strings.\n * So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and\n * `'xby'` both do not contain any magic glob characters, and it's treated the\n * same as if you had called it on `['xay', 'xby']`. When `magicalBraces:true`\n * is in the options, brace expansion _is_ treated as a pattern having magic.\n */\nexport const hasMagic = (\n pattern: string | string[],\n options: GlobOptions = {}\n): boolean => {\n if (!Array.isArray(pattern)) {\n pattern = [pattern]\n }\n for (const p of pattern) {\n if (new Minimatch(p, options).hasMagic()) return true\n }\n return false\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/ignore.d.ts b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/ignore.d.ts new file mode 100644 index 00000000000000..e9d74f3b5e1291 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/ignore.d.ts @@ -0,0 +1,20 @@ +import { Minimatch } from 'minimatch'; +import { Path } from 'path-scurry'; +import { GlobWalkerOpts } from './walker.js'; +export interface IgnoreLike { + ignored?: (p: Path) => boolean; + childrenIgnored?: (p: Path) => boolean; +} +/** + * Class used to process ignored patterns + */ +export declare class Ignore implements IgnoreLike { + relative: Minimatch[]; + relativeChildren: Minimatch[]; + absolute: Minimatch[]; + absoluteChildren: Minimatch[]; + constructor(ignored: string[], { nobrace, nocase, noext, noglobstar, platform, }: GlobWalkerOpts); + ignored(p: Path): boolean; + childrenIgnored(p: Path): boolean; +} +//# sourceMappingURL=ignore.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/ignore.d.ts.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/ignore.d.ts.map new file mode 100644 index 00000000000000..e0018cf935b046 --- /dev/null +++ 
b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/ignore.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"ignore.d.ts","sourceRoot":"","sources":["../../src/ignore.ts"],"names":[],"mappings":"AAKA,OAAO,EAAE,SAAS,EAAE,MAAM,WAAW,CAAA;AACrC,OAAO,EAAE,IAAI,EAAE,MAAM,aAAa,CAAA;AAElC,OAAO,EAAE,cAAc,EAAE,MAAM,aAAa,CAAA;AAE5C,MAAM,WAAW,UAAU;IACzB,OAAO,CAAC,EAAE,CAAC,CAAC,EAAE,IAAI,KAAK,OAAO,CAAA;IAC9B,eAAe,CAAC,EAAE,CAAC,CAAC,EAAE,IAAI,KAAK,OAAO,CAAA;CACvC;AASD;;GAEG;AACH,qBAAa,MAAO,YAAW,UAAU;IACvC,QAAQ,EAAE,SAAS,EAAE,CAAA;IACrB,gBAAgB,EAAE,SAAS,EAAE,CAAA;IAC7B,QAAQ,EAAE,SAAS,EAAE,CAAA;IACrB,gBAAgB,EAAE,SAAS,EAAE,CAAA;gBAG3B,OAAO,EAAE,MAAM,EAAE,EACjB,EACE,OAAO,EACP,MAAM,EACN,KAAK,EACL,UAAU,EACV,QAA0B,GAC3B,EAAE,cAAc;IAiDnB,OAAO,CAAC,CAAC,EAAE,IAAI,GAAG,OAAO;IAczB,eAAe,CAAC,CAAC,EAAE,IAAI,GAAG,OAAO;CAWlC"} \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/ignore.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/ignore.js new file mode 100644 index 00000000000000..2dbaa16a11460e --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/ignore.js @@ -0,0 +1,99 @@ +// give it a pattern, and it'll be able to tell you if +// a given path should be ignored. +// Ignoring a path ignores its children if the pattern ends in /** +// Ignores are always parsed in dot:true mode +import { Minimatch } from 'minimatch'; +import { Pattern } from './pattern.js'; +const defaultPlatform = typeof process === 'object' && + process && + typeof process.platform === 'string' + ? process.platform + : 'linux'; +/** + * Class used to process ignored patterns + */ +export class Ignore { + relative; + relativeChildren; + absolute; + absoluteChildren; + constructor(ignored, { nobrace, nocase, noext, noglobstar, platform = defaultPlatform, }) { + this.relative = []; + this.absolute = []; + this.relativeChildren = []; + this.absoluteChildren = []; + const mmopts = { + dot: true, + nobrace, + nocase, + noext, + noglobstar, + optimizationLevel: 2, + platform, + nocomment: true, + nonegate: true, + }; + // this is a little weird, but it gives us a clean set of optimized + // minimatch matchers, without getting tripped up if one of them + // ends in /** inside a brace section, and it's only inefficient at + // the start of the walk, not along it. + // It'd be nice if the Pattern class just had a .test() method, but + // handling globstars is a bit of a pita, and that code already lives + // in minimatch anyway. + // Another way would be if maybe Minimatch could take its set/globParts + // as an option, and then we could at least just use Pattern to test + // for absolute-ness. + // Yet another way, Minimatch could take an array of glob strings, and + // a cwd option, and do the right thing. 
+ for (const ign of ignored) { + const mm = new Minimatch(ign, mmopts); + for (let i = 0; i < mm.set.length; i++) { + const parsed = mm.set[i]; + const globParts = mm.globParts[i]; + const p = new Pattern(parsed, globParts, 0, platform); + const m = new Minimatch(p.globString(), mmopts); + const children = globParts[globParts.length - 1] === '**'; + const absolute = p.isAbsolute(); + if (absolute) + this.absolute.push(m); + else + this.relative.push(m); + if (children) { + if (absolute) + this.absoluteChildren.push(m); + else + this.relativeChildren.push(m); + } + } + } + } + ignored(p) { + const fullpath = p.fullpath(); + const fullpaths = `${fullpath}/`; + const relative = p.relative() || '.'; + const relatives = `${relative}/`; + for (const m of this.relative) { + if (m.match(relative) || m.match(relatives)) + return true; + } + for (const m of this.absolute) { + if (m.match(fullpath) || m.match(fullpaths)) + return true; + } + return false; + } + childrenIgnored(p) { + const fullpath = p.fullpath() + '/'; + const relative = (p.relative() || '.') + '/'; + for (const m of this.relativeChildren) { + if (m.match(relative)) + return true; + } + for (const m of this.absoluteChildren) { + if (m.match(fullpath)) + true; + } + return false; + } +} +//# sourceMappingURL=ignore.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/ignore.js.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/ignore.js.map new file mode 100644 index 00000000000000..1038b712396eaf --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/ignore.js.map @@ -0,0 +1 @@ +{"version":3,"file":"ignore.js","sourceRoot":"","sources":["../../src/ignore.ts"],"names":[],"mappings":"AAAA,sDAAsD;AACtD,kCAAkC;AAClC,kEAAkE;AAClE,6CAA6C;AAE7C,OAAO,EAAE,SAAS,EAAE,MAAM,WAAW,CAAA;AAErC,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAA;AAQtC,MAAM,eAAe,GACnB,OAAO,OAAO,KAAK,QAAQ;IAC3B,OAAO;IACP,OAAO,OAAO,CAAC,QAAQ,KAAK,QAAQ;IAClC,CAAC,CAAC,OAAO,CAAC,QAAQ;IAClB,CAAC,CAAC,OAAO,CAAA;AAEb;;GAEG;AACH,MAAM,OAAO,MAAM;IACjB,QAAQ,CAAa;IACrB,gBAAgB,CAAa;IAC7B,QAAQ,CAAa;IACrB,gBAAgB,CAAa;IAE7B,YACE,OAAiB,EACjB,EACE,OAAO,EACP,MAAM,EACN,KAAK,EACL,UAAU,EACV,QAAQ,GAAG,eAAe,GACX;QAEjB,IAAI,CAAC,QAAQ,GAAG,EAAE,CAAA;QAClB,IAAI,CAAC,QAAQ,GAAG,EAAE,CAAA;QAClB,IAAI,CAAC,gBAAgB,GAAG,EAAE,CAAA;QAC1B,IAAI,CAAC,gBAAgB,GAAG,EAAE,CAAA;QAC1B,MAAM,MAAM,GAAG;YACb,GAAG,EAAE,IAAI;YACT,OAAO;YACP,MAAM;YACN,KAAK;YACL,UAAU;YACV,iBAAiB,EAAE,CAAC;YACpB,QAAQ;YACR,SAAS,EAAE,IAAI;YACf,QAAQ,EAAE,IAAI;SACf,CAAA;QAED,mEAAmE;QACnE,gEAAgE;QAChE,mEAAmE;QACnE,uCAAuC;QACvC,mEAAmE;QACnE,qEAAqE;QACrE,uBAAuB;QACvB,uEAAuE;QACvE,oEAAoE;QACpE,qBAAqB;QACrB,sEAAsE;QACtE,wCAAwC;QACxC,KAAK,MAAM,GAAG,IAAI,OAAO,EAAE;YACzB,MAAM,EAAE,GAAG,IAAI,SAAS,CAAC,GAAG,EAAE,MAAM,CAAC,CAAA;YACrC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,EAAE,CAAC,GAAG,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;gBACtC,MAAM,MAAM,GAAG,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;gBACxB,MAAM,SAAS,GAAG,EAAE,CAAC,SAAS,CAAC,CAAC,CAAC,CAAA;gBACjC,MAAM,CAAC,GAAG,IAAI,OAAO,CAAC,MAAM,EAAE,SAAS,EAAE,CAAC,EAAE,QAAQ,CAAC,CAAA;gBACrD,MAAM,CAAC,GAAG,IAAI,SAAS,CAAC,CAAC,CAAC,UAAU,EAAE,EAAE,MAAM,CAAC,CAAA;gBAC/C,MAAM,QAAQ,GAAG,SAAS,CAAC,SAAS,CAAC,MAAM,GAAG,CAAC,CAAC,KAAK,IAAI,CAAA;gBACzD,MAAM,QAAQ,GAAG,CAAC,CAAC,UAAU,EAAE,CAAA;gBAC/B,IAAI,QAAQ;oBAAE,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;;oBAC9B,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;gBAC1B,IAAI,QAAQ,EAAE;oBACZ,IAAI,QAAQ;wBAAE,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;;wBACtC
,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;iBACnC;aACF;SACF;IACH,CAAC;IAED,OAAO,CAAC,CAAO;QACb,MAAM,QAAQ,GAAG,CAAC,CAAC,QAAQ,EAAE,CAAA;QAC7B,MAAM,SAAS,GAAG,GAAG,QAAQ,GAAG,CAAA;QAChC,MAAM,QAAQ,GAAG,CAAC,CAAC,QAAQ,EAAE,IAAI,GAAG,CAAA;QACpC,MAAM,SAAS,GAAG,GAAG,QAAQ,GAAG,CAAA;QAChC,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,QAAQ,EAAE;YAC7B,IAAI,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,SAAS,CAAC;gBAAE,OAAO,IAAI,CAAA;SACzD;QACD,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,QAAQ,EAAE;YAC7B,IAAI,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,SAAS,CAAC;gBAAE,OAAO,IAAI,CAAA;SACzD;QACD,OAAO,KAAK,CAAA;IACd,CAAC;IAED,eAAe,CAAC,CAAO;QACrB,MAAM,QAAQ,GAAG,CAAC,CAAC,QAAQ,EAAE,GAAG,GAAG,CAAA;QACnC,MAAM,QAAQ,GAAG,CAAC,CAAC,CAAC,QAAQ,EAAE,IAAI,GAAG,CAAC,GAAG,GAAG,CAAA;QAC5C,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,gBAAgB,EAAE;YACrC,IAAI,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC;gBAAE,OAAO,IAAI,CAAA;SACnC;QACD,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,gBAAgB,EAAE;YACrC,IAAI,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC;gBAAE,IAAI,CAAA;SAC5B;QACD,OAAO,KAAK,CAAA;IACd,CAAC;CACF","sourcesContent":["// give it a pattern, and it'll be able to tell you if\n// a given path should be ignored.\n// Ignoring a path ignores its children if the pattern ends in /**\n// Ignores are always parsed in dot:true mode\n\nimport { Minimatch } from 'minimatch'\nimport { Path } from 'path-scurry'\nimport { Pattern } from './pattern.js'\nimport { GlobWalkerOpts } from './walker.js'\n\nexport interface IgnoreLike {\n ignored?: (p: Path) => boolean\n childrenIgnored?: (p: Path) => boolean\n}\n\nconst defaultPlatform: NodeJS.Platform =\n typeof process === 'object' &&\n process &&\n typeof process.platform === 'string'\n ? process.platform\n : 'linux'\n\n/**\n * Class used to process ignored patterns\n */\nexport class Ignore implements IgnoreLike {\n relative: Minimatch[]\n relativeChildren: Minimatch[]\n absolute: Minimatch[]\n absoluteChildren: Minimatch[]\n\n constructor(\n ignored: string[],\n {\n nobrace,\n nocase,\n noext,\n noglobstar,\n platform = defaultPlatform,\n }: GlobWalkerOpts\n ) {\n this.relative = []\n this.absolute = []\n this.relativeChildren = []\n this.absoluteChildren = []\n const mmopts = {\n dot: true,\n nobrace,\n nocase,\n noext,\n noglobstar,\n optimizationLevel: 2,\n platform,\n nocomment: true,\n nonegate: true,\n }\n\n // this is a little weird, but it gives us a clean set of optimized\n // minimatch matchers, without getting tripped up if one of them\n // ends in /** inside a brace section, and it's only inefficient at\n // the start of the walk, not along it.\n // It'd be nice if the Pattern class just had a .test() method, but\n // handling globstars is a bit of a pita, and that code already lives\n // in minimatch anyway.\n // Another way would be if maybe Minimatch could take its set/globParts\n // as an option, and then we could at least just use Pattern to test\n // for absolute-ness.\n // Yet another way, Minimatch could take an array of glob strings, and\n // a cwd option, and do the right thing.\n for (const ign of ignored) {\n const mm = new Minimatch(ign, mmopts)\n for (let i = 0; i < mm.set.length; i++) {\n const parsed = mm.set[i]\n const globParts = mm.globParts[i]\n const p = new Pattern(parsed, globParts, 0, platform)\n const m = new Minimatch(p.globString(), mmopts)\n const children = globParts[globParts.length - 1] === '**'\n const absolute = p.isAbsolute()\n if (absolute) this.absolute.push(m)\n else this.relative.push(m)\n if (children) {\n if (absolute) this.absoluteChildren.push(m)\n else this.relativeChildren.push(m)\n }\n }\n }\n 
}\n\n ignored(p: Path): boolean {\n const fullpath = p.fullpath()\n const fullpaths = `${fullpath}/`\n const relative = p.relative() || '.'\n const relatives = `${relative}/`\n for (const m of this.relative) {\n if (m.match(relative) || m.match(relatives)) return true\n }\n for (const m of this.absolute) {\n if (m.match(fullpath) || m.match(fullpaths)) return true\n }\n return false\n }\n\n childrenIgnored(p: Path): boolean {\n const fullpath = p.fullpath() + '/'\n const relative = (p.relative() || '.') + '/'\n for (const m of this.relativeChildren) {\n if (m.match(relative)) return true\n }\n for (const m of this.absoluteChildren) {\n if (m.match(fullpath)) true\n }\n return false\n }\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/index.d.ts b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/index.d.ts new file mode 100644 index 00000000000000..669bf12e6d5916 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/index.d.ts @@ -0,0 +1,95 @@ +import { Minipass } from 'minipass'; +import { Path } from 'path-scurry'; +import type { GlobOptions, GlobOptionsWithFileTypesFalse, GlobOptionsWithFileTypesTrue, GlobOptionsWithFileTypesUnset } from './glob.js'; +import { Glob } from './glob.js'; +/** + * Syncronous form of {@link globStream}. Will read all the matches as fast as + * you consume them, even all in a single tick if you consume them immediately, + * but will still respond to backpressure if they're not consumed immediately. + */ +export declare function globStreamSync(pattern: string | string[], options: GlobOptionsWithFileTypesTrue): Minipass; +export declare function globStreamSync(pattern: string | string[], options: GlobOptionsWithFileTypesFalse): Minipass; +export declare function globStreamSync(pattern: string | string[], options: GlobOptionsWithFileTypesUnset): Minipass; +export declare function globStreamSync(pattern: string | string[], options: GlobOptions): Minipass | Minipass; +/** + * Return a stream that emits all the strings or `Path` objects and + * then emits `end` when completed. + */ +export declare function globStream(pattern: string | string[], options: GlobOptionsWithFileTypesFalse): Minipass; +export declare function globStream(pattern: string | string[], options: GlobOptionsWithFileTypesTrue): Minipass; +export declare function globStream(pattern: string | string[], options?: GlobOptionsWithFileTypesUnset | undefined): Minipass; +export declare function globStream(pattern: string | string[], options: GlobOptions): Minipass | Minipass; +/** + * Synchronous form of {@link glob} + */ +export declare function globSync(pattern: string | string[], options: GlobOptionsWithFileTypesFalse): string[]; +export declare function globSync(pattern: string | string[], options: GlobOptionsWithFileTypesTrue): Path[]; +export declare function globSync(pattern: string | string[], options?: GlobOptionsWithFileTypesUnset | undefined): string[]; +export declare function globSync(pattern: string | string[], options: GlobOptions): Path[] | string[]; +/** + * Perform an asynchronous glob search for the pattern(s) specified. Returns + * [Path](https://isaacs.github.io/path-scurry/classes/PathBase) objects if the + * {@link withFileTypes} option is set to `true`. See {@link GlobOptions} for + * full option descriptions. 
+ */ +declare function glob_(pattern: string | string[], options?: GlobOptionsWithFileTypesUnset | undefined): Promise; +declare function glob_(pattern: string | string[], options: GlobOptionsWithFileTypesTrue): Promise; +declare function glob_(pattern: string | string[], options: GlobOptionsWithFileTypesFalse): Promise; +declare function glob_(pattern: string | string[], options: GlobOptions): Promise; +/** + * Return a sync iterator for walking glob pattern matches. + */ +export declare function globIterateSync(pattern: string | string[], options?: GlobOptionsWithFileTypesUnset | undefined): Generator; +export declare function globIterateSync(pattern: string | string[], options: GlobOptionsWithFileTypesTrue): Generator; +export declare function globIterateSync(pattern: string | string[], options: GlobOptionsWithFileTypesFalse): Generator; +export declare function globIterateSync(pattern: string | string[], options: GlobOptions): Generator | Generator; +/** + * Return an async iterator for walking glob pattern matches. + */ +export declare function globIterate(pattern: string | string[], options?: GlobOptionsWithFileTypesUnset | undefined): AsyncGenerator; +export declare function globIterate(pattern: string | string[], options: GlobOptionsWithFileTypesTrue): AsyncGenerator; +export declare function globIterate(pattern: string | string[], options: GlobOptionsWithFileTypesFalse): AsyncGenerator; +export declare function globIterate(pattern: string | string[], options: GlobOptions): AsyncGenerator | AsyncGenerator; +export declare const streamSync: typeof globStreamSync; +export declare const stream: typeof globStream & { + sync: typeof globStreamSync; +}; +export declare const iterateSync: typeof globIterateSync; +export declare const iterate: typeof globIterate & { + sync: typeof globIterateSync; +}; +export declare const sync: typeof globSync & { + stream: typeof globStreamSync; + iterate: typeof globIterateSync; +}; +export { escape, unescape } from 'minimatch'; +export { Glob } from './glob.js'; +export type { GlobOptions, GlobOptionsWithFileTypesFalse, GlobOptionsWithFileTypesTrue, GlobOptionsWithFileTypesUnset, } from './glob.js'; +export { hasMagic } from './has-magic.js'; +export type { IgnoreLike } from './ignore.js'; +export type { MatchStream } from './walker.js'; +export declare const glob: typeof glob_ & { + glob: typeof glob_; + globSync: typeof globSync; + sync: typeof globSync & { + stream: typeof globStreamSync; + iterate: typeof globIterateSync; + }; + globStream: typeof globStream; + stream: typeof globStream & { + sync: typeof globStreamSync; + }; + globStreamSync: typeof globStreamSync; + streamSync: typeof globStreamSync; + globIterate: typeof globIterate; + iterate: typeof globIterate & { + sync: typeof globIterateSync; + }; + globIterateSync: typeof globIterateSync; + iterateSync: typeof globIterateSync; + Glob: typeof Glob; + hasMagic: (pattern: string | string[], options?: GlobOptions) => boolean; + escape: (s: string, { windowsPathsNoEscape, }?: Pick | undefined) => string; + unescape: (s: string, { windowsPathsNoEscape, }?: Pick | undefined) => string; +}; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/index.d.ts.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/index.d.ts.map new file mode 100644 index 00000000000000..c60290eb118a91 --- /dev/null +++ 
b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAA;AACnC,OAAO,EAAE,IAAI,EAAE,MAAM,aAAa,CAAA;AAClC,OAAO,KAAK,EACV,WAAW,EACX,6BAA6B,EAC7B,4BAA4B,EAC5B,6BAA6B,EAC9B,MAAM,WAAW,CAAA;AAClB,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAA;AAGhC;;;;GAIG;AACH,wBAAgB,cAAc,CAC5B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,4BAA4B,GACpC,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;AACvB,wBAAgB,cAAc,CAC5B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;AAC3B,wBAAgB,cAAc,CAC5B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;AAC3B,wBAAgB,cAAc,CAC5B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,WAAW,GACnB,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;AAQlD;;;GAGG;AACH,wBAAgB,UAAU,CACxB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;AAC3B,wBAAgB,UAAU,CACxB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,4BAA4B,GACpC,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;AACvB,wBAAgB,UAAU,CACxB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,CAAC,EAAE,6BAA6B,GAAG,SAAS,GAClD,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;AAC3B,wBAAgB,UAAU,CACxB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,WAAW,GACnB,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;AAQlD;;GAEG;AACH,wBAAgB,QAAQ,CACtB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,MAAM,EAAE,CAAA;AACX,wBAAgB,QAAQ,CACtB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,4BAA4B,GACpC,IAAI,EAAE,CAAA;AACT,wBAAgB,QAAQ,CACtB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,CAAC,EAAE,6BAA6B,GAAG,SAAS,GAClD,MAAM,EAAE,CAAA;AACX,wBAAgB,QAAQ,CACtB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,WAAW,GACnB,IAAI,EAAE,GAAG,MAAM,EAAE,CAAA;AAQpB;;;;;GAKG;AACH,iBAAe,KAAK,CAClB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,CAAC,EAAE,6BAA6B,GAAG,SAAS,GAClD,OAAO,CAAC,MAAM,EAAE,CAAC,CAAA;AACpB,iBAAe,KAAK,CAClB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,4BAA4B,GACpC,OAAO,CAAC,IAAI,EAAE,CAAC,CAAA;AAClB,iBAAe,KAAK,CAClB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,OAAO,CAAC,MAAM,EAAE,CAAC,CAAA;AACpB,iBAAe,KAAK,CAClB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,WAAW,GACnB,OAAO,CAAC,IAAI,EAAE,GAAG,MAAM,EAAE,CAAC,CAAA;AAQ7B;;GAEG;AACH,wBAAgB,eAAe,CAC7B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,CAAC,EAAE,6BAA6B,GAAG,SAAS,GAClD,SAAS,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AAChC,wBAAgB,eAAe,CAC7B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,4BAA4B,GACpC,SAAS,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AAC9B,wBAAgB,eAAe,CAC7B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,SAAS,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AAChC,wBAAgB,eAAe,CAC7B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,WAAW,GACnB,SAAS,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AAQ9D;;GAEG;AACH,wBAAgB,WAAW,CACzB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,CAAC,EAAE,6BAA6B,GAAG,SAAS,GAClD,cAAc,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AACrC,wBAAgB,WAAW,CACzB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,4BAA4B,GACpC,cAAc,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AACnC,wBAAgB,WAAW,CACzB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,cAAc,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AACrC,wBAAgB,WAAW,CACzB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,WAAW,GACnB,cAAc,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,GAAG,cAAc,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AA
SxE,eAAO,MAAM,UAAU,uBAAiB,CAAA;AACxC,eAAO,MAAM,MAAM;;CAAsD,CAAA;AACzE,eAAO,MAAM,WAAW,wBAAkB,CAAA;AAC1C,eAAO,MAAM,OAAO;;CAElB,CAAA;AACF,eAAO,MAAM,IAAI;;;CAGf,CAAA;AAGF,OAAO,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,WAAW,CAAA;AAC5C,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAA;AAChC,YAAY,EACV,WAAW,EACX,6BAA6B,EAC7B,4BAA4B,EAC5B,6BAA6B,GAC9B,MAAM,WAAW,CAAA;AAClB,OAAO,EAAE,QAAQ,EAAE,MAAM,gBAAgB,CAAA;AACzC,YAAY,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AAC7C,YAAY,EAAE,WAAW,EAAE,MAAM,aAAa,CAAA;AAG9C,eAAO,MAAM,IAAI;;;;;;;;;;;;;;;;;;;;;;;CAgBf,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/index.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/index.js new file mode 100644 index 00000000000000..7b270117e740ad --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/index.js @@ -0,0 +1,56 @@ +import { escape, unescape } from 'minimatch'; +import { Glob } from './glob.js'; +import { hasMagic } from './has-magic.js'; +export function globStreamSync(pattern, options = {}) { + return new Glob(pattern, options).streamSync(); +} +export function globStream(pattern, options = {}) { + return new Glob(pattern, options).stream(); +} +export function globSync(pattern, options = {}) { + return new Glob(pattern, options).walkSync(); +} +async function glob_(pattern, options = {}) { + return new Glob(pattern, options).walk(); +} +export function globIterateSync(pattern, options = {}) { + return new Glob(pattern, options).iterateSync(); +} +export function globIterate(pattern, options = {}) { + return new Glob(pattern, options).iterate(); +} +// aliases: glob.sync.stream() glob.stream.sync() glob.sync() etc +export const streamSync = globStreamSync; +export const stream = Object.assign(globStream, { sync: globStreamSync }); +export const iterateSync = globIterateSync; +export const iterate = Object.assign(globIterate, { + sync: globIterateSync, +}); +export const sync = Object.assign(globSync, { + stream: globStreamSync, + iterate: globIterateSync, +}); +/* c8 ignore start */ +export { escape, unescape } from 'minimatch'; +export { Glob } from './glob.js'; +export { hasMagic } from './has-magic.js'; +/* c8 ignore stop */ +export const glob = Object.assign(glob_, { + glob: glob_, + globSync, + sync, + globStream, + stream, + globStreamSync, + streamSync, + globIterate, + iterate, + globIterateSync, + iterateSync, + Glob, + hasMagic, + escape, + unescape, +}); +glob.glob = glob; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/index.js.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/index.js.map new file mode 100644 index 00000000000000..2d4fc077271b15 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/index.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,WAAW,CAAA;AAS5C,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAA;AAChC,OAAO,EAAE,QAAQ,EAAE,MAAM,gBAAgB,CAAA;AAuBzC,MAAM,UAAU,cAAc,CAC5B,OAA0B,EAC1B,UAAuB,EAAE;IAEzB,OAAO,IAAI,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,UAAU,EAAE,CAAA;AAChD,CAAC;AAsBD,MAAM,UAAU,UAAU,CACxB,OAA0B,EAC1B,UAAuB,EAAE;IAEzB,OAAO,IAAI,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;AAC5C,CAAC;AAqBD,MAAM,UAAU,QAAQ,CACtB,OAA0B,EAC1B,UAAuB,EAAE;IAEzB,OAAO,IAAI,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,QAAQ,EAAE,CAAA;AAC9C,CAAC;AAwBD,KAAK,UAAU,KAAK,CAClB,OAA0B,EAC1B,UAAuB,EAAE;IAEzB,OAAO,IAAI,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,IAAI,EAAE,CAAA;AAC1C,CAAC;AAqBD,MAAM,UAAU,eAAe,CAC7B,OAA0B,EAC1B,UAAuB,EAAE;IAEzB,OAAO,IAAI,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,WAAW,EAAE,CAAA;AACjD,CAAC;AAqBD,MAAM,UAAU,WAAW,CACzB,OAA0B,EAC1B,UAAuB,EAAE;IAEzB,OAAO,IAAI,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,OAAO,EAAE,CAAA;AAC7C,CAAC;AAED,iEAAiE;AACjE,MAAM,CAAC,MAAM,UAAU,GAAG,cAAc,CAAA;AACxC,MAAM,CAAC,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,UAAU,EAAE,EAAE,IAAI,EAAE,cAAc,EAAE,CAAC,CAAA;AACzE,MAAM,CAAC,MAAM,WAAW,GAAG,eAAe,CAAA;AAC1C,MAAM,CAAC,MAAM,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,WAAW,EAAE;IAChD,IAAI,EAAE,eAAe;CACtB,CAAC,CAAA;AACF,MAAM,CAAC,MAAM,IAAI,GAAG,MAAM,CAAC,MAAM,CAAC,QAAQ,EAAE;IAC1C,MAAM,EAAE,cAAc;IACtB,OAAO,EAAE,eAAe;CACzB,CAAC,CAAA;AAEF,qBAAqB;AACrB,OAAO,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,WAAW,CAAA;AAC5C,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAA;AAOhC,OAAO,EAAE,QAAQ,EAAE,MAAM,gBAAgB,CAAA;AAGzC,oBAAoB;AAEpB,MAAM,CAAC,MAAM,IAAI,GAAG,MAAM,CAAC,MAAM,CAAC,KAAK,EAAE;IACvC,IAAI,EAAE,KAAK;IACX,QAAQ;IACR,IAAI;IACJ,UAAU;IACV,MAAM;IACN,cAAc;IACd,UAAU;IACV,WAAW;IACX,OAAO;IACP,eAAe;IACf,WAAW;IACX,IAAI;IACJ,QAAQ;IACR,MAAM;IACN,QAAQ;CACT,CAAC,CAAA;AACF,IAAI,CAAC,IAAI,GAAG,IAAI,CAAA","sourcesContent":["import { escape, unescape } from 'minimatch'\nimport { Minipass } from 'minipass'\nimport { Path } from 'path-scurry'\nimport type {\n GlobOptions,\n GlobOptionsWithFileTypesFalse,\n GlobOptionsWithFileTypesTrue,\n GlobOptionsWithFileTypesUnset,\n} from './glob.js'\nimport { Glob } from './glob.js'\nimport { hasMagic } from './has-magic.js'\n\n/**\n * Syncronous form of {@link globStream}. 
Will read all the matches as fast as\n * you consume them, even all in a single tick if you consume them immediately,\n * but will still respond to backpressure if they're not consumed immediately.\n */\nexport function globStreamSync(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesTrue\n): Minipass\nexport function globStreamSync(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesFalse\n): Minipass\nexport function globStreamSync(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesUnset\n): Minipass\nexport function globStreamSync(\n pattern: string | string[],\n options: GlobOptions\n): Minipass | Minipass\nexport function globStreamSync(\n pattern: string | string[],\n options: GlobOptions = {}\n) {\n return new Glob(pattern, options).streamSync()\n}\n\n/**\n * Return a stream that emits all the strings or `Path` objects and\n * then emits `end` when completed.\n */\nexport function globStream(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesFalse\n): Minipass\nexport function globStream(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesTrue\n): Minipass\nexport function globStream(\n pattern: string | string[],\n options?: GlobOptionsWithFileTypesUnset | undefined\n): Minipass\nexport function globStream(\n pattern: string | string[],\n options: GlobOptions\n): Minipass | Minipass\nexport function globStream(\n pattern: string | string[],\n options: GlobOptions = {}\n) {\n return new Glob(pattern, options).stream()\n}\n\n/**\n * Synchronous form of {@link glob}\n */\nexport function globSync(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesFalse\n): string[]\nexport function globSync(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesTrue\n): Path[]\nexport function globSync(\n pattern: string | string[],\n options?: GlobOptionsWithFileTypesUnset | undefined\n): string[]\nexport function globSync(\n pattern: string | string[],\n options: GlobOptions\n): Path[] | string[]\nexport function globSync(\n pattern: string | string[],\n options: GlobOptions = {}\n) {\n return new Glob(pattern, options).walkSync()\n}\n\n/**\n * Perform an asynchronous glob search for the pattern(s) specified. Returns\n * [Path](https://isaacs.github.io/path-scurry/classes/PathBase) objects if the\n * {@link withFileTypes} option is set to `true`. 
See {@link GlobOptions} for\n * full option descriptions.\n */\nasync function glob_(\n pattern: string | string[],\n options?: GlobOptionsWithFileTypesUnset | undefined\n): Promise\nasync function glob_(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesTrue\n): Promise\nasync function glob_(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesFalse\n): Promise\nasync function glob_(\n pattern: string | string[],\n options: GlobOptions\n): Promise\nasync function glob_(\n pattern: string | string[],\n options: GlobOptions = {}\n) {\n return new Glob(pattern, options).walk()\n}\n\n/**\n * Return a sync iterator for walking glob pattern matches.\n */\nexport function globIterateSync(\n pattern: string | string[],\n options?: GlobOptionsWithFileTypesUnset | undefined\n): Generator\nexport function globIterateSync(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesTrue\n): Generator\nexport function globIterateSync(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesFalse\n): Generator\nexport function globIterateSync(\n pattern: string | string[],\n options: GlobOptions\n): Generator | Generator\nexport function globIterateSync(\n pattern: string | string[],\n options: GlobOptions = {}\n) {\n return new Glob(pattern, options).iterateSync()\n}\n\n/**\n * Return an async iterator for walking glob pattern matches.\n */\nexport function globIterate(\n pattern: string | string[],\n options?: GlobOptionsWithFileTypesUnset | undefined\n): AsyncGenerator\nexport function globIterate(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesTrue\n): AsyncGenerator\nexport function globIterate(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesFalse\n): AsyncGenerator\nexport function globIterate(\n pattern: string | string[],\n options: GlobOptions\n): AsyncGenerator | AsyncGenerator\nexport function globIterate(\n pattern: string | string[],\n options: GlobOptions = {}\n) {\n return new Glob(pattern, options).iterate()\n}\n\n// aliases: glob.sync.stream() glob.stream.sync() glob.sync() etc\nexport const streamSync = globStreamSync\nexport const stream = Object.assign(globStream, { sync: globStreamSync })\nexport const iterateSync = globIterateSync\nexport const iterate = Object.assign(globIterate, {\n sync: globIterateSync,\n})\nexport const sync = Object.assign(globSync, {\n stream: globStreamSync,\n iterate: globIterateSync,\n})\n\n/* c8 ignore start */\nexport { escape, unescape } from 'minimatch'\nexport { Glob } from './glob.js'\nexport type {\n GlobOptions,\n GlobOptionsWithFileTypesFalse,\n GlobOptionsWithFileTypesTrue,\n GlobOptionsWithFileTypesUnset,\n} from './glob.js'\nexport { hasMagic } from './has-magic.js'\nexport type { IgnoreLike } from './ignore.js'\nexport type { MatchStream } from './walker.js'\n/* c8 ignore stop */\n\nexport const glob = Object.assign(glob_, {\n glob: glob_,\n globSync,\n sync,\n globStream,\n stream,\n globStreamSync,\n streamSync,\n globIterate,\n iterate,\n globIterateSync,\n iterateSync,\n Glob,\n hasMagic,\n escape,\n unescape,\n})\nglob.glob = glob\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/package.json b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/package.json new file mode 100644 index 00000000000000..5cc80943d565b7 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/package.json @@ -0,0 +1,4 @@ +{ + "version": 
"10.3.3", + "type": "module" +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/pattern.d.ts b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/pattern.d.ts new file mode 100644 index 00000000000000..109cc4e7a5dae3 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/pattern.d.ts @@ -0,0 +1,77 @@ +/// +import { GLOBSTAR } from 'minimatch'; +export type MMPattern = string | RegExp | typeof GLOBSTAR; +export type PatternList = [p: MMPattern, ...rest: MMPattern[]]; +export type UNCPatternList = [ + p0: '', + p1: '', + p2: string, + p3: string, + ...rest: MMPattern[] +]; +export type DrivePatternList = [p0: string, ...rest: MMPattern[]]; +export type AbsolutePatternList = [p0: '', ...rest: MMPattern[]]; +export type GlobList = [p: string, ...rest: string[]]; +/** + * An immutable-ish view on an array of glob parts and their parsed + * results + */ +export declare class Pattern { + #private; + readonly length: number; + constructor(patternList: MMPattern[], globList: string[], index: number, platform: NodeJS.Platform); + /** + * The first entry in the parsed list of patterns + */ + pattern(): MMPattern; + /** + * true of if pattern() returns a string + */ + isString(): boolean; + /** + * true of if pattern() returns GLOBSTAR + */ + isGlobstar(): boolean; + /** + * true if pattern() returns a regexp + */ + isRegExp(): boolean; + /** + * The /-joined set of glob parts that make up this pattern + */ + globString(): string; + /** + * true if there are more pattern parts after this one + */ + hasMore(): boolean; + /** + * The rest of the pattern after this part, or null if this is the end + */ + rest(): Pattern | null; + /** + * true if the pattern represents a //unc/path/ on windows + */ + isUNC(): boolean; + /** + * True if the pattern starts with a drive letter on Windows + */ + isDrive(): boolean; + /** + * True if the pattern is rooted on an absolute path + */ + isAbsolute(): boolean; + /** + * consume the root of the pattern, and return it + */ + root(): string; + /** + * Check to see if the current globstar pattern is allowed to follow + * a symbolic link. 
+ */ + checkFollowGlobstar(): boolean; + /** + * Mark that the current globstar pattern is following a symbolic link + */ + markFollowGlobstar(): boolean; +} +//# sourceMappingURL=pattern.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/pattern.d.ts.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/pattern.d.ts.map new file mode 100644 index 00000000000000..1430a77dadbbe4 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/pattern.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"pattern.d.ts","sourceRoot":"","sources":["../../src/pattern.ts"],"names":[],"mappings":";AAEA,OAAO,EAAE,QAAQ,EAAE,MAAM,WAAW,CAAA;AACpC,MAAM,MAAM,SAAS,GAAG,MAAM,GAAG,MAAM,GAAG,OAAO,QAAQ,CAAA;AAGzD,MAAM,MAAM,WAAW,GAAG,CAAC,CAAC,EAAE,SAAS,EAAE,GAAG,IAAI,EAAE,SAAS,EAAE,CAAC,CAAA;AAC9D,MAAM,MAAM,cAAc,GAAG;IAC3B,EAAE,EAAE,EAAE;IACN,EAAE,EAAE,EAAE;IACN,EAAE,EAAE,MAAM;IACV,EAAE,EAAE,MAAM;IACV,GAAG,IAAI,EAAE,SAAS,EAAE;CACrB,CAAA;AACD,MAAM,MAAM,gBAAgB,GAAG,CAAC,EAAE,EAAE,MAAM,EAAE,GAAG,IAAI,EAAE,SAAS,EAAE,CAAC,CAAA;AACjE,MAAM,MAAM,mBAAmB,GAAG,CAAC,EAAE,EAAE,EAAE,EAAE,GAAG,IAAI,EAAE,SAAS,EAAE,CAAC,CAAA;AAChE,MAAM,MAAM,QAAQ,GAAG,CAAC,CAAC,EAAE,MAAM,EAAE,GAAG,IAAI,EAAE,MAAM,EAAE,CAAC,CAAA;AAMrD;;;GAGG;AACH,qBAAa,OAAO;;IAIlB,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAA;gBAUrB,WAAW,EAAE,SAAS,EAAE,EACxB,QAAQ,EAAE,MAAM,EAAE,EAClB,KAAK,EAAE,MAAM,EACb,QAAQ,EAAE,MAAM,CAAC,QAAQ;IA6D3B;;OAEG;IACH,OAAO,IAAI,SAAS;IAIpB;;OAEG;IACH,QAAQ,IAAI,OAAO;IAGnB;;OAEG;IACH,UAAU,IAAI,OAAO;IAGrB;;OAEG;IACH,QAAQ,IAAI,OAAO;IAInB;;OAEG;IACH,UAAU,IAAI,MAAM;IAUpB;;OAEG;IACH,OAAO,IAAI,OAAO;IAIlB;;OAEG;IACH,IAAI,IAAI,OAAO,GAAG,IAAI;IAetB;;OAEG;IACH,KAAK,IAAI,OAAO;IAoBhB;;OAEG;IACH,OAAO,IAAI,OAAO;IAelB;;OAEG;IACH,UAAU,IAAI,OAAO;IAUrB;;OAEG;IACH,IAAI,IAAI,MAAM;IAOd;;;OAGG;IACH,mBAAmB,IAAI,OAAO;IAQ9B;;OAEG;IACH,kBAAkB,IAAI,OAAO;CAM9B"} \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/pattern.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/pattern.js new file mode 100644 index 00000000000000..60aa415d92fd12 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/pattern.js @@ -0,0 +1,215 @@ +// this is just a very light wrapper around 2 arrays with an offset index +import { GLOBSTAR } from 'minimatch'; +const isPatternList = (pl) => pl.length >= 1; +const isGlobList = (gl) => gl.length >= 1; +/** + * An immutable-ish view on an array of glob parts and their parsed + * results + */ +export class Pattern { + #patternList; + #globList; + #index; + length; + #platform; + #rest; + #globString; + #isDrive; + #isUNC; + #isAbsolute; + #followGlobstar = true; + constructor(patternList, globList, index, platform) { + if (!isPatternList(patternList)) { + throw new TypeError('empty pattern list'); + } + if (!isGlobList(globList)) { + throw new TypeError('empty glob list'); + } + if (globList.length !== patternList.length) { + throw new TypeError('mismatched pattern list and glob list lengths'); + } + this.length = patternList.length; + if (index < 0 || index >= this.length) { + throw new TypeError('index out of range'); + } + this.#patternList = patternList; + this.#globList = globList; + this.#index = index; + this.#platform = platform; + // normalize root entries of absolute patterns on initial creation. 
+ if (this.#index === 0) { + // c: => ['c:/'] + // C:/ => ['C:/'] + // C:/x => ['C:/', 'x'] + // //host/share => ['//host/share/'] + // //host/share/ => ['//host/share/'] + // //host/share/x => ['//host/share/', 'x'] + // /etc => ['/', 'etc'] + // / => ['/'] + if (this.isUNC()) { + // '' / '' / 'host' / 'share' + const [p0, p1, p2, p3, ...prest] = this.#patternList; + const [g0, g1, g2, g3, ...grest] = this.#globList; + if (prest[0] === '') { + // ends in / + prest.shift(); + grest.shift(); + } + const p = [p0, p1, p2, p3, ''].join('/'); + const g = [g0, g1, g2, g3, ''].join('/'); + this.#patternList = [p, ...prest]; + this.#globList = [g, ...grest]; + this.length = this.#patternList.length; + } + else if (this.isDrive() || this.isAbsolute()) { + const [p1, ...prest] = this.#patternList; + const [g1, ...grest] = this.#globList; + if (prest[0] === '') { + // ends in / + prest.shift(); + grest.shift(); + } + const p = p1 + '/'; + const g = g1 + '/'; + this.#patternList = [p, ...prest]; + this.#globList = [g, ...grest]; + this.length = this.#patternList.length; + } + } + } + /** + * The first entry in the parsed list of patterns + */ + pattern() { + return this.#patternList[this.#index]; + } + /** + * true of if pattern() returns a string + */ + isString() { + return typeof this.#patternList[this.#index] === 'string'; + } + /** + * true of if pattern() returns GLOBSTAR + */ + isGlobstar() { + return this.#patternList[this.#index] === GLOBSTAR; + } + /** + * true if pattern() returns a regexp + */ + isRegExp() { + return this.#patternList[this.#index] instanceof RegExp; + } + /** + * The /-joined set of glob parts that make up this pattern + */ + globString() { + return (this.#globString = + this.#globString || + (this.#index === 0 + ? this.isAbsolute() + ? this.#globList[0] + this.#globList.slice(1).join('/') + : this.#globList.join('/') + : this.#globList.slice(this.#index).join('/'))); + } + /** + * true if there are more pattern parts after this one + */ + hasMore() { + return this.length > this.#index + 1; + } + /** + * The rest of the pattern after this part, or null if this is the end + */ + rest() { + if (this.#rest !== undefined) + return this.#rest; + if (!this.hasMore()) + return (this.#rest = null); + this.#rest = new Pattern(this.#patternList, this.#globList, this.#index + 1, this.#platform); + this.#rest.#isAbsolute = this.#isAbsolute; + this.#rest.#isUNC = this.#isUNC; + this.#rest.#isDrive = this.#isDrive; + return this.#rest; + } + /** + * true if the pattern represents a //unc/path/ on windows + */ + isUNC() { + const pl = this.#patternList; + return this.#isUNC !== undefined + ? this.#isUNC + : (this.#isUNC = + this.#platform === 'win32' && + this.#index === 0 && + pl[0] === '' && + pl[1] === '' && + typeof pl[2] === 'string' && + !!pl[2] && + typeof pl[3] === 'string' && + !!pl[3]); + } + // pattern like C:/... + // split = ['C:', ...] + // XXX: would be nice to handle patterns like `c:*` to test the cwd + // in c: for *, but I don't know of a way to even figure out what that + // cwd is without actually chdir'ing into it? + /** + * True if the pattern starts with a drive letter on Windows + */ + isDrive() { + const pl = this.#patternList; + return this.#isDrive !== undefined + ? this.#isDrive + : (this.#isDrive = + this.#platform === 'win32' && + this.#index === 0 && + this.length > 1 && + typeof pl[0] === 'string' && + /^[a-z]:$/i.test(pl[0])); + } + // pattern = '/' or '/...' or '/x/...' + // split = ['', ''] or ['', ...] or ['', 'x', ...] 
+ // Drive and UNC both considered absolute on windows + /** + * True if the pattern is rooted on an absolute path + */ + isAbsolute() { + const pl = this.#patternList; + return this.#isAbsolute !== undefined + ? this.#isAbsolute + : (this.#isAbsolute = + (pl[0] === '' && pl.length > 1) || + this.isDrive() || + this.isUNC()); + } + /** + * consume the root of the pattern, and return it + */ + root() { + const p = this.#patternList[0]; + return typeof p === 'string' && this.isAbsolute() && this.#index === 0 + ? p + : ''; + } + /** + * Check to see if the current globstar pattern is allowed to follow + * a symbolic link. + */ + checkFollowGlobstar() { + return !(this.#index === 0 || + !this.isGlobstar() || + !this.#followGlobstar); + } + /** + * Mark that the current globstar pattern is following a symbolic link + */ + markFollowGlobstar() { + if (this.#index === 0 || !this.isGlobstar() || !this.#followGlobstar) + return false; + this.#followGlobstar = false; + return true; + } +} +//# sourceMappingURL=pattern.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/pattern.js.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/pattern.js.map new file mode 100644 index 00000000000000..bb039c142107fc --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/pattern.js.map @@ -0,0 +1 @@ +{"version":3,"file":"pattern.js","sourceRoot":"","sources":["../../src/pattern.ts"],"names":[],"mappings":"AAAA,yEAAyE;AAEzE,OAAO,EAAE,QAAQ,EAAE,MAAM,WAAW,CAAA;AAgBpC,MAAM,aAAa,GAAG,CAAC,EAAe,EAAqB,EAAE,CAC3D,EAAE,CAAC,MAAM,IAAI,CAAC,CAAA;AAChB,MAAM,UAAU,GAAG,CAAC,EAAY,EAAkB,EAAE,CAAC,EAAE,CAAC,MAAM,IAAI,CAAC,CAAA;AAEnE;;;GAGG;AACH,MAAM,OAAO,OAAO;IACT,YAAY,CAAa;IACzB,SAAS,CAAU;IACnB,MAAM,CAAQ;IACd,MAAM,CAAQ;IACd,SAAS,CAAiB;IACnC,KAAK,CAAiB;IACtB,WAAW,CAAS;IACpB,QAAQ,CAAU;IAClB,MAAM,CAAU;IAChB,WAAW,CAAU;IACrB,eAAe,GAAY,IAAI,CAAA;IAE/B,YACE,WAAwB,EACxB,QAAkB,EAClB,KAAa,EACb,QAAyB;QAEzB,IAAI,CAAC,aAAa,CAAC,WAAW,CAAC,EAAE;YAC/B,MAAM,IAAI,SAAS,CAAC,oBAAoB,CAAC,CAAA;SAC1C;QACD,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE;YACzB,MAAM,IAAI,SAAS,CAAC,iBAAiB,CAAC,CAAA;SACvC;QACD,IAAI,QAAQ,CAAC,MAAM,KAAK,WAAW,CAAC,MAAM,EAAE;YAC1C,MAAM,IAAI,SAAS,CAAC,+CAA+C,CAAC,CAAA;SACrE;QACD,IAAI,CAAC,MAAM,GAAG,WAAW,CAAC,MAAM,CAAA;QAChC,IAAI,KAAK,GAAG,CAAC,IAAI,KAAK,IAAI,IAAI,CAAC,MAAM,EAAE;YACrC,MAAM,IAAI,SAAS,CAAC,oBAAoB,CAAC,CAAA;SAC1C;QACD,IAAI,CAAC,YAAY,GAAG,WAAW,CAAA;QAC/B,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAA;QACzB,IAAI,CAAC,MAAM,GAAG,KAAK,CAAA;QACnB,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAA;QAEzB,mEAAmE;QACnE,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE;YACrB,gBAAgB;YAChB,iBAAiB;YACjB,uBAAuB;YACvB,oCAAoC;YACpC,qCAAqC;YACrC,2CAA2C;YAC3C,uBAAuB;YACvB,aAAa;YACb,IAAI,IAAI,CAAC,KAAK,EAAE,EAAE;gBAChB,6BAA6B;gBAC7B,MAAM,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,GAAG,KAAK,CAAC,GAAG,IAAI,CAAC,YAAY,CAAA;gBACpD,MAAM,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,GAAG,KAAK,CAAC,GAAG,IAAI,CAAC,SAAS,CAAA;gBACjD,IAAI,KAAK,CAAC,CAAC,CAAC,KAAK,EAAE,EAAE;oBACnB,YAAY;oBACZ,KAAK,CAAC,KAAK,EAAE,CAAA;oBACb,KAAK,CAAC,KAAK,EAAE,CAAA;iBACd;gBACD,MAAM,CAAC,GAAG,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;gBACxC,MAAM,CAAC,GAAG,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;gBACxC,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC,EAAE,GAAG,KAAK,CAAC,CAAA;gBACjC,IAAI,CAAC,SAAS,GAAG,CAAC,CAAC,EAAE,GAAG,KAAK,CAAC,CAAA;gBAC9B,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,YAAY,CAAC,MAAM,CAAA;aACvC;iBAAM,IAAI,IAAI,CAA
C,OAAO,EAAE,IAAI,IAAI,CAAC,UAAU,EAAE,EAAE;gBAC9C,MAAM,CAAC,EAAE,EAAE,GAAG,KAAK,CAAC,GAAG,IAAI,CAAC,YAAY,CAAA;gBACxC,MAAM,CAAC,EAAE,EAAE,GAAG,KAAK,CAAC,GAAG,IAAI,CAAC,SAAS,CAAA;gBACrC,IAAI,KAAK,CAAC,CAAC,CAAC,KAAK,EAAE,EAAE;oBACnB,YAAY;oBACZ,KAAK,CAAC,KAAK,EAAE,CAAA;oBACb,KAAK,CAAC,KAAK,EAAE,CAAA;iBACd;gBACD,MAAM,CAAC,GAAI,EAAa,GAAG,GAAG,CAAA;gBAC9B,MAAM,CAAC,GAAG,EAAE,GAAG,GAAG,CAAA;gBAClB,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC,EAAE,GAAG,KAAK,CAAC,CAAA;gBACjC,IAAI,CAAC,SAAS,GAAG,CAAC,CAAC,EAAE,GAAG,KAAK,CAAC,CAAA;gBAC9B,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,YAAY,CAAC,MAAM,CAAA;aACvC;SACF;IACH,CAAC;IAED;;OAEG;IACH,OAAO;QACL,OAAO,IAAI,CAAC,YAAY,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;IACvC,CAAC;IAED;;OAEG;IACH,QAAQ;QACN,OAAO,OAAO,IAAI,CAAC,YAAY,CAAC,IAAI,CAAC,MAAM,CAAC,KAAK,QAAQ,CAAA;IAC3D,CAAC;IACD;;OAEG;IACH,UAAU;QACR,OAAO,IAAI,CAAC,YAAY,CAAC,IAAI,CAAC,MAAM,CAAC,KAAK,QAAQ,CAAA;IACpD,CAAC;IACD;;OAEG;IACH,QAAQ;QACN,OAAO,IAAI,CAAC,YAAY,CAAC,IAAI,CAAC,MAAM,CAAC,YAAY,MAAM,CAAA;IACzD,CAAC;IAED;;OAEG;IACH,UAAU;QACR,OAAO,CAAC,IAAI,CAAC,WAAW;YACtB,IAAI,CAAC,WAAW;gBAChB,CAAC,IAAI,CAAC,MAAM,KAAK,CAAC;oBAChB,CAAC,CAAC,IAAI,CAAC,UAAU,EAAE;wBACjB,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC;wBACvD,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,CAAC;oBAC5B,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;IACrD,CAAC;IAED;;OAEG;IACH,OAAO;QACL,OAAO,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,GAAG,CAAC,CAAA;IACtC,CAAC;IAED;;OAEG;IACH,IAAI;QACF,IAAI,IAAI,CAAC,KAAK,KAAK,SAAS;YAAE,OAAO,IAAI,CAAC,KAAK,CAAA;QAC/C,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE;YAAE,OAAO,CAAC,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,CAAA;QAC/C,IAAI,CAAC,KAAK,GAAG,IAAI,OAAO,CACtB,IAAI,CAAC,YAAY,EACjB,IAAI,CAAC,SAAS,EACd,IAAI,CAAC,MAAM,GAAG,CAAC,EACf,IAAI,CAAC,SAAS,CACf,CAAA;QACD,IAAI,CAAC,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC,WAAW,CAAA;QACzC,IAAI,CAAC,KAAK,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;QAC/B,IAAI,CAAC,KAAK,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAA;QACnC,OAAO,IAAI,CAAC,KAAK,CAAA;IACnB,CAAC;IAED;;OAEG;IACH,KAAK;QACH,MAAM,EAAE,GAAG,IAAI,CAAC,YAAY,CAAA;QAC5B,OAAO,IAAI,CAAC,MAAM,KAAK,SAAS;YAC9B,CAAC,CAAC,IAAI,CAAC,MAAM;YACb,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM;gBACV,IAAI,CAAC,SAAS,KAAK,OAAO;oBAC1B,IAAI,CAAC,MAAM,KAAK,CAAC;oBACjB,EAAE,CAAC,CAAC,CAAC,KAAK,EAAE;oBACZ,EAAE,CAAC,CAAC,CAAC,KAAK,EAAE;oBACZ,OAAO,EAAE,CAAC,CAAC,CAAC,KAAK,QAAQ;oBACzB,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;oBACP,OAAO,EAAE,CAAC,CAAC,CAAC,KAAK,QAAQ;oBACzB,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAA;IAChB,CAAC;IAED,sBAAsB;IACtB,sBAAsB;IACtB,mEAAmE;IACnE,sEAAsE;IACtE,6CAA6C;IAC7C;;OAEG;IACH,OAAO;QACL,MAAM,EAAE,GAAG,IAAI,CAAC,YAAY,CAAA;QAC5B,OAAO,IAAI,CAAC,QAAQ,KAAK,SAAS;YAChC,CAAC,CAAC,IAAI,CAAC,QAAQ;YACf,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ;gBACZ,IAAI,CAAC,SAAS,KAAK,OAAO;oBAC1B,IAAI,CAAC,MAAM,KAAK,CAAC;oBACjB,IAAI,CAAC,MAAM,GAAG,CAAC;oBACf,OAAO,EAAE,CAAC,CAAC,CAAC,KAAK,QAAQ;oBACzB,WAAW,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;IAChC,CAAC;IAED,sCAAsC;IACtC,kDAAkD;IAClD,oDAAoD;IACpD;;OAEG;IACH,UAAU;QACR,MAAM,EAAE,GAAG,IAAI,CAAC,YAAY,CAAA;QAC5B,OAAO,IAAI,CAAC,WAAW,KAAK,SAAS;YACnC,CAAC,CAAC,IAAI,CAAC,WAAW;YAClB,CAAC,CAAC,CAAC,IAAI,CAAC,WAAW;gBACf,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,EAAE,IAAI,EAAE,CAAC,MAAM,GAAG,CAAC,CAAC;oBAC/B,IAAI,CAAC,OAAO,EAAE;oBACd,IAAI,CAAC,KAAK,EAAE,CAAC,CAAA;IACrB,CAAC;IAED;;OAEG;IACH,IAAI;QACF,MAAM,CAAC,GAAG,IAAI,CAAC,YAAY,CAAC,CAAC,CAAC,CAAA;QAC9B,OAAO,OAAO,CAAC,KAAK,QAAQ,IAAI,IAAI,CAAC,UAAU,EAAE,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC;YACpE,CAAC,CAAC,CAAC;YACH,CAAC,CAAC,EAAE,CAAA;IACR,CAAC;IAED;;;OAGG;IACH,mBAAmB;QACjB,OAAO,CAAC,CACN,IAAI,C
AAC,MAAM,KAAK,CAAC;YACjB,CAAC,IAAI,CAAC,UAAU,EAAE;YAClB,CAAC,IAAI,CAAC,eAAe,CACtB,CAAA;IACH,CAAC;IAED;;OAEG;IACH,kBAAkB;QAChB,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,IAAI,CAAC,eAAe;YAClE,OAAO,KAAK,CAAA;QACd,IAAI,CAAC,eAAe,GAAG,KAAK,CAAA;QAC5B,OAAO,IAAI,CAAA;IACb,CAAC;CACF","sourcesContent":["// this is just a very light wrapper around 2 arrays with an offset index\n\nimport { GLOBSTAR } from 'minimatch'\nexport type MMPattern = string | RegExp | typeof GLOBSTAR\n\n// an array of length >= 1\nexport type PatternList = [p: MMPattern, ...rest: MMPattern[]]\nexport type UNCPatternList = [\n p0: '',\n p1: '',\n p2: string,\n p3: string,\n ...rest: MMPattern[]\n]\nexport type DrivePatternList = [p0: string, ...rest: MMPattern[]]\nexport type AbsolutePatternList = [p0: '', ...rest: MMPattern[]]\nexport type GlobList = [p: string, ...rest: string[]]\n\nconst isPatternList = (pl: MMPattern[]): pl is PatternList =>\n pl.length >= 1\nconst isGlobList = (gl: string[]): gl is GlobList => gl.length >= 1\n\n/**\n * An immutable-ish view on an array of glob parts and their parsed\n * results\n */\nexport class Pattern {\n readonly #patternList: PatternList\n readonly #globList: GlobList\n readonly #index: number\n readonly length: number\n readonly #platform: NodeJS.Platform\n #rest?: Pattern | null\n #globString?: string\n #isDrive?: boolean\n #isUNC?: boolean\n #isAbsolute?: boolean\n #followGlobstar: boolean = true\n\n constructor(\n patternList: MMPattern[],\n globList: string[],\n index: number,\n platform: NodeJS.Platform\n ) {\n if (!isPatternList(patternList)) {\n throw new TypeError('empty pattern list')\n }\n if (!isGlobList(globList)) {\n throw new TypeError('empty glob list')\n }\n if (globList.length !== patternList.length) {\n throw new TypeError('mismatched pattern list and glob list lengths')\n }\n this.length = patternList.length\n if (index < 0 || index >= this.length) {\n throw new TypeError('index out of range')\n }\n this.#patternList = patternList\n this.#globList = globList\n this.#index = index\n this.#platform = platform\n\n // normalize root entries of absolute patterns on initial creation.\n if (this.#index === 0) {\n // c: => ['c:/']\n // C:/ => ['C:/']\n // C:/x => ['C:/', 'x']\n // //host/share => ['//host/share/']\n // //host/share/ => ['//host/share/']\n // //host/share/x => ['//host/share/', 'x']\n // /etc => ['/', 'etc']\n // / => ['/']\n if (this.isUNC()) {\n // '' / '' / 'host' / 'share'\n const [p0, p1, p2, p3, ...prest] = this.#patternList\n const [g0, g1, g2, g3, ...grest] = this.#globList\n if (prest[0] === '') {\n // ends in /\n prest.shift()\n grest.shift()\n }\n const p = [p0, p1, p2, p3, ''].join('/')\n const g = [g0, g1, g2, g3, ''].join('/')\n this.#patternList = [p, ...prest]\n this.#globList = [g, ...grest]\n this.length = this.#patternList.length\n } else if (this.isDrive() || this.isAbsolute()) {\n const [p1, ...prest] = this.#patternList\n const [g1, ...grest] = this.#globList\n if (prest[0] === '') {\n // ends in /\n prest.shift()\n grest.shift()\n }\n const p = (p1 as string) + '/'\n const g = g1 + '/'\n this.#patternList = [p, ...prest]\n this.#globList = [g, ...grest]\n this.length = this.#patternList.length\n }\n }\n }\n\n /**\n * The first entry in the parsed list of patterns\n */\n pattern(): MMPattern {\n return this.#patternList[this.#index]\n }\n\n /**\n * true of if pattern() returns a string\n */\n isString(): boolean {\n return typeof this.#patternList[this.#index] === 'string'\n }\n /**\n * true of if pattern() 
returns GLOBSTAR\n */\n isGlobstar(): boolean {\n return this.#patternList[this.#index] === GLOBSTAR\n }\n /**\n * true if pattern() returns a regexp\n */\n isRegExp(): boolean {\n return this.#patternList[this.#index] instanceof RegExp\n }\n\n /**\n * The /-joined set of glob parts that make up this pattern\n */\n globString(): string {\n return (this.#globString =\n this.#globString ||\n (this.#index === 0\n ? this.isAbsolute()\n ? this.#globList[0] + this.#globList.slice(1).join('/')\n : this.#globList.join('/')\n : this.#globList.slice(this.#index).join('/')))\n }\n\n /**\n * true if there are more pattern parts after this one\n */\n hasMore(): boolean {\n return this.length > this.#index + 1\n }\n\n /**\n * The rest of the pattern after this part, or null if this is the end\n */\n rest(): Pattern | null {\n if (this.#rest !== undefined) return this.#rest\n if (!this.hasMore()) return (this.#rest = null)\n this.#rest = new Pattern(\n this.#patternList,\n this.#globList,\n this.#index + 1,\n this.#platform\n )\n this.#rest.#isAbsolute = this.#isAbsolute\n this.#rest.#isUNC = this.#isUNC\n this.#rest.#isDrive = this.#isDrive\n return this.#rest\n }\n\n /**\n * true if the pattern represents a //unc/path/ on windows\n */\n isUNC(): boolean {\n const pl = this.#patternList\n return this.#isUNC !== undefined\n ? this.#isUNC\n : (this.#isUNC =\n this.#platform === 'win32' &&\n this.#index === 0 &&\n pl[0] === '' &&\n pl[1] === '' &&\n typeof pl[2] === 'string' &&\n !!pl[2] &&\n typeof pl[3] === 'string' &&\n !!pl[3])\n }\n\n // pattern like C:/...\n // split = ['C:', ...]\n // XXX: would be nice to handle patterns like `c:*` to test the cwd\n // in c: for *, but I don't know of a way to even figure out what that\n // cwd is without actually chdir'ing into it?\n /**\n * True if the pattern starts with a drive letter on Windows\n */\n isDrive(): boolean {\n const pl = this.#patternList\n return this.#isDrive !== undefined\n ? this.#isDrive\n : (this.#isDrive =\n this.#platform === 'win32' &&\n this.#index === 0 &&\n this.length > 1 &&\n typeof pl[0] === 'string' &&\n /^[a-z]:$/i.test(pl[0]))\n }\n\n // pattern = '/' or '/...' or '/x/...'\n // split = ['', ''] or ['', ...] or ['', 'x', ...]\n // Drive and UNC both considered absolute on windows\n /**\n * True if the pattern is rooted on an absolute path\n */\n isAbsolute(): boolean {\n const pl = this.#patternList\n return this.#isAbsolute !== undefined\n ? this.#isAbsolute\n : (this.#isAbsolute =\n (pl[0] === '' && pl.length > 1) ||\n this.isDrive() ||\n this.isUNC())\n }\n\n /**\n * consume the root of the pattern, and return it\n */\n root(): string {\n const p = this.#patternList[0]\n return typeof p === 'string' && this.isAbsolute() && this.#index === 0\n ? 
p\n : ''\n }\n\n /**\n * Check to see if the current globstar pattern is allowed to follow\n * a symbolic link.\n */\n checkFollowGlobstar(): boolean {\n return !(\n this.#index === 0 ||\n !this.isGlobstar() ||\n !this.#followGlobstar\n )\n }\n\n /**\n * Mark that the current globstar pattern is following a symbolic link\n */\n markFollowGlobstar(): boolean {\n if (this.#index === 0 || !this.isGlobstar() || !this.#followGlobstar)\n return false\n this.#followGlobstar = false\n return true\n }\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/processor.d.ts b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/processor.d.ts new file mode 100644 index 00000000000000..ccedfbf2820f7d --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/processor.d.ts @@ -0,0 +1,59 @@ +import { MMRegExp } from 'minimatch'; +import { Path } from 'path-scurry'; +import { Pattern } from './pattern.js'; +import { GlobWalkerOpts } from './walker.js'; +/** + * A cache of which patterns have been processed for a given Path + */ +export declare class HasWalkedCache { + store: Map>; + constructor(store?: Map>); + copy(): HasWalkedCache; + hasWalked(target: Path, pattern: Pattern): boolean | undefined; + storeWalked(target: Path, pattern: Pattern): void; +} +/** + * A record of which paths have been matched in a given walk step, + * and whether they only are considered a match if they are a directory, + * and whether their absolute or relative path should be returned. + */ +export declare class MatchRecord { + store: Map; + add(target: Path, absolute: boolean, ifDir: boolean): void; + entries(): [Path, boolean, boolean][]; +} +/** + * A collection of patterns that must be processed in a subsequent step + * for a given path. + */ +export declare class SubWalks { + store: Map; + add(target: Path, pattern: Pattern): void; + get(target: Path): Pattern[]; + entries(): [Path, Pattern[]][]; + keys(): Path[]; +} +/** + * The class that processes patterns for a given path. + * + * Handles child entry filtering, and determining whether a path's + * directory contents must be read. 
+ */ +export declare class Processor { + hasWalkedCache: HasWalkedCache; + matches: MatchRecord; + subwalks: SubWalks; + patterns?: Pattern[]; + follow: boolean; + dot: boolean; + opts: GlobWalkerOpts; + constructor(opts: GlobWalkerOpts, hasWalkedCache?: HasWalkedCache); + processPatterns(target: Path, patterns: Pattern[]): this; + subwalkTargets(): Path[]; + child(): Processor; + filterEntries(parent: Path, entries: Path[]): Processor; + testGlobstar(e: Path, pattern: Pattern, rest: Pattern | null, absolute: boolean): void; + testRegExp(e: Path, p: MMRegExp, rest: Pattern | null, absolute: boolean): void; + testString(e: Path, p: string, rest: Pattern | null, absolute: boolean): void; +} +//# sourceMappingURL=processor.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/processor.d.ts.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/processor.d.ts.map new file mode 100644 index 00000000000000..75d92efe28cb1d --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/processor.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"processor.d.ts","sourceRoot":"","sources":["../../src/processor.ts"],"names":[],"mappings":"AAEA,OAAO,EAAY,QAAQ,EAAE,MAAM,WAAW,CAAA;AAC9C,OAAO,EAAE,IAAI,EAAE,MAAM,aAAa,CAAA;AAClC,OAAO,EAAa,OAAO,EAAE,MAAM,cAAc,CAAA;AACjD,OAAO,EAAE,cAAc,EAAE,MAAM,aAAa,CAAA;AAE5C;;GAEG;AACH,qBAAa,cAAc;IACzB,KAAK,EAAE,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,MAAM,CAAC,CAAC,CAAA;gBACnB,KAAK,GAAE,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,MAAM,CAAC,CAAa;IAGvD,IAAI;IAGJ,SAAS,CAAC,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO;IAGxC,WAAW,CAAC,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO;CAM3C;AAED;;;;GAIG;AACH,qBAAa,WAAW;IACtB,KAAK,EAAE,GAAG,CAAC,IAAI,EAAE,MAAM,CAAC,CAAY;IACpC,GAAG,CAAC,MAAM,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,KAAK,EAAE,OAAO;IAMnD,OAAO,IAAI,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE;CAOtC;AAED;;;GAGG;AACH,qBAAa,QAAQ;IACnB,KAAK,EAAE,GAAG,CAAC,IAAI,EAAE,OAAO,EAAE,CAAC,CAAY;IACvC,GAAG,CAAC,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO;IAWlC,GAAG,CAAC,MAAM,EAAE,IAAI,GAAG,OAAO,EAAE;IAS5B,OAAO,IAAI,CAAC,IAAI,EAAE,OAAO,EAAE,CAAC,EAAE;IAG9B,IAAI,IAAI,IAAI,EAAE;CAGf;AAED;;;;;GAKG;AACH,qBAAa,SAAS;IACpB,cAAc,EAAE,cAAc,CAAA;IAC9B,OAAO,cAAoB;IAC3B,QAAQ,WAAiB;IACzB,QAAQ,CAAC,EAAE,OAAO,EAAE,CAAA;IACpB,MAAM,EAAE,OAAO,CAAA;IACf,GAAG,EAAE,OAAO,CAAA;IACZ,IAAI,EAAE,cAAc,CAAA;gBAER,IAAI,EAAE,cAAc,EAAE,cAAc,CAAC,EAAE,cAAc;IASjE,eAAe,CAAC,MAAM,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE;IAwGjD,cAAc,IAAI,IAAI,EAAE;IAIxB,KAAK;IAQL,aAAa,CAAC,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,IAAI,EAAE,GAAG,SAAS;IAqBvD,YAAY,CACV,CAAC,EAAE,IAAI,EACP,OAAO,EAAE,OAAO,EAChB,IAAI,EAAE,OAAO,GAAG,IAAI,EACpB,QAAQ,EAAE,OAAO;IA8CnB,UAAU,CACR,CAAC,EAAE,IAAI,EACP,CAAC,EAAE,QAAQ,EACX,IAAI,EAAE,OAAO,GAAG,IAAI,EACpB,QAAQ,EAAE,OAAO;IAUnB,UAAU,CAAC,CAAC,EAAE,IAAI,EAAE,CAAC,EAAE,MAAM,EAAE,IAAI,EAAE,OAAO,GAAG,IAAI,EAAE,QAAQ,EAAE,OAAO;CASvE"} \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/processor.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/processor.js new file mode 100644 index 00000000000000..dd2228ad6761a5 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/processor.js @@ -0,0 +1,302 @@ +// synchronous utility for filtering entries and calculating subwalks +import { GLOBSTAR } from 'minimatch'; +/** + * A cache of which patterns have been processed for a given Path + */ +export class HasWalkedCache { + 
store; + constructor(store = new Map()) { + this.store = store; + } + copy() { + return new HasWalkedCache(new Map(this.store)); + } + hasWalked(target, pattern) { + return this.store.get(target.fullpath())?.has(pattern.globString()); + } + storeWalked(target, pattern) { + const fullpath = target.fullpath(); + const cached = this.store.get(fullpath); + if (cached) + cached.add(pattern.globString()); + else + this.store.set(fullpath, new Set([pattern.globString()])); + } +} +/** + * A record of which paths have been matched in a given walk step, + * and whether they only are considered a match if they are a directory, + * and whether their absolute or relative path should be returned. + */ +export class MatchRecord { + store = new Map(); + add(target, absolute, ifDir) { + const n = (absolute ? 2 : 0) | (ifDir ? 1 : 0); + const current = this.store.get(target); + this.store.set(target, current === undefined ? n : n & current); + } + // match, absolute, ifdir + entries() { + return [...this.store.entries()].map(([path, n]) => [ + path, + !!(n & 2), + !!(n & 1), + ]); + } +} +/** + * A collection of patterns that must be processed in a subsequent step + * for a given path. + */ +export class SubWalks { + store = new Map(); + add(target, pattern) { + if (!target.canReaddir()) { + return; + } + const subs = this.store.get(target); + if (subs) { + if (!subs.find(p => p.globString() === pattern.globString())) { + subs.push(pattern); + } + } + else + this.store.set(target, [pattern]); + } + get(target) { + const subs = this.store.get(target); + /* c8 ignore start */ + if (!subs) { + throw new Error('attempting to walk unknown path'); + } + /* c8 ignore stop */ + return subs; + } + entries() { + return this.keys().map(k => [k, this.store.get(k)]); + } + keys() { + return [...this.store.keys()].filter(t => t.canReaddir()); + } +} +/** + * The class that processes patterns for a given path. + * + * Handles child entry filtering, and determining whether a path's + * directory contents must be read. + */ +export class Processor { + hasWalkedCache; + matches = new MatchRecord(); + subwalks = new SubWalks(); + patterns; + follow; + dot; + opts; + constructor(opts, hasWalkedCache) { + this.opts = opts; + this.follow = !!opts.follow; + this.dot = !!opts.dot; + this.hasWalkedCache = hasWalkedCache + ? hasWalkedCache.copy() + : new HasWalkedCache(); + } + processPatterns(target, patterns) { + this.patterns = patterns; + const processingSet = patterns.map(p => [target, p]); + // map of paths to the magic-starting subwalks they need to walk + // first item in patterns is the filter + for (let [t, pattern] of processingSet) { + this.hasWalkedCache.storeWalked(t, pattern); + const root = pattern.root(); + const absolute = pattern.isAbsolute() && this.opts.absolute !== false; + // start absolute patterns at root + if (root) { + t = t.resolve(root === '/' && this.opts.root !== undefined + ? this.opts.root + : root); + const rest = pattern.rest(); + if (!rest) { + this.matches.add(t, true, false); + continue; + } + else { + pattern = rest; + } + } + if (t.isENOENT()) + continue; + let p; + let rest; + let changed = false; + while (typeof (p = pattern.pattern()) === 'string' && + (rest = pattern.rest())) { + const c = t.resolve(p); + // we can be reasonably sure that .. 
is a readable dir + if (c.isUnknown() && p !== '..') + break; + t = c; + pattern = rest; + changed = true; + } + p = pattern.pattern(); + rest = pattern.rest(); + if (changed) { + if (this.hasWalkedCache.hasWalked(t, pattern)) + continue; + this.hasWalkedCache.storeWalked(t, pattern); + } + // now we have either a final string for a known entry, + // more strings for an unknown entry, + // or a pattern starting with magic, mounted on t. + if (typeof p === 'string') { + // must be final entry + if (!rest) { + const ifDir = p === '..' || p === '' || p === '.'; + this.matches.add(t.resolve(p), absolute, ifDir); + } + else { + this.subwalks.add(t, pattern); + } + continue; + } + else if (p === GLOBSTAR) { + // if no rest, match and subwalk pattern + // if rest, process rest and subwalk pattern + // if it's a symlink, but we didn't get here by way of a + // globstar match (meaning it's the first time THIS globstar + // has traversed a symlink), then we follow it. Otherwise, stop. + if (!t.isSymbolicLink() || + this.follow || + pattern.checkFollowGlobstar()) { + this.subwalks.add(t, pattern); + } + const rp = rest?.pattern(); + const rrest = rest?.rest(); + if (!rest || ((rp === '' || rp === '.') && !rrest)) { + // only HAS to be a dir if it ends in **/ or **/. + // but ending in ** will match files as well. + this.matches.add(t, absolute, rp === '' || rp === '.'); + } + else { + if (rp === '..') { + // this would mean you're matching **/.. at the fs root, + // and no thanks, I'm not gonna test that specific case. + /* c8 ignore start */ + const tp = t.parent || t; + /* c8 ignore stop */ + if (!rrest) + this.matches.add(tp, absolute, true); + else if (!this.hasWalkedCache.hasWalked(tp, rrest)) { + this.subwalks.add(tp, rrest); + } + } + } + } + else if (p instanceof RegExp) { + this.subwalks.add(t, pattern); + } + } + return this; + } + subwalkTargets() { + return this.subwalks.keys(); + } + child() { + return new Processor(this.opts, this.hasWalkedCache); + } + // return a new Processor containing the subwalks for each + // child entry, and a set of matches, and + // a hasWalkedCache that's a copy of this one + // then we're going to call + filterEntries(parent, entries) { + const patterns = this.subwalks.get(parent); + // put matches and entry walks into the results processor + const results = this.child(); + for (const e of entries) { + for (const pattern of patterns) { + const absolute = pattern.isAbsolute(); + const p = pattern.pattern(); + const rest = pattern.rest(); + if (p === GLOBSTAR) { + results.testGlobstar(e, pattern, rest, absolute); + } + else if (p instanceof RegExp) { + results.testRegExp(e, p, rest, absolute); + } + else { + results.testString(e, p, rest, absolute); + } + } + } + return results; + } + testGlobstar(e, pattern, rest, absolute) { + if (this.dot || !e.name.startsWith('.')) { + if (!pattern.hasMore()) { + this.matches.add(e, absolute, false); + } + if (e.canReaddir()) { + // if we're in follow mode or it's not a symlink, just keep + // testing the same pattern. If there's more after the globstar, + // then this symlink consumes the globstar. If not, then we can + // follow at most ONE symlink along the way, so we mark it, which + // also checks to ensure that it wasn't already marked. 
+ if (this.follow || !e.isSymbolicLink()) { + this.subwalks.add(e, pattern); + } + else if (e.isSymbolicLink()) { + if (rest && pattern.checkFollowGlobstar()) { + this.subwalks.add(e, rest); + } + else if (pattern.markFollowGlobstar()) { + this.subwalks.add(e, pattern); + } + } + } + } + // if the NEXT thing matches this entry, then also add + // the rest. + if (rest) { + const rp = rest.pattern(); + if (typeof rp === 'string' && + // dots and empty were handled already + rp !== '..' && + rp !== '' && + rp !== '.') { + this.testString(e, rp, rest.rest(), absolute); + } + else if (rp === '..') { + /* c8 ignore start */ + const ep = e.parent || e; + /* c8 ignore stop */ + this.subwalks.add(ep, rest); + } + else if (rp instanceof RegExp) { + this.testRegExp(e, rp, rest.rest(), absolute); + } + } + } + testRegExp(e, p, rest, absolute) { + if (!p.test(e.name)) + return; + if (!rest) { + this.matches.add(e, absolute, false); + } + else { + this.subwalks.add(e, rest); + } + } + testString(e, p, rest, absolute) { + // should never happen? + if (!e.isNamed(p)) + return; + if (!rest) { + this.matches.add(e, absolute, false); + } + else { + this.subwalks.add(e, rest); + } + } +} +//# sourceMappingURL=processor.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/processor.js.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/processor.js.map new file mode 100644 index 00000000000000..bf17d8e99b04a6 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/processor.js.map @@ -0,0 +1 @@ +{"version":3,"file":"processor.js","sourceRoot":"","sources":["../../src/processor.ts"],"names":[],"mappings":"AAAA,qEAAqE;AAErE,OAAO,EAAE,QAAQ,EAAY,MAAM,WAAW,CAAA;AAK9C;;GAEG;AACH,MAAM,OAAO,cAAc;IACzB,KAAK,CAA0B;IAC/B,YAAY,QAAkC,IAAI,GAAG,EAAE;QACrD,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;IACpB,CAAC;IACD,IAAI;QACF,OAAO,IAAI,cAAc,CAAC,IAAI,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAA;IAChD,CAAC;IACD,SAAS,CAAC,MAAY,EAAE,OAAgB;QACtC,OAAO,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC,EAAE,GAAG,CAAC,OAAO,CAAC,UAAU,EAAE,CAAC,CAAA;IACrE,CAAC;IACD,WAAW,CAAC,MAAY,EAAE,OAAgB;QACxC,MAAM,QAAQ,GAAG,MAAM,CAAC,QAAQ,EAAE,CAAA;QAClC,MAAM,MAAM,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAA;QACvC,IAAI,MAAM;YAAE,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC,UAAU,EAAE,CAAC,CAAA;;YACvC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,QAAQ,EAAE,IAAI,GAAG,CAAC,CAAC,OAAO,CAAC,UAAU,EAAE,CAAC,CAAC,CAAC,CAAA;IAChE,CAAC;CACF;AAED;;;;GAIG;AACH,MAAM,OAAO,WAAW;IACtB,KAAK,GAAsB,IAAI,GAAG,EAAE,CAAA;IACpC,GAAG,CAAC,MAAY,EAAE,QAAiB,EAAE,KAAc;QACjD,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;QAC9C,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,CAAA;QACtC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,EAAE,OAAO,KAAK,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,CAAA;IACjE,CAAC;IACD,yBAAyB;IACzB,OAAO;QACL,OAAO,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,OAAO,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC;YAClD,IAAI;YACJ,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;YACT,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;SACV,CAAC,CAAA;IACJ,CAAC;CACF;AAED;;;GAGG;AACH,MAAM,OAAO,QAAQ;IACnB,KAAK,GAAyB,IAAI,GAAG,EAAE,CAAA;IACvC,GAAG,CAAC,MAAY,EAAE,OAAgB;QAChC,IAAI,CAAC,MAAM,CAAC,UAAU,EAAE,EAAE;YACxB,OAAM;SACP;QACD,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,CAAA;QACnC,IAAI,IAAI,EAAE;YACR,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,UAAU,EAAE,KAA
K,OAAO,CAAC,UAAU,EAAE,CAAC,EAAE;gBAC5D,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;aACnB;SACF;;YAAM,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,EAAE,CAAC,OAAO,CAAC,CAAC,CAAA;IAC1C,CAAC;IACD,GAAG,CAAC,MAAY;QACd,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,CAAA;QACnC,qBAAqB;QACrB,IAAI,CAAC,IAAI,EAAE;YACT,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAA;SACnD;QACD,oBAAoB;QACpB,OAAO,IAAI,CAAA;IACb,CAAC;IACD,OAAO;QACL,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAc,CAAC,CAAC,CAAA;IAClE,CAAC;IACD,IAAI;QACF,OAAO,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,UAAU,EAAE,CAAC,CAAA;IAC3D,CAAC;CACF;AAED;;;;;GAKG;AACH,MAAM,OAAO,SAAS;IACpB,cAAc,CAAgB;IAC9B,OAAO,GAAG,IAAI,WAAW,EAAE,CAAA;IAC3B,QAAQ,GAAG,IAAI,QAAQ,EAAE,CAAA;IACzB,QAAQ,CAAY;IACpB,MAAM,CAAS;IACf,GAAG,CAAS;IACZ,IAAI,CAAgB;IAEpB,YAAY,IAAoB,EAAE,cAA+B;QAC/D,IAAI,CAAC,IAAI,GAAG,IAAI,CAAA;QAChB,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,IAAI,CAAC,MAAM,CAAA;QAC3B,IAAI,CAAC,GAAG,GAAG,CAAC,CAAC,IAAI,CAAC,GAAG,CAAA;QACrB,IAAI,CAAC,cAAc,GAAG,cAAc;YAClC,CAAC,CAAC,cAAc,CAAC,IAAI,EAAE;YACvB,CAAC,CAAC,IAAI,cAAc,EAAE,CAAA;IAC1B,CAAC;IAED,eAAe,CAAC,MAAY,EAAE,QAAmB;QAC/C,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAA;QACxB,MAAM,aAAa,GAAsB,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,CAAA;QAEvE,gEAAgE;QAChE,uCAAuC;QAEvC,KAAK,IAAI,CAAC,CAAC,EAAE,OAAO,CAAC,IAAI,aAAa,EAAE;YACtC,IAAI,CAAC,cAAc,CAAC,WAAW,CAAC,CAAC,EAAE,OAAO,CAAC,CAAA;YAE3C,MAAM,IAAI,GAAG,OAAO,CAAC,IAAI,EAAE,CAAA;YAC3B,MAAM,QAAQ,GAAG,OAAO,CAAC,UAAU,EAAE,IAAI,IAAI,CAAC,IAAI,CAAC,QAAQ,KAAK,KAAK,CAAA;YAErE,kCAAkC;YAClC,IAAI,IAAI,EAAE;gBACR,CAAC,GAAG,CAAC,CAAC,OAAO,CACX,IAAI,KAAK,GAAG,IAAI,IAAI,CAAC,IAAI,CAAC,IAAI,KAAK,SAAS;oBAC1C,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI;oBAChB,CAAC,CAAC,IAAI,CACT,CAAA;gBACD,MAAM,IAAI,GAAG,OAAO,CAAC,IAAI,EAAE,CAAA;gBAC3B,IAAI,CAAC,IAAI,EAAE;oBACT,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,EAAE,KAAK,CAAC,CAAA;oBAChC,SAAQ;iBACT;qBAAM;oBACL,OAAO,GAAG,IAAI,CAAA;iBACf;aACF;YAED,IAAI,CAAC,CAAC,QAAQ,EAAE;gBAAE,SAAQ;YAE1B,IAAI,CAAY,CAAA;YAChB,IAAI,IAAoB,CAAA;YACxB,IAAI,OAAO,GAAG,KAAK,CAAA;YACnB,OACE,OAAO,CAAC,CAAC,GAAG,OAAO,CAAC,OAAO,EAAE,CAAC,KAAK,QAAQ;gBAC3C,CAAC,IAAI,GAAG,OAAO,CAAC,IAAI,EAAE,CAAC,EACvB;gBACA,MAAM,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAA;gBACtB,sDAAsD;gBACtD,IAAI,CAAC,CAAC,SAAS,EAAE,IAAI,CAAC,KAAK,IAAI;oBAAE,MAAK;gBACtC,CAAC,GAAG,CAAC,CAAA;gBACL,OAAO,GAAG,IAAI,CAAA;gBACd,OAAO,GAAG,IAAI,CAAA;aACf;YACD,CAAC,GAAG,OAAO,CAAC,OAAO,EAAE,CAAA;YACrB,IAAI,GAAG,OAAO,CAAC,IAAI,EAAE,CAAA;YACrB,IAAI,OAAO,EAAE;gBACX,IAAI,IAAI,CAAC,cAAc,CAAC,SAAS,CAAC,CAAC,EAAE,OAAO,CAAC;oBAAE,SAAQ;gBACvD,IAAI,CAAC,cAAc,CAAC,WAAW,CAAC,CAAC,EAAE,OAAO,CAAC,CAAA;aAC5C;YAED,uDAAuD;YACvD,qCAAqC;YACrC,kDAAkD;YAClD,IAAI,OAAO,CAAC,KAAK,QAAQ,EAAE;gBACzB,sBAAsB;gBACtB,IAAI,CAAC,IAAI,EAAE;oBACT,MAAM,KAAK,GAAG,CAAC,KAAK,IAAI,IAAI,CAAC,KAAK,EAAE,IAAI,CAAC,KAAK,GAAG,CAAA;oBACjD,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAA;iBAChD;qBAAM;oBACL,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,OAAO,CAAC,CAAA;iBAC9B;gBACD,SAAQ;aACT;iBAAM,IAAI,CAAC,KAAK,QAAQ,EAAE;gBACzB,wCAAwC;gBACxC,4CAA4C;gBAC5C,wDAAwD;gBACxD,4DAA4D;gBAC5D,gEAAgE;gBAChE,IACE,CAAC,CAAC,CAAC,cAAc,EAAE;oBACnB,IAAI,CAAC,MAAM;oBACX,OAAO,CAAC,mBAAmB,EAAE,EAC7B;oBACA,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,OAAO,CAAC,CAAA;iBAC9B;gBACD,MAAM,EAAE,GAAG,IAAI,EAAE,OAAO,EAAE,CAAA;gBAC1B,MAAM,KAAK,GAAG,IAAI,EAAE,IAAI,EAAE,CAAA;gBAC1B,IAAI,CAAC,IAAI,IAAI,CAAC,CAAC,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE;oBAClD,iDAAi
D;oBACjD,6CAA6C;oBAC7C,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,EAAE,QAAQ,EAAE,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,GAAG,CAAC,CAAA;iBACvD;qBAAM;oBACL,IAAI,EAAE,KAAK,IAAI,EAAE;wBACf,wDAAwD;wBACxD,wDAAwD;wBACxD,qBAAqB;wBACrB,MAAM,EAAE,GAAG,CAAC,CAAC,MAAM,IAAI,CAAC,CAAA;wBACxB,oBAAoB;wBACpB,IAAI,CAAC,KAAK;4BAAE,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE,EAAE,QAAQ,EAAE,IAAI,CAAC,CAAA;6BAC3C,IAAI,CAAC,IAAI,CAAC,cAAc,CAAC,SAAS,CAAC,EAAE,EAAE,KAAK,CAAC,EAAE;4BAClD,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE,EAAE,KAAK,CAAC,CAAA;yBAC7B;qBACF;iBACF;aACF;iBAAM,IAAI,CAAC,YAAY,MAAM,EAAE;gBAC9B,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,OAAO,CAAC,CAAA;aAC9B;SACF;QAED,OAAO,IAAI,CAAA;IACb,CAAC;IAED,cAAc;QACZ,OAAO,IAAI,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAA;IAC7B,CAAC;IAED,KAAK;QACH,OAAO,IAAI,SAAS,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,cAAc,CAAC,CAAA;IACtD,CAAC;IAED,0DAA0D;IAC1D,yCAAyC;IACzC,6CAA6C;IAC7C,2BAA2B;IAC3B,aAAa,CAAC,MAAY,EAAE,OAAe;QACzC,MAAM,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,MAAM,CAAC,CAAA;QAC1C,yDAAyD;QACzD,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,EAAE,CAAA;QAC5B,KAAK,MAAM,CAAC,IAAI,OAAO,EAAE;YACvB,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;gBAC9B,MAAM,QAAQ,GAAG,OAAO,CAAC,UAAU,EAAE,CAAA;gBACrC,MAAM,CAAC,GAAG,OAAO,CAAC,OAAO,EAAE,CAAA;gBAC3B,MAAM,IAAI,GAAG,OAAO,CAAC,IAAI,EAAE,CAAA;gBAC3B,IAAI,CAAC,KAAK,QAAQ,EAAE;oBAClB,OAAO,CAAC,YAAY,CAAC,CAAC,EAAE,OAAO,EAAE,IAAI,EAAE,QAAQ,CAAC,CAAA;iBACjD;qBAAM,IAAI,CAAC,YAAY,MAAM,EAAE;oBAC9B,OAAO,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,EAAE,IAAI,EAAE,QAAQ,CAAC,CAAA;iBACzC;qBAAM;oBACL,OAAO,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,EAAE,IAAI,EAAE,QAAQ,CAAC,CAAA;iBACzC;aACF;SACF;QACD,OAAO,OAAO,CAAA;IAChB,CAAC;IAED,YAAY,CACV,CAAO,EACP,OAAgB,EAChB,IAAoB,EACpB,QAAiB;QAEjB,IAAI,IAAI,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;YACvC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,EAAE;gBACtB,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAA;aACrC;YACD,IAAI,CAAC,CAAC,UAAU,EAAE,EAAE;gBAClB,2DAA2D;gBAC3D,gEAAgE;gBAChE,+DAA+D;gBAC/D,iEAAiE;gBACjE,uDAAuD;gBACvD,IAAI,IAAI,CAAC,MAAM,IAAI,CAAC,CAAC,CAAC,cAAc,EAAE,EAAE;oBACtC,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,OAAO,CAAC,CAAA;iBAC9B;qBAAM,IAAI,CAAC,CAAC,cAAc,EAAE,EAAE;oBAC7B,IAAI,IAAI,IAAI,OAAO,CAAC,mBAAmB,EAAE,EAAE;wBACzC,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,CAAA;qBAC3B;yBAAM,IAAI,OAAO,CAAC,kBAAkB,EAAE,EAAE;wBACvC,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,OAAO,CAAC,CAAA;qBAC9B;iBACF;aACF;SACF;QACD,sDAAsD;QACtD,YAAY;QACZ,IAAI,IAAI,EAAE;YACR,MAAM,EAAE,GAAG,IAAI,CAAC,OAAO,EAAE,CAAA;YACzB,IACE,OAAO,EAAE,KAAK,QAAQ;gBACtB,sCAAsC;gBACtC,EAAE,KAAK,IAAI;gBACX,EAAE,KAAK,EAAE;gBACT,EAAE,KAAK,GAAG,EACV;gBACA,IAAI,CAAC,UAAU,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,IAAI,EAAE,EAAE,QAAQ,CAAC,CAAA;aAC9C;iBAAM,IAAI,EAAE,KAAK,IAAI,EAAE;gBACtB,qBAAqB;gBACrB,MAAM,EAAE,GAAG,CAAC,CAAC,MAAM,IAAI,CAAC,CAAA;gBACxB,oBAAoB;gBACpB,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE,EAAE,IAAI,CAAC,CAAA;aAC5B;iBAAM,IAAI,EAAE,YAAY,MAAM,EAAE;gBAC/B,IAAI,CAAC,UAAU,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,IAAI,EAAE,EAAE,QAAQ,CAAC,CAAA;aAC9C;SACF;IACH,CAAC;IAED,UAAU,CACR,CAAO,EACP,CAAW,EACX,IAAoB,EACpB,QAAiB;QAEjB,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC;YAAE,OAAM;QAC3B,IAAI,CAAC,IAAI,EAAE;YACT,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAA;SACrC;aAAM;YACL,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,CAAA;SAC3B;IACH,CAAC;IAED,UAAU,CAAC,CAAO,EAAE,CAAS,EAAE,IAAoB,EAAE,QAAiB;QACpE,uBAAuB;QACvB,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC;YAAE,OAAM;QACzB,IAAI,CAAC,IAAI,EAAE;YACT,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAA;SACrC;aAAM;YACL,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,CAAA;SAC3B;IA
CH,CAAC;CACF","sourcesContent":["// synchronous utility for filtering entries and calculating subwalks\n\nimport { GLOBSTAR, MMRegExp } from 'minimatch'\nimport { Path } from 'path-scurry'\nimport { MMPattern, Pattern } from './pattern.js'\nimport { GlobWalkerOpts } from './walker.js'\n\n/**\n * A cache of which patterns have been processed for a given Path\n */\nexport class HasWalkedCache {\n store: Map>\n constructor(store: Map> = new Map()) {\n this.store = store\n }\n copy() {\n return new HasWalkedCache(new Map(this.store))\n }\n hasWalked(target: Path, pattern: Pattern) {\n return this.store.get(target.fullpath())?.has(pattern.globString())\n }\n storeWalked(target: Path, pattern: Pattern) {\n const fullpath = target.fullpath()\n const cached = this.store.get(fullpath)\n if (cached) cached.add(pattern.globString())\n else this.store.set(fullpath, new Set([pattern.globString()]))\n }\n}\n\n/**\n * A record of which paths have been matched in a given walk step,\n * and whether they only are considered a match if they are a directory,\n * and whether their absolute or relative path should be returned.\n */\nexport class MatchRecord {\n store: Map = new Map()\n add(target: Path, absolute: boolean, ifDir: boolean) {\n const n = (absolute ? 2 : 0) | (ifDir ? 1 : 0)\n const current = this.store.get(target)\n this.store.set(target, current === undefined ? n : n & current)\n }\n // match, absolute, ifdir\n entries(): [Path, boolean, boolean][] {\n return [...this.store.entries()].map(([path, n]) => [\n path,\n !!(n & 2),\n !!(n & 1),\n ])\n }\n}\n\n/**\n * A collection of patterns that must be processed in a subsequent step\n * for a given path.\n */\nexport class SubWalks {\n store: Map = new Map()\n add(target: Path, pattern: Pattern) {\n if (!target.canReaddir()) {\n return\n }\n const subs = this.store.get(target)\n if (subs) {\n if (!subs.find(p => p.globString() === pattern.globString())) {\n subs.push(pattern)\n }\n } else this.store.set(target, [pattern])\n }\n get(target: Path): Pattern[] {\n const subs = this.store.get(target)\n /* c8 ignore start */\n if (!subs) {\n throw new Error('attempting to walk unknown path')\n }\n /* c8 ignore stop */\n return subs\n }\n entries(): [Path, Pattern[]][] {\n return this.keys().map(k => [k, this.store.get(k) as Pattern[]])\n }\n keys(): Path[] {\n return [...this.store.keys()].filter(t => t.canReaddir())\n }\n}\n\n/**\n * The class that processes patterns for a given path.\n *\n * Handles child entry filtering, and determining whether a path's\n * directory contents must be read.\n */\nexport class Processor {\n hasWalkedCache: HasWalkedCache\n matches = new MatchRecord()\n subwalks = new SubWalks()\n patterns?: Pattern[]\n follow: boolean\n dot: boolean\n opts: GlobWalkerOpts\n\n constructor(opts: GlobWalkerOpts, hasWalkedCache?: HasWalkedCache) {\n this.opts = opts\n this.follow = !!opts.follow\n this.dot = !!opts.dot\n this.hasWalkedCache = hasWalkedCache\n ? 
hasWalkedCache.copy()\n : new HasWalkedCache()\n }\n\n processPatterns(target: Path, patterns: Pattern[]) {\n this.patterns = patterns\n const processingSet: [Path, Pattern][] = patterns.map(p => [target, p])\n\n // map of paths to the magic-starting subwalks they need to walk\n // first item in patterns is the filter\n\n for (let [t, pattern] of processingSet) {\n this.hasWalkedCache.storeWalked(t, pattern)\n\n const root = pattern.root()\n const absolute = pattern.isAbsolute() && this.opts.absolute !== false\n\n // start absolute patterns at root\n if (root) {\n t = t.resolve(\n root === '/' && this.opts.root !== undefined\n ? this.opts.root\n : root\n )\n const rest = pattern.rest()\n if (!rest) {\n this.matches.add(t, true, false)\n continue\n } else {\n pattern = rest\n }\n }\n\n if (t.isENOENT()) continue\n\n let p: MMPattern\n let rest: Pattern | null\n let changed = false\n while (\n typeof (p = pattern.pattern()) === 'string' &&\n (rest = pattern.rest())\n ) {\n const c = t.resolve(p)\n // we can be reasonably sure that .. is a readable dir\n if (c.isUnknown() && p !== '..') break\n t = c\n pattern = rest\n changed = true\n }\n p = pattern.pattern()\n rest = pattern.rest()\n if (changed) {\n if (this.hasWalkedCache.hasWalked(t, pattern)) continue\n this.hasWalkedCache.storeWalked(t, pattern)\n }\n\n // now we have either a final string for a known entry,\n // more strings for an unknown entry,\n // or a pattern starting with magic, mounted on t.\n if (typeof p === 'string') {\n // must be final entry\n if (!rest) {\n const ifDir = p === '..' || p === '' || p === '.'\n this.matches.add(t.resolve(p), absolute, ifDir)\n } else {\n this.subwalks.add(t, pattern)\n }\n continue\n } else if (p === GLOBSTAR) {\n // if no rest, match and subwalk pattern\n // if rest, process rest and subwalk pattern\n // if it's a symlink, but we didn't get here by way of a\n // globstar match (meaning it's the first time THIS globstar\n // has traversed a symlink), then we follow it. Otherwise, stop.\n if (\n !t.isSymbolicLink() ||\n this.follow ||\n pattern.checkFollowGlobstar()\n ) {\n this.subwalks.add(t, pattern)\n }\n const rp = rest?.pattern()\n const rrest = rest?.rest()\n if (!rest || ((rp === '' || rp === '.') && !rrest)) {\n // only HAS to be a dir if it ends in **/ or **/.\n // but ending in ** will match files as well.\n this.matches.add(t, absolute, rp === '' || rp === '.')\n } else {\n if (rp === '..') {\n // this would mean you're matching **/.. 
at the fs root,\n // and no thanks, I'm not gonna test that specific case.\n /* c8 ignore start */\n const tp = t.parent || t\n /* c8 ignore stop */\n if (!rrest) this.matches.add(tp, absolute, true)\n else if (!this.hasWalkedCache.hasWalked(tp, rrest)) {\n this.subwalks.add(tp, rrest)\n }\n }\n }\n } else if (p instanceof RegExp) {\n this.subwalks.add(t, pattern)\n }\n }\n\n return this\n }\n\n subwalkTargets(): Path[] {\n return this.subwalks.keys()\n }\n\n child() {\n return new Processor(this.opts, this.hasWalkedCache)\n }\n\n // return a new Processor containing the subwalks for each\n // child entry, and a set of matches, and\n // a hasWalkedCache that's a copy of this one\n // then we're going to call\n filterEntries(parent: Path, entries: Path[]): Processor {\n const patterns = this.subwalks.get(parent)\n // put matches and entry walks into the results processor\n const results = this.child()\n for (const e of entries) {\n for (const pattern of patterns) {\n const absolute = pattern.isAbsolute()\n const p = pattern.pattern()\n const rest = pattern.rest()\n if (p === GLOBSTAR) {\n results.testGlobstar(e, pattern, rest, absolute)\n } else if (p instanceof RegExp) {\n results.testRegExp(e, p, rest, absolute)\n } else {\n results.testString(e, p, rest, absolute)\n }\n }\n }\n return results\n }\n\n testGlobstar(\n e: Path,\n pattern: Pattern,\n rest: Pattern | null,\n absolute: boolean\n ) {\n if (this.dot || !e.name.startsWith('.')) {\n if (!pattern.hasMore()) {\n this.matches.add(e, absolute, false)\n }\n if (e.canReaddir()) {\n // if we're in follow mode or it's not a symlink, just keep\n // testing the same pattern. If there's more after the globstar,\n // then this symlink consumes the globstar. If not, then we can\n // follow at most ONE symlink along the way, so we mark it, which\n // also checks to ensure that it wasn't already marked.\n if (this.follow || !e.isSymbolicLink()) {\n this.subwalks.add(e, pattern)\n } else if (e.isSymbolicLink()) {\n if (rest && pattern.checkFollowGlobstar()) {\n this.subwalks.add(e, rest)\n } else if (pattern.markFollowGlobstar()) {\n this.subwalks.add(e, pattern)\n }\n }\n }\n }\n // if the NEXT thing matches this entry, then also add\n // the rest.\n if (rest) {\n const rp = rest.pattern()\n if (\n typeof rp === 'string' &&\n // dots and empty were handled already\n rp !== '..' 
&&\n rp !== '' &&\n rp !== '.'\n ) {\n this.testString(e, rp, rest.rest(), absolute)\n } else if (rp === '..') {\n /* c8 ignore start */\n const ep = e.parent || e\n /* c8 ignore stop */\n this.subwalks.add(ep, rest)\n } else if (rp instanceof RegExp) {\n this.testRegExp(e, rp, rest.rest(), absolute)\n }\n }\n }\n\n testRegExp(\n e: Path,\n p: MMRegExp,\n rest: Pattern | null,\n absolute: boolean\n ) {\n if (!p.test(e.name)) return\n if (!rest) {\n this.matches.add(e, absolute, false)\n } else {\n this.subwalks.add(e, rest)\n }\n }\n\n testString(e: Path, p: string, rest: Pattern | null, absolute: boolean) {\n // should never happen?\n if (!e.isNamed(p)) return\n if (!rest) {\n this.matches.add(e, absolute, false)\n } else {\n this.subwalks.add(e, rest)\n }\n }\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/walker.d.ts b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/walker.d.ts new file mode 100644 index 00000000000000..5c1a0414971b3a --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/walker.d.ts @@ -0,0 +1,96 @@ +/// +/** + * Single-use utility classes to provide functionality to the {@link Glob} + * methods. + * + * @module + */ +import { Minipass } from 'minipass'; +import { Path } from 'path-scurry'; +import { IgnoreLike } from './ignore.js'; +import { Pattern } from './pattern.js'; +import { Processor } from './processor.js'; +export interface GlobWalkerOpts { + absolute?: boolean; + allowWindowsEscape?: boolean; + cwd?: string | URL; + dot?: boolean; + dotRelative?: boolean; + follow?: boolean; + ignore?: string | string[] | IgnoreLike; + mark?: boolean; + matchBase?: boolean; + maxDepth?: number; + nobrace?: boolean; + nocase?: boolean; + nodir?: boolean; + noext?: boolean; + noglobstar?: boolean; + platform?: NodeJS.Platform; + posix?: boolean; + realpath?: boolean; + root?: string; + stat?: boolean; + signal?: AbortSignal; + windowsPathsNoEscape?: boolean; + withFileTypes?: boolean; +} +export type GWOFileTypesTrue = GlobWalkerOpts & { + withFileTypes: true; +}; +export type GWOFileTypesFalse = GlobWalkerOpts & { + withFileTypes: false; +}; +export type GWOFileTypesUnset = GlobWalkerOpts & { + withFileTypes?: undefined; +}; +export type Result = O extends GWOFileTypesTrue ? Path : O extends GWOFileTypesFalse ? string : O extends GWOFileTypesUnset ? string : Path | string; +export type Matches = O extends GWOFileTypesTrue ? Set : O extends GWOFileTypesFalse ? Set : O extends GWOFileTypesUnset ? Set : Set; +export type MatchStream = O extends GWOFileTypesTrue ? Minipass : O extends GWOFileTypesFalse ? Minipass : O extends GWOFileTypesUnset ? 
Minipass : Minipass; +/** + * basic walking utilities that all the glob walker types use + */ +export declare abstract class GlobUtil { + #private; + path: Path; + patterns: Pattern[]; + opts: O; + seen: Set; + paused: boolean; + aborted: boolean; + signal?: AbortSignal; + maxDepth: number; + constructor(patterns: Pattern[], path: Path, opts: O); + pause(): void; + resume(): void; + onResume(fn: () => any): void; + matchCheck(e: Path, ifDir: boolean): Promise; + matchCheckTest(e: Path | undefined, ifDir: boolean): Path | undefined; + matchCheckSync(e: Path, ifDir: boolean): Path | undefined; + abstract matchEmit(p: Result): void; + abstract matchEmit(p: string | Path): void; + matchFinish(e: Path, absolute: boolean): void; + match(e: Path, absolute: boolean, ifDir: boolean): Promise; + matchSync(e: Path, absolute: boolean, ifDir: boolean): void; + walkCB(target: Path, patterns: Pattern[], cb: () => any): void; + walkCB2(target: Path, patterns: Pattern[], processor: Processor, cb: () => any): any; + walkCB3(target: Path, entries: Path[], processor: Processor, cb: () => any): void; + walkCBSync(target: Path, patterns: Pattern[], cb: () => any): void; + walkCB2Sync(target: Path, patterns: Pattern[], processor: Processor, cb: () => any): any; + walkCB3Sync(target: Path, entries: Path[], processor: Processor, cb: () => any): void; +} +export declare class GlobWalker extends GlobUtil { + matches: O extends GWOFileTypesTrue ? Set : O extends GWOFileTypesFalse ? Set : O extends GWOFileTypesUnset ? Set : Set; + constructor(patterns: Pattern[], path: Path, opts: O); + matchEmit(e: Result): void; + walk(): Promise>; + walkSync(): Matches; +} +export declare class GlobStream extends GlobUtil { + results: O extends GWOFileTypesTrue ? Minipass : O extends GWOFileTypesFalse ? Minipass : O extends GWOFileTypesUnset ? 
Minipass : Minipass; + constructor(patterns: Pattern[], path: Path, opts: O); + matchEmit(e: Result): void; + stream(): MatchStream; + streamSync(): MatchStream; +} +//# sourceMappingURL=walker.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/walker.d.ts.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/walker.d.ts.map new file mode 100644 index 00000000000000..7c8df20b2f323c --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/walker.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"walker.d.ts","sourceRoot":"","sources":["../../src/walker.ts"],"names":[],"mappings":";AAAA;;;;;GAKG;AACH,OAAO,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAA;AACnC,OAAO,EAAE,IAAI,EAAE,MAAM,aAAa,CAAA;AAClC,OAAO,EAAU,UAAU,EAAE,MAAM,aAAa,CAAA;AAOhD,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAA;AACtC,OAAO,EAAE,SAAS,EAAE,MAAM,gBAAgB,CAAA;AAE1C,MAAM,WAAW,cAAc;IAC7B,QAAQ,CAAC,EAAE,OAAO,CAAA;IAClB,kBAAkB,CAAC,EAAE,OAAO,CAAA;IAC5B,GAAG,CAAC,EAAE,MAAM,GAAG,GAAG,CAAA;IAClB,GAAG,CAAC,EAAE,OAAO,CAAA;IACb,WAAW,CAAC,EAAE,OAAO,CAAA;IACrB,MAAM,CAAC,EAAE,OAAO,CAAA;IAChB,MAAM,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,UAAU,CAAA;IACvC,IAAI,CAAC,EAAE,OAAO,CAAA;IACd,SAAS,CAAC,EAAE,OAAO,CAAA;IAGnB,QAAQ,CAAC,EAAE,MAAM,CAAA;IACjB,OAAO,CAAC,EAAE,OAAO,CAAA;IACjB,MAAM,CAAC,EAAE,OAAO,CAAA;IAChB,KAAK,CAAC,EAAE,OAAO,CAAA;IACf,KAAK,CAAC,EAAE,OAAO,CAAA;IACf,UAAU,CAAC,EAAE,OAAO,CAAA;IACpB,QAAQ,CAAC,EAAE,MAAM,CAAC,QAAQ,CAAA;IAC1B,KAAK,CAAC,EAAE,OAAO,CAAA;IACf,QAAQ,CAAC,EAAE,OAAO,CAAA;IAClB,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,IAAI,CAAC,EAAE,OAAO,CAAA;IACd,MAAM,CAAC,EAAE,WAAW,CAAA;IACpB,oBAAoB,CAAC,EAAE,OAAO,CAAA;IAC9B,aAAa,CAAC,EAAE,OAAO,CAAA;CACxB;AAED,MAAM,MAAM,gBAAgB,GAAG,cAAc,GAAG;IAC9C,aAAa,EAAE,IAAI,CAAA;CACpB,CAAA;AACD,MAAM,MAAM,iBAAiB,GAAG,cAAc,GAAG;IAC/C,aAAa,EAAE,KAAK,CAAA;CACrB,CAAA;AACD,MAAM,MAAM,iBAAiB,GAAG,cAAc,GAAG;IAC/C,aAAa,CAAC,EAAE,SAAS,CAAA;CAC1B,CAAA;AAED,MAAM,MAAM,MAAM,CAAC,CAAC,SAAS,cAAc,IAAI,CAAC,SAAS,gBAAgB,GACrE,IAAI,GACJ,CAAC,SAAS,iBAAiB,GAC3B,MAAM,GACN,CAAC,SAAS,iBAAiB,GAC3B,MAAM,GACN,IAAI,GAAG,MAAM,CAAA;AAEjB,MAAM,MAAM,OAAO,CAAC,CAAC,SAAS,cAAc,IAAI,CAAC,SAAS,gBAAgB,GACtE,GAAG,CAAC,IAAI,CAAC,GACT,CAAC,SAAS,iBAAiB,GAC3B,GAAG,CAAC,MAAM,CAAC,GACX,CAAC,SAAS,iBAAiB,GAC3B,GAAG,CAAC,MAAM,CAAC,GACX,GAAG,CAAC,IAAI,GAAG,MAAM,CAAC,CAAA;AAEtB,MAAM,MAAM,WAAW,CAAC,CAAC,SAAS,cAAc,IAC9C,CAAC,SAAS,gBAAgB,GACtB,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,GACpB,CAAC,SAAS,iBAAiB,GAC3B,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,GACxB,CAAC,SAAS,iBAAiB,GAC3B,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,GACxB,QAAQ,CAAC,IAAI,GAAG,MAAM,EAAE,IAAI,GAAG,MAAM,CAAC,CAAA;AAY5C;;GAEG;AACH,8BAAsB,QAAQ,CAAC,CAAC,SAAS,cAAc,GAAG,cAAc;;IACtE,IAAI,EAAE,IAAI,CAAA;IACV,QAAQ,EAAE,OAAO,EAAE,CAAA;IACnB,IAAI,EAAE,CAAC,CAAA;IACP,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,CAAkB;IACjC,MAAM,EAAE,OAAO,CAAQ;IACvB,OAAO,EAAE,OAAO,CAAQ;IAIxB,MAAM,CAAC,EAAE,WAAW,CAAA;IACpB,QAAQ,EAAE,MAAM,CAAA;gBAEJ,QAAQ,EAAE,OAAO,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC;IA8BpD,KAAK;IAGL,MAAM;IAUN,QAAQ,CAAC,EAAE,EAAE,MAAM,GAAG;IAahB,UAAU,CAAC,CAAC,EAAE,IAAI,EAAE,KAAK,EAAE,OAAO,GAAG,OAAO,CAAC,IAAI,GAAG,SAAS,CAAC;IAYpE,cAAc,CAAC,CAAC,EAAE,IAAI,GAAG,SAAS,EAAE,KAAK,EAAE,OAAO,GAAG,IAAI,GAAG,SAAS;IAUrE,cAAc,CAAC,CAAC,EAAE,IAAI,EAAE,KAAK,EAAE,OAAO,GAAG,IAAI,GAAG,SAAS;IAYzD,QAAQ,CAAC,SAAS,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,GAAG,IAAI;IACtC,QAAQ,CAAC,SAAS,CAAC,CAAC,EAAE,MAAM,GAAG,IAAI,GAAG,IAAI;IAE1C,WAAW,CAAC,CAAC,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO;IAsBhC,KAAK,CAAC,CAAC,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,KAAK,EAAE,OAAO,GAAG,OAAO,CAAC,IAAI,CAA
C;IAKtE,SAAS,CAAC,CAAC,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,KAAK,EAAE,OAAO,GAAG,IAAI;IAK3D,MAAM,CAAC,MAAM,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,EAAE,EAAE,EAAE,MAAM,GAAG;IAOvD,OAAO,CACL,MAAM,EAAE,IAAI,EACZ,QAAQ,EAAE,OAAO,EAAE,EACnB,SAAS,EAAE,SAAS,EACpB,EAAE,EAAE,MAAM,GAAG;IA2Cf,OAAO,CACL,MAAM,EAAE,IAAI,EACZ,OAAO,EAAE,IAAI,EAAE,EACf,SAAS,EAAE,SAAS,EACpB,EAAE,EAAE,MAAM,GAAG;IAsBf,UAAU,CAAC,MAAM,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,EAAE,EAAE,EAAE,MAAM,GAAG;IAO3D,WAAW,CACT,MAAM,EAAE,IAAI,EACZ,QAAQ,EAAE,OAAO,EAAE,EACnB,SAAS,EAAE,SAAS,EACpB,EAAE,EAAE,MAAM,GAAG;IAqCf,WAAW,CACT,MAAM,EAAE,IAAI,EACZ,OAAO,EAAE,IAAI,EAAE,EACf,SAAS,EAAE,SAAS,EACpB,EAAE,EAAE,MAAM,GAAG;CAoBhB;AAED,qBAAa,UAAU,CACrB,CAAC,SAAS,cAAc,GAAG,cAAc,CACzC,SAAQ,QAAQ,CAAC,CAAC,CAAC;IACnB,OAAO,EAAE,CAAC,SAAS,gBAAgB,GAC/B,GAAG,CAAC,IAAI,CAAC,GACT,CAAC,SAAS,iBAAiB,GAC3B,GAAG,CAAC,MAAM,CAAC,GACX,CAAC,SAAS,iBAAiB,GAC3B,GAAG,CAAC,MAAM,CAAC,GACX,GAAG,CAAC,IAAI,GAAG,MAAM,CAAC,CAAA;gBAEV,QAAQ,EAAE,OAAO,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC;IAKpD,SAAS,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,GAAG,IAAI;IAKvB,IAAI,IAAI,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;IAiBjC,QAAQ,IAAI,OAAO,CAAC,CAAC,CAAC;CAWvB;AAED,qBAAa,UAAU,CACrB,CAAC,SAAS,cAAc,GAAG,cAAc,CACzC,SAAQ,QAAQ,CAAC,CAAC,CAAC;IACnB,OAAO,EAAE,CAAC,SAAS,gBAAgB,GAC/B,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,GACpB,CAAC,SAAS,iBAAiB,GAC3B,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,GACxB,CAAC,SAAS,iBAAiB,GAC3B,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,GACxB,QAAQ,CAAC,IAAI,GAAG,MAAM,EAAE,IAAI,GAAG,MAAM,CAAC,CAAA;gBAE9B,QAAQ,EAAE,OAAO,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC;IAUpD,SAAS,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,GAAG,IAAI;IAM7B,MAAM,IAAI,WAAW,CAAC,CAAC,CAAC;IAYxB,UAAU,IAAI,WAAW,CAAC,CAAC,CAAC;CAO7B"} \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/walker.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/walker.js new file mode 100644 index 00000000000000..6f3358b0c39a32 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/walker.js @@ -0,0 +1,352 @@ +/** + * Single-use utility classes to provide functionality to the {@link Glob} + * methods. + * + * @module + */ +import { Minipass } from 'minipass'; +import { Ignore } from './ignore.js'; +import { Processor } from './processor.js'; +const makeIgnore = (ignore, opts) => typeof ignore === 'string' + ? new Ignore([ignore], opts) + : Array.isArray(ignore) + ? new Ignore(ignore, opts) + : ignore; +/** + * basic walking utilities that all the glob walker types use + */ +export class GlobUtil { + path; + patterns; + opts; + seen = new Set(); + paused = false; + aborted = false; + #onResume = []; + #ignore; + #sep; + signal; + maxDepth; + constructor(patterns, path, opts) { + this.patterns = patterns; + this.path = path; + this.opts = opts; + this.#sep = !opts.posix && opts.platform === 'win32' ? 
'\\' : '/'; + if (opts.ignore) { + this.#ignore = makeIgnore(opts.ignore, opts); + } + // ignore, always set with maxDepth, but it's optional on the + // GlobOptions type + /* c8 ignore start */ + this.maxDepth = opts.maxDepth || Infinity; + /* c8 ignore stop */ + if (opts.signal) { + this.signal = opts.signal; + this.signal.addEventListener('abort', () => { + this.#onResume.length = 0; + }); + } + } + #ignored(path) { + return this.seen.has(path) || !!this.#ignore?.ignored?.(path); + } + #childrenIgnored(path) { + return !!this.#ignore?.childrenIgnored?.(path); + } + // backpressure mechanism + pause() { + this.paused = true; + } + resume() { + /* c8 ignore start */ + if (this.signal?.aborted) + return; + /* c8 ignore stop */ + this.paused = false; + let fn = undefined; + while (!this.paused && (fn = this.#onResume.shift())) { + fn(); + } + } + onResume(fn) { + if (this.signal?.aborted) + return; + /* c8 ignore start */ + if (!this.paused) { + fn(); + } + else { + /* c8 ignore stop */ + this.#onResume.push(fn); + } + } + // do the requisite realpath/stat checking, and return the path + // to add or undefined to filter it out. + async matchCheck(e, ifDir) { + if (ifDir && this.opts.nodir) + return undefined; + let rpc; + if (this.opts.realpath) { + rpc = e.realpathCached() || (await e.realpath()); + if (!rpc) + return undefined; + e = rpc; + } + const needStat = e.isUnknown() || this.opts.stat; + return this.matchCheckTest(needStat ? await e.lstat() : e, ifDir); + } + matchCheckTest(e, ifDir) { + return e && + (this.maxDepth === Infinity || e.depth() <= this.maxDepth) && + (!ifDir || e.canReaddir()) && + (!this.opts.nodir || !e.isDirectory()) && + !this.#ignored(e) + ? e + : undefined; + } + matchCheckSync(e, ifDir) { + if (ifDir && this.opts.nodir) + return undefined; + let rpc; + if (this.opts.realpath) { + rpc = e.realpathCached() || e.realpathSync(); + if (!rpc) + return undefined; + e = rpc; + } + const needStat = e.isUnknown() || this.opts.stat; + return this.matchCheckTest(needStat ? e.lstatSync() : e, ifDir); + } + matchFinish(e, absolute) { + if (this.#ignored(e)) + return; + const abs = this.opts.absolute === undefined ? absolute : this.opts.absolute; + this.seen.add(e); + const mark = this.opts.mark && e.isDirectory() ? this.#sep : ''; + // ok, we have what we need! + if (this.opts.withFileTypes) { + this.matchEmit(e); + } + else if (abs) { + const abs = this.opts.posix ? e.fullpathPosix() : e.fullpath(); + this.matchEmit(abs + mark); + } + else { + const rel = this.opts.posix ? e.relativePosix() : e.relative(); + const pre = this.opts.dotRelative && !rel.startsWith('..' + this.#sep) + ? '.' + this.#sep + : ''; + this.matchEmit(!rel ? '.' + mark : pre + rel + mark); + } + } + async match(e, absolute, ifDir) { + const p = await this.matchCheck(e, ifDir); + if (p) + this.matchFinish(p, absolute); + } + matchSync(e, absolute, ifDir) { + const p = this.matchCheckSync(e, ifDir); + if (p) + this.matchFinish(p, absolute); + } + walkCB(target, patterns, cb) { + /* c8 ignore start */ + if (this.signal?.aborted) + cb(); + /* c8 ignore stop */ + this.walkCB2(target, patterns, new Processor(this.opts), cb); + } + walkCB2(target, patterns, processor, cb) { + if (this.#childrenIgnored(target)) + return cb(); + if (this.signal?.aborted) + cb(); + if (this.paused) { + this.onResume(() => this.walkCB2(target, patterns, processor, cb)); + return; + } + processor.processPatterns(target, patterns); + // done processing. all of the above is sync, can be abstracted out. 
+ // subwalks is a map of paths to the entry filters they need + // matches is a map of paths to [absolute, ifDir] tuples. + let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + tasks++; + this.match(m, absolute, ifDir).then(() => next()); + } + for (const t of processor.subwalkTargets()) { + if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) { + continue; + } + tasks++; + const childrenCached = t.readdirCached(); + if (t.calledReaddir()) + this.walkCB3(t, childrenCached, processor, next); + else { + t.readdirCB((_, entries) => this.walkCB3(t, entries, processor, next), true); + } + } + next(); + } + walkCB3(target, entries, processor, cb) { + processor = processor.filterEntries(target, entries); + let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + tasks++; + this.match(m, absolute, ifDir).then(() => next()); + } + for (const [target, patterns] of processor.subwalks.entries()) { + tasks++; + this.walkCB2(target, patterns, processor.child(), next); + } + next(); + } + walkCBSync(target, patterns, cb) { + /* c8 ignore start */ + if (this.signal?.aborted) + cb(); + /* c8 ignore stop */ + this.walkCB2Sync(target, patterns, new Processor(this.opts), cb); + } + walkCB2Sync(target, patterns, processor, cb) { + if (this.#childrenIgnored(target)) + return cb(); + if (this.signal?.aborted) + cb(); + if (this.paused) { + this.onResume(() => this.walkCB2Sync(target, patterns, processor, cb)); + return; + } + processor.processPatterns(target, patterns); + // done processing. all of the above is sync, can be abstracted out. + // subwalks is a map of paths to the entry filters they need + // matches is a map of paths to [absolute, ifDir] tuples. 
+ let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + this.matchSync(m, absolute, ifDir); + } + for (const t of processor.subwalkTargets()) { + if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) { + continue; + } + tasks++; + const children = t.readdirSync(); + this.walkCB3Sync(t, children, processor, next); + } + next(); + } + walkCB3Sync(target, entries, processor, cb) { + processor = processor.filterEntries(target, entries); + let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + this.matchSync(m, absolute, ifDir); + } + for (const [target, patterns] of processor.subwalks.entries()) { + tasks++; + this.walkCB2Sync(target, patterns, processor.child(), next); + } + next(); + } +} +export class GlobWalker extends GlobUtil { + matches; + constructor(patterns, path, opts) { + super(patterns, path, opts); + this.matches = new Set(); + } + matchEmit(e) { + this.matches.add(e); + } + async walk() { + if (this.signal?.aborted) + throw this.signal.reason; + if (this.path.isUnknown()) { + await this.path.lstat(); + } + await new Promise((res, rej) => { + this.walkCB(this.path, this.patterns, () => { + if (this.signal?.aborted) { + rej(this.signal.reason); + } + else { + res(this.matches); + } + }); + }); + return this.matches; + } + walkSync() { + if (this.signal?.aborted) + throw this.signal.reason; + if (this.path.isUnknown()) { + this.path.lstatSync(); + } + // nothing for the callback to do, because this never pauses + this.walkCBSync(this.path, this.patterns, () => { + if (this.signal?.aborted) + throw this.signal.reason; + }); + return this.matches; + } +} +export class GlobStream extends GlobUtil { + results; + constructor(patterns, path, opts) { + super(patterns, path, opts); + this.results = new Minipass({ + signal: this.signal, + objectMode: true, + }); + this.results.on('drain', () => this.resume()); + this.results.on('resume', () => this.resume()); + } + matchEmit(e) { + this.results.write(e); + if (!this.results.flowing) + this.pause(); + } + stream() { + const target = this.path; + if (target.isUnknown()) { + target.lstat().then(() => { + this.walkCB(target, this.patterns, () => this.results.end()); + }); + } + else { + this.walkCB(target, this.patterns, () => this.results.end()); + } + return this.results; + } + streamSync() { + if (this.path.isUnknown()) { + this.path.lstatSync(); + } + this.walkCBSync(this.path, this.patterns, () => this.results.end()); + return this.results; + } +} +//# sourceMappingURL=walker.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/walker.js.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/walker.js.map new file mode 100644 index 00000000000000..8756bfca294503 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/walker.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"walker.js","sourceRoot":"","sources":["../../src/walker.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AACH,OAAO,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAA;AAEnC,OAAO,EAAE,MAAM,EAAc,MAAM,aAAa,CAAA;AAQhD,OAAO,EAAE,SAAS,EAAE,MAAM,gBAAgB,CAAA;AAiE1C,MAAM,UAAU,GAAG,CACjB,MAAsC,EACtC,IAAoB,EACR,EAAE,CACd,OAAO,MAAM,KAAK,QAAQ;IACxB,CAAC,CAAC,IAAI,MAAM,CAAC,CAAC,MAAM,CAAC,EAAE,IAAI,CAAC;IAC5B,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC;QACvB,CAAC,CAAC,IAAI,MAAM,CAAC,MAAM,EAAE,IAAI,CAAC;QAC1B,CAAC,CAAC,MAAM,CAAA;AAEZ;;GAEG;AACH,MAAM,OAAgB,QAAQ;IAC5B,IAAI,CAAM;IACV,QAAQ,CAAW;IACnB,IAAI,CAAG;IACP,IAAI,GAAc,IAAI,GAAG,EAAQ,CAAA;IACjC,MAAM,GAAY,KAAK,CAAA;IACvB,OAAO,GAAY,KAAK,CAAA;IACxB,SAAS,GAAkB,EAAE,CAAA;IAC7B,OAAO,CAAa;IACpB,IAAI,CAAY;IAChB,MAAM,CAAc;IACpB,QAAQ,CAAQ;IAGhB,YAAY,QAAmB,EAAE,IAAU,EAAE,IAAO;QAClD,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAA;QACxB,IAAI,CAAC,IAAI,GAAG,IAAI,CAAA;QAChB,IAAI,CAAC,IAAI,GAAG,IAAI,CAAA;QAChB,IAAI,CAAC,IAAI,GAAG,CAAC,IAAI,CAAC,KAAK,IAAI,IAAI,CAAC,QAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAA;QACjE,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,IAAI,CAAC,OAAO,GAAG,UAAU,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,CAAA;SAC7C;QACD,6DAA6D;QAC7D,mBAAmB;QACnB,qBAAqB;QACrB,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,IAAI,QAAQ,CAAA;QACzC,oBAAoB;QACpB,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;YACzB,IAAI,CAAC,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE;gBACzC,IAAI,CAAC,SAAS,CAAC,MAAM,GAAG,CAAC,CAAA;YAC3B,CAAC,CAAC,CAAA;SACH;IACH,CAAC;IAED,QAAQ,CAAC,IAAU;QACjB,OAAO,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,EAAE,CAAC,IAAI,CAAC,CAAA;IAC/D,CAAC;IACD,gBAAgB,CAAC,IAAU;QACzB,OAAO,CAAC,CAAC,IAAI,CAAC,OAAO,EAAE,eAAe,EAAE,CAAC,IAAI,CAAC,CAAA;IAChD,CAAC;IAED,yBAAyB;IACzB,KAAK;QACH,IAAI,CAAC,MAAM,GAAG,IAAI,CAAA;IACpB,CAAC;IACD,MAAM;QACJ,qBAAqB;QACrB,IAAI,IAAI,CAAC,MAAM,EAAE,OAAO;YAAE,OAAM;QAChC,oBAAoB;QACpB,IAAI,CAAC,MAAM,GAAG,KAAK,CAAA;QACnB,IAAI,EAAE,GAA4B,SAAS,CAAA;QAC3C,OAAO,CAAC,IAAI,CAAC,MAAM,IAAI,CAAC,EAAE,GAAG,IAAI,CAAC,SAAS,CAAC,KAAK,EAAE,CAAC,EAAE;YACpD,EAAE,EAAE,CAAA;SACL;IACH,CAAC;IACD,QAAQ,CAAC,EAAa;QACpB,IAAI,IAAI,CAAC,MAAM,EAAE,OAAO;YAAE,OAAM;QAChC,qBAAqB;QACrB,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;YAChB,EAAE,EAAE,CAAA;SACL;aAAM;YACL,oBAAoB;YACpB,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE,CAAC,CAAA;SACxB;IACH,CAAC;IAED,+DAA+D;IAC/D,wCAAwC;IACxC,KAAK,CAAC,UAAU,CAAC,CAAO,EAAE,KAAc;QACtC,IAAI,KAAK,IAAI,IAAI,CAAC,IAAI,CAAC,KAAK;YAAE,OAAO,SAAS,CAAA;QAC9C,IAAI,GAAqB,CAAA;QACzB,IAAI,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE;YACtB,GAAG,GAAG,CAAC,CAAC,cAAc,EAAE,IAAI,CAAC,MAAM,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAA;YAChD,IAAI,CAAC,GAAG;gBAAE,OAAO,SAAS,CAAA;YAC1B,CAAC,GAAG,GAAG,CAAA;SACR;QACD,MAAM,QAAQ,GAAG,CAAC,CAAC,SAAS,EAAE,IAAI,IAAI,CAAC,IAAI,CAAC,IAAI,CAAA;QAChD,OAAO,IAAI,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,CAAA;IACnE,CAAC;IAED,cAAc,CAAC,CAAmB,EAAE,KAAc;QAChD,OAAO,CAAC;YACN,CAAC,IAAI,CAAC,QAAQ,KAAK,QAAQ,IAAI,CAAC,CAAC,KAAK,EAAE,IAAI,IAAI,CAAC,QAAQ,CAAC;YAC1D,CAAC,CAAC,KAAK,IAAI,CAAC,CAAC,UAAU,EAAE,CAAC;YAC1B,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,IAAI,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC;YACtC,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC;YACjB,CAAC,CAAC,CAAC;YACH,CAAC,CAAC,SAAS,CAAA;IACf,CAAC;IAED,cAAc,CAAC,CAAO,EAAE,KAAc;QACpC,IAAI,KAAK,IAAI,IAAI,CAAC,IAAI,CAAC,KAAK;YAAE,OAAO,SAAS,CAAA;QAC9C,IAAI,GAAqB,CAAA;QACzB,IAAI,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE;YACtB,GAAG,GAAG,CAAC,CAAC,cAAc,EAAE,IAAI,CAAC,CAAC,YAAY,EAAE,CAAA;YAC5C,IAAI,CAAC,GAAG;gBAAE,OAAO,SAAS,CAAA;YAC1B,CAAC,GAAG,GAAG,CAAA;SACR;QACD,MAAM,QAAQ,GAAG,CAAC,CAAC,SAAS,EAAE,IAAI,IAAI,CAAC,IAAI,CAAC,I
AAI,CAAA;QAChD,OAAO,IAAI,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,CAAA;IACjE,CAAC;IAKD,WAAW,CAAC,CAAO,EAAE,QAAiB;QACpC,IAAI,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC;YAAE,OAAM;QAC5B,MAAM,GAAG,GACP,IAAI,CAAC,IAAI,CAAC,QAAQ,KAAK,SAAS,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAA;QAClE,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;QAChB,MAAM,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,IAAI,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAA;QAC/D,4BAA4B;QAC5B,IAAI,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE;YAC3B,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAA;SAClB;aAAM,IAAI,GAAG,EAAE;YACd,MAAM,GAAG,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,aAAa,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAA;YAC9D,IAAI,CAAC,SAAS,CAAC,GAAG,GAAG,IAAI,CAAC,CAAA;SAC3B;aAAM;YACL,MAAM,GAAG,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,aAAa,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAA;YAC9D,MAAM,GAAG,GACP,IAAI,CAAC,IAAI,CAAC,WAAW,IAAI,CAAC,GAAG,CAAC,UAAU,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC;gBACxD,CAAC,CAAC,GAAG,GAAG,IAAI,CAAC,IAAI;gBACjB,CAAC,CAAC,EAAE,CAAA;YACR,IAAI,CAAC,SAAS,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,GAAG,IAAI,CAAC,CAAC,CAAC,GAAG,GAAG,GAAG,GAAG,IAAI,CAAC,CAAA;SACrD;IACH,CAAC;IAED,KAAK,CAAC,KAAK,CAAC,CAAO,EAAE,QAAiB,EAAE,KAAc;QACpD,MAAM,CAAC,GAAG,MAAM,IAAI,CAAC,UAAU,CAAC,CAAC,EAAE,KAAK,CAAC,CAAA;QACzC,IAAI,CAAC;YAAE,IAAI,CAAC,WAAW,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAA;IACtC,CAAC;IAED,SAAS,CAAC,CAAO,EAAE,QAAiB,EAAE,KAAc;QAClD,MAAM,CAAC,GAAG,IAAI,CAAC,cAAc,CAAC,CAAC,EAAE,KAAK,CAAC,CAAA;QACvC,IAAI,CAAC;YAAE,IAAI,CAAC,WAAW,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAA;IACtC,CAAC;IAED,MAAM,CAAC,MAAY,EAAE,QAAmB,EAAE,EAAa;QACrD,qBAAqB;QACrB,IAAI,IAAI,CAAC,MAAM,EAAE,OAAO;YAAE,EAAE,EAAE,CAAA;QAC9B,oBAAoB;QACpB,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,QAAQ,EAAE,IAAI,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,EAAE,CAAC,CAAA;IAC9D,CAAC;IAED,OAAO,CACL,MAAY,EACZ,QAAmB,EACnB,SAAoB,EACpB,EAAa;QAEb,IAAI,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC;YAAE,OAAO,EAAE,EAAE,CAAA;QAC9C,IAAI,IAAI,CAAC,MAAM,EAAE,OAAO;YAAE,EAAE,EAAE,CAAA;QAC9B,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,IAAI,CAAC,QAAQ,CAAC,GAAG,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,QAAQ,EAAE,SAAS,EAAE,EAAE,CAAC,CAAC,CAAA;YAClE,OAAM;SACP;QACD,SAAS,CAAC,eAAe,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QAE3C,qEAAqE;QACrE,4DAA4D;QAC5D,yDAAyD;QACzD,IAAI,KAAK,GAAG,CAAC,CAAA;QACb,MAAM,IAAI,GAAG,GAAG,EAAE;YAChB,IAAI,EAAE,KAAK,KAAK,CAAC;gBAAE,EAAE,EAAE,CAAA;QACzB,CAAC,CAAA;QAED,KAAK,MAAM,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,IAAI,SAAS,CAAC,OAAO,CAAC,OAAO,EAAE,EAAE;YAC9D,IAAI,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC;gBAAE,SAAQ;YAC9B,KAAK,EAAE,CAAA;YACP,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC,IAAI,EAAE,CAAC,CAAA;SAClD;QAED,KAAK,MAAM,CAAC,IAAI,SAAS,CAAC,cAAc,EAAE,EAAE;YAC1C,IAAI,IAAI,CAAC,QAAQ,KAAK,QAAQ,IAAI,CAAC,CAAC,KAAK,EAAE,IAAI,IAAI,CAAC,QAAQ,EAAE;gBAC5D,SAAQ;aACT;YACD,KAAK,EAAE,CAAA;YACP,MAAM,cAAc,GAAG,CAAC,CAAC,aAAa,EAAE,CAAA;YACxC,IAAI,CAAC,CAAC,aAAa,EAAE;gBACnB,IAAI,CAAC,OAAO,CAAC,CAAC,EAAE,cAAc,EAAE,SAAS,EAAE,IAAI,CAAC,CAAA;iBAC7C;gBACH,CAAC,CAAC,SAAS,CACT,CAAC,CAAC,EAAE,OAAO,EAAE,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,EAAE,OAAO,EAAE,SAAS,EAAE,IAAI,CAAC,EACzD,IAAI,CACL,CAAA;aACF;SACF;QAED,IAAI,EAAE,CAAA;IACR,CAAC;IAED,OAAO,CACL,MAAY,EACZ,OAAe,EACf,SAAoB,EACpB,EAAa;QAEb,SAAS,GAAG,SAAS,CAAC,aAAa,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA;QAEpD,IAAI,KAAK,GAAG,CAAC,CAAA;QACb,MAAM,IAAI,GAAG,GAAG,EAAE;YAChB,IAAI,EAAE,KAAK,KAAK,CAAC;gBAAE,EAAE,EAAE,CAAA;QACzB,CAAC,CAAA;QAED,KAAK,MAAM,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,IAAI,SAAS,CAAC,OAAO,CAAC,OAAO,EAAE,EAAE;YAC9D,IAAI,IAAI,CAAC,QAAQ,CAAC
,CAAC,CAAC;gBAAE,SAAQ;YAC9B,KAAK,EAAE,CAAA;YACP,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC,IAAI,EAAE,CAAC,CAAA;SAClD;QACD,KAAK,MAAM,CAAC,MAAM,EAAE,QAAQ,CAAC,IAAI,SAAS,CAAC,QAAQ,CAAC,OAAO,EAAE,EAAE;YAC7D,KAAK,EAAE,CAAA;YACP,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,QAAQ,EAAE,SAAS,CAAC,KAAK,EAAE,EAAE,IAAI,CAAC,CAAA;SACxD;QAED,IAAI,EAAE,CAAA;IACR,CAAC;IAED,UAAU,CAAC,MAAY,EAAE,QAAmB,EAAE,EAAa;QACzD,qBAAqB;QACrB,IAAI,IAAI,CAAC,MAAM,EAAE,OAAO;YAAE,EAAE,EAAE,CAAA;QAC9B,oBAAoB;QACpB,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,QAAQ,EAAE,IAAI,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,EAAE,CAAC,CAAA;IAClE,CAAC;IAED,WAAW,CACT,MAAY,EACZ,QAAmB,EACnB,SAAoB,EACpB,EAAa;QAEb,IAAI,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC;YAAE,OAAO,EAAE,EAAE,CAAA;QAC9C,IAAI,IAAI,CAAC,MAAM,EAAE,OAAO;YAAE,EAAE,EAAE,CAAA;QAC9B,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,IAAI,CAAC,QAAQ,CAAC,GAAG,EAAE,CACjB,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,QAAQ,EAAE,SAAS,EAAE,EAAE,CAAC,CAClD,CAAA;YACD,OAAM;SACP;QACD,SAAS,CAAC,eAAe,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QAE3C,qEAAqE;QACrE,4DAA4D;QAC5D,yDAAyD;QACzD,IAAI,KAAK,GAAG,CAAC,CAAA;QACb,MAAM,IAAI,GAAG,GAAG,EAAE;YAChB,IAAI,EAAE,KAAK,KAAK,CAAC;gBAAE,EAAE,EAAE,CAAA;QACzB,CAAC,CAAA;QAED,KAAK,MAAM,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,IAAI,SAAS,CAAC,OAAO,CAAC,OAAO,EAAE,EAAE;YAC9D,IAAI,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC;gBAAE,SAAQ;YAC9B,IAAI,CAAC,SAAS,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAA;SACnC;QAED,KAAK,MAAM,CAAC,IAAI,SAAS,CAAC,cAAc,EAAE,EAAE;YAC1C,IAAI,IAAI,CAAC,QAAQ,KAAK,QAAQ,IAAI,CAAC,CAAC,KAAK,EAAE,IAAI,IAAI,CAAC,QAAQ,EAAE;gBAC5D,SAAQ;aACT;YACD,KAAK,EAAE,CAAA;YACP,MAAM,QAAQ,GAAG,CAAC,CAAC,WAAW,EAAE,CAAA;YAChC,IAAI,CAAC,WAAW,CAAC,CAAC,EAAE,QAAQ,EAAE,SAAS,EAAE,IAAI,CAAC,CAAA;SAC/C;QAED,IAAI,EAAE,CAAA;IACR,CAAC;IAED,WAAW,CACT,MAAY,EACZ,OAAe,EACf,SAAoB,EACpB,EAAa;QAEb,SAAS,GAAG,SAAS,CAAC,aAAa,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA;QAEpD,IAAI,KAAK,GAAG,CAAC,CAAA;QACb,MAAM,IAAI,GAAG,GAAG,EAAE;YAChB,IAAI,EAAE,KAAK,KAAK,CAAC;gBAAE,EAAE,EAAE,CAAA;QACzB,CAAC,CAAA;QAED,KAAK,MAAM,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,IAAI,SAAS,CAAC,OAAO,CAAC,OAAO,EAAE,EAAE;YAC9D,IAAI,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC;gBAAE,SAAQ;YAC9B,IAAI,CAAC,SAAS,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAA;SACnC;QACD,KAAK,MAAM,CAAC,MAAM,EAAE,QAAQ,CAAC,IAAI,SAAS,CAAC,QAAQ,CAAC,OAAO,EAAE,EAAE;YAC7D,KAAK,EAAE,CAAA;YACP,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,QAAQ,EAAE,SAAS,CAAC,KAAK,EAAE,EAAE,IAAI,CAAC,CAAA;SAC5D;QAED,IAAI,EAAE,CAAA;IACR,CAAC;CACF;AAED,MAAM,OAAO,UAEX,SAAQ,QAAW;IACnB,OAAO,CAMe;IAEtB,YAAY,QAAmB,EAAE,IAAU,EAAE,IAAO;QAClD,KAAK,CAAC,QAAQ,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;QAC3B,IAAI,CAAC,OAAO,GAAG,IAAI,GAAG,EAAgB,CAAA;IACxC,CAAC;IAGD,SAAS,CAAC,CAAgB;QACxB,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;IACrB,CAAC;IAED,KAAK,CAAC,IAAI;QACR,IAAI,IAAI,CAAC,MAAM,EAAE,OAAO;YAAE,MAAM,IAAI,CAAC,MAAM,CAAC,MAAM,CAAA;QAClD,IAAI,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,EAAE;YACzB,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,CAAA;SACxB;QACD,MAAM,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE;YAC7B,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,QAAQ,EAAE,GAAG,EAAE;gBACzC,IAAI,IAAI,CAAC,MAAM,EAAE,OAAO,EAAE;oBACxB,GAAG,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,CAAA;iBACxB;qBAAM;oBACL,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;iBAClB;YACH,CAAC,CAAC,CAAA;QACJ,CAAC,CAAC,CAAA;QACF,OAAO,IAAI,CAAC,OAAO,CAAA;IACrB,CAAC;IAED,QAAQ;QACN,IAAI,IAAI,CAAC,MAAM,EAAE,OAAO;YAAE,MAAM,IAAI,CAAC,MAAM,CAAC,MAAM,CAAA;QAClD,IAAI,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,EAAE;YACzB,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,CAAA;SACtB;QACD,4DAA4D;QAC5D,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,QAAQ,EAAE,GAAG,EAAE;YAC7C,IAAI,IAAI,CAAC,MAAM,EAAE,OAAO;gBAAE,MAAM,IAAI,CAAC,MAAM,CAAC,MAAM,CAAA;QA
CpD,CAAC,CAAC,CAAA;QACF,OAAO,IAAI,CAAC,OAAO,CAAA;IACrB,CAAC;CACF;AAED,MAAM,OAAO,UAEX,SAAQ,QAAW;IACnB,OAAO,CAMmC;IAE1C,YAAY,QAAmB,EAAE,IAAU,EAAE,IAAO;QAClD,KAAK,CAAC,QAAQ,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;QAC3B,IAAI,CAAC,OAAO,GAAG,IAAI,QAAQ,CAAC;YAC1B,MAAM,EAAE,IAAI,CAAC,MAAM;YACnB,UAAU,EAAE,IAAI;SACjB,CAAmB,CAAA;QACpB,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,OAAO,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAA;QAC7C,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,QAAQ,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAA;IAChD,CAAC;IAGD,SAAS,CAAC,CAAgB;QACxB,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,CAAA;QACrB,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO;YAAE,IAAI,CAAC,KAAK,EAAE,CAAA;IACzC,CAAC;IAED,MAAM;QACJ,MAAM,MAAM,GAAG,IAAI,CAAC,IAAI,CAAA;QACxB,IAAI,MAAM,CAAC,SAAS,EAAE,EAAE;YACtB,MAAM,CAAC,KAAK,EAAE,CAAC,IAAI,CAAC,GAAG,EAAE;gBACvB,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,IAAI,CAAC,QAAQ,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,CAAC,CAAA;YAC9D,CAAC,CAAC,CAAA;SACH;aAAM;YACL,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,IAAI,CAAC,QAAQ,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,CAAC,CAAA;SAC7D;QACD,OAAO,IAAI,CAAC,OAAO,CAAA;IACrB,CAAC;IAED,UAAU;QACR,IAAI,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,EAAE;YACzB,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,CAAA;SACtB;QACD,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,QAAQ,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,CAAC,CAAA;QACnE,OAAO,IAAI,CAAC,OAAO,CAAA;IACrB,CAAC;CACF","sourcesContent":["/**\n * Single-use utility classes to provide functionality to the {@link Glob}\n * methods.\n *\n * @module\n */\nimport { Minipass } from 'minipass'\nimport { Path } from 'path-scurry'\nimport { Ignore, IgnoreLike } from './ignore.js'\n\n// XXX can we somehow make it so that it NEVER processes a given path more than\n// once, enough that the match set tracking is no longer needed? that'd speed\n// things up a lot. Or maybe bring back nounique, and skip it in that case?\n\n// a single minimatch set entry with 1 or more parts\nimport { Pattern } from './pattern.js'\nimport { Processor } from './processor.js'\n\nexport interface GlobWalkerOpts {\n absolute?: boolean\n allowWindowsEscape?: boolean\n cwd?: string | URL\n dot?: boolean\n dotRelative?: boolean\n follow?: boolean\n ignore?: string | string[] | IgnoreLike\n mark?: boolean\n matchBase?: boolean\n // Note: maxDepth here means \"maximum actual Path.depth()\",\n // not \"maximum depth beyond cwd\"\n maxDepth?: number\n nobrace?: boolean\n nocase?: boolean\n nodir?: boolean\n noext?: boolean\n noglobstar?: boolean\n platform?: NodeJS.Platform\n posix?: boolean\n realpath?: boolean\n root?: string\n stat?: boolean\n signal?: AbortSignal\n windowsPathsNoEscape?: boolean\n withFileTypes?: boolean\n}\n\nexport type GWOFileTypesTrue = GlobWalkerOpts & {\n withFileTypes: true\n}\nexport type GWOFileTypesFalse = GlobWalkerOpts & {\n withFileTypes: false\n}\nexport type GWOFileTypesUnset = GlobWalkerOpts & {\n withFileTypes?: undefined\n}\n\nexport type Result = O extends GWOFileTypesTrue\n ? Path\n : O extends GWOFileTypesFalse\n ? string\n : O extends GWOFileTypesUnset\n ? string\n : Path | string\n\nexport type Matches = O extends GWOFileTypesTrue\n ? Set\n : O extends GWOFileTypesFalse\n ? Set\n : O extends GWOFileTypesUnset\n ? Set\n : Set\n\nexport type MatchStream =\n O extends GWOFileTypesTrue\n ? Minipass\n : O extends GWOFileTypesFalse\n ? Minipass\n : O extends GWOFileTypesUnset\n ? Minipass\n : Minipass\n\nconst makeIgnore = (\n ignore: string | string[] | IgnoreLike,\n opts: GlobWalkerOpts\n): IgnoreLike =>\n typeof ignore === 'string'\n ? 
new Ignore([ignore], opts)\n : Array.isArray(ignore)\n ? new Ignore(ignore, opts)\n : ignore\n\n/**\n * basic walking utilities that all the glob walker types use\n */\nexport abstract class GlobUtil {\n path: Path\n patterns: Pattern[]\n opts: O\n seen: Set = new Set()\n paused: boolean = false\n aborted: boolean = false\n #onResume: (() => any)[] = []\n #ignore?: IgnoreLike\n #sep: '\\\\' | '/'\n signal?: AbortSignal\n maxDepth: number\n\n constructor(patterns: Pattern[], path: Path, opts: O)\n constructor(patterns: Pattern[], path: Path, opts: O) {\n this.patterns = patterns\n this.path = path\n this.opts = opts\n this.#sep = !opts.posix && opts.platform === 'win32' ? '\\\\' : '/'\n if (opts.ignore) {\n this.#ignore = makeIgnore(opts.ignore, opts)\n }\n // ignore, always set with maxDepth, but it's optional on the\n // GlobOptions type\n /* c8 ignore start */\n this.maxDepth = opts.maxDepth || Infinity\n /* c8 ignore stop */\n if (opts.signal) {\n this.signal = opts.signal\n this.signal.addEventListener('abort', () => {\n this.#onResume.length = 0\n })\n }\n }\n\n #ignored(path: Path): boolean {\n return this.seen.has(path) || !!this.#ignore?.ignored?.(path)\n }\n #childrenIgnored(path: Path): boolean {\n return !!this.#ignore?.childrenIgnored?.(path)\n }\n\n // backpressure mechanism\n pause() {\n this.paused = true\n }\n resume() {\n /* c8 ignore start */\n if (this.signal?.aborted) return\n /* c8 ignore stop */\n this.paused = false\n let fn: (() => any) | undefined = undefined\n while (!this.paused && (fn = this.#onResume.shift())) {\n fn()\n }\n }\n onResume(fn: () => any) {\n if (this.signal?.aborted) return\n /* c8 ignore start */\n if (!this.paused) {\n fn()\n } else {\n /* c8 ignore stop */\n this.#onResume.push(fn)\n }\n }\n\n // do the requisite realpath/stat checking, and return the path\n // to add or undefined to filter it out.\n async matchCheck(e: Path, ifDir: boolean): Promise {\n if (ifDir && this.opts.nodir) return undefined\n let rpc: Path | undefined\n if (this.opts.realpath) {\n rpc = e.realpathCached() || (await e.realpath())\n if (!rpc) return undefined\n e = rpc\n }\n const needStat = e.isUnknown() || this.opts.stat\n return this.matchCheckTest(needStat ? await e.lstat() : e, ifDir)\n }\n\n matchCheckTest(e: Path | undefined, ifDir: boolean): Path | undefined {\n return e &&\n (this.maxDepth === Infinity || e.depth() <= this.maxDepth) &&\n (!ifDir || e.canReaddir()) &&\n (!this.opts.nodir || !e.isDirectory()) &&\n !this.#ignored(e)\n ? e\n : undefined\n }\n\n matchCheckSync(e: Path, ifDir: boolean): Path | undefined {\n if (ifDir && this.opts.nodir) return undefined\n let rpc: Path | undefined\n if (this.opts.realpath) {\n rpc = e.realpathCached() || e.realpathSync()\n if (!rpc) return undefined\n e = rpc\n }\n const needStat = e.isUnknown() || this.opts.stat\n return this.matchCheckTest(needStat ? e.lstatSync() : e, ifDir)\n }\n\n abstract matchEmit(p: Result): void\n abstract matchEmit(p: string | Path): void\n\n matchFinish(e: Path, absolute: boolean) {\n if (this.#ignored(e)) return\n const abs =\n this.opts.absolute === undefined ? absolute : this.opts.absolute\n this.seen.add(e)\n const mark = this.opts.mark && e.isDirectory() ? this.#sep : ''\n // ok, we have what we need!\n if (this.opts.withFileTypes) {\n this.matchEmit(e)\n } else if (abs) {\n const abs = this.opts.posix ? e.fullpathPosix() : e.fullpath()\n this.matchEmit(abs + mark)\n } else {\n const rel = this.opts.posix ? 
e.relativePosix() : e.relative()\n const pre =\n this.opts.dotRelative && !rel.startsWith('..' + this.#sep)\n ? '.' + this.#sep\n : ''\n this.matchEmit(!rel ? '.' + mark : pre + rel + mark)\n }\n }\n\n async match(e: Path, absolute: boolean, ifDir: boolean): Promise {\n const p = await this.matchCheck(e, ifDir)\n if (p) this.matchFinish(p, absolute)\n }\n\n matchSync(e: Path, absolute: boolean, ifDir: boolean): void {\n const p = this.matchCheckSync(e, ifDir)\n if (p) this.matchFinish(p, absolute)\n }\n\n walkCB(target: Path, patterns: Pattern[], cb: () => any) {\n /* c8 ignore start */\n if (this.signal?.aborted) cb()\n /* c8 ignore stop */\n this.walkCB2(target, patterns, new Processor(this.opts), cb)\n }\n\n walkCB2(\n target: Path,\n patterns: Pattern[],\n processor: Processor,\n cb: () => any\n ) {\n if (this.#childrenIgnored(target)) return cb()\n if (this.signal?.aborted) cb()\n if (this.paused) {\n this.onResume(() => this.walkCB2(target, patterns, processor, cb))\n return\n }\n processor.processPatterns(target, patterns)\n\n // done processing. all of the above is sync, can be abstracted out.\n // subwalks is a map of paths to the entry filters they need\n // matches is a map of paths to [absolute, ifDir] tuples.\n let tasks = 1\n const next = () => {\n if (--tasks === 0) cb()\n }\n\n for (const [m, absolute, ifDir] of processor.matches.entries()) {\n if (this.#ignored(m)) continue\n tasks++\n this.match(m, absolute, ifDir).then(() => next())\n }\n\n for (const t of processor.subwalkTargets()) {\n if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) {\n continue\n }\n tasks++\n const childrenCached = t.readdirCached()\n if (t.calledReaddir())\n this.walkCB3(t, childrenCached, processor, next)\n else {\n t.readdirCB(\n (_, entries) => this.walkCB3(t, entries, processor, next),\n true\n )\n }\n }\n\n next()\n }\n\n walkCB3(\n target: Path,\n entries: Path[],\n processor: Processor,\n cb: () => any\n ) {\n processor = processor.filterEntries(target, entries)\n\n let tasks = 1\n const next = () => {\n if (--tasks === 0) cb()\n }\n\n for (const [m, absolute, ifDir] of processor.matches.entries()) {\n if (this.#ignored(m)) continue\n tasks++\n this.match(m, absolute, ifDir).then(() => next())\n }\n for (const [target, patterns] of processor.subwalks.entries()) {\n tasks++\n this.walkCB2(target, patterns, processor.child(), next)\n }\n\n next()\n }\n\n walkCBSync(target: Path, patterns: Pattern[], cb: () => any) {\n /* c8 ignore start */\n if (this.signal?.aborted) cb()\n /* c8 ignore stop */\n this.walkCB2Sync(target, patterns, new Processor(this.opts), cb)\n }\n\n walkCB2Sync(\n target: Path,\n patterns: Pattern[],\n processor: Processor,\n cb: () => any\n ) {\n if (this.#childrenIgnored(target)) return cb()\n if (this.signal?.aborted) cb()\n if (this.paused) {\n this.onResume(() =>\n this.walkCB2Sync(target, patterns, processor, cb)\n )\n return\n }\n processor.processPatterns(target, patterns)\n\n // done processing. 
all of the above is sync, can be abstracted out.\n // subwalks is a map of paths to the entry filters they need\n // matches is a map of paths to [absolute, ifDir] tuples.\n let tasks = 1\n const next = () => {\n if (--tasks === 0) cb()\n }\n\n for (const [m, absolute, ifDir] of processor.matches.entries()) {\n if (this.#ignored(m)) continue\n this.matchSync(m, absolute, ifDir)\n }\n\n for (const t of processor.subwalkTargets()) {\n if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) {\n continue\n }\n tasks++\n const children = t.readdirSync()\n this.walkCB3Sync(t, children, processor, next)\n }\n\n next()\n }\n\n walkCB3Sync(\n target: Path,\n entries: Path[],\n processor: Processor,\n cb: () => any\n ) {\n processor = processor.filterEntries(target, entries)\n\n let tasks = 1\n const next = () => {\n if (--tasks === 0) cb()\n }\n\n for (const [m, absolute, ifDir] of processor.matches.entries()) {\n if (this.#ignored(m)) continue\n this.matchSync(m, absolute, ifDir)\n }\n for (const [target, patterns] of processor.subwalks.entries()) {\n tasks++\n this.walkCB2Sync(target, patterns, processor.child(), next)\n }\n\n next()\n }\n}\n\nexport class GlobWalker<\n O extends GlobWalkerOpts = GlobWalkerOpts\n> extends GlobUtil {\n matches: O extends GWOFileTypesTrue\n ? Set\n : O extends GWOFileTypesFalse\n ? Set\n : O extends GWOFileTypesUnset\n ? Set\n : Set\n\n constructor(patterns: Pattern[], path: Path, opts: O) {\n super(patterns, path, opts)\n this.matches = new Set() as Matches\n }\n\n matchEmit(e: Result): void\n matchEmit(e: Path | string): void {\n this.matches.add(e)\n }\n\n async walk(): Promise> {\n if (this.signal?.aborted) throw this.signal.reason\n if (this.path.isUnknown()) {\n await this.path.lstat()\n }\n await new Promise((res, rej) => {\n this.walkCB(this.path, this.patterns, () => {\n if (this.signal?.aborted) {\n rej(this.signal.reason)\n } else {\n res(this.matches)\n }\n })\n })\n return this.matches\n }\n\n walkSync(): Matches {\n if (this.signal?.aborted) throw this.signal.reason\n if (this.path.isUnknown()) {\n this.path.lstatSync()\n }\n // nothing for the callback to do, because this never pauses\n this.walkCBSync(this.path, this.patterns, () => {\n if (this.signal?.aborted) throw this.signal.reason\n })\n return this.matches\n }\n}\n\nexport class GlobStream<\n O extends GlobWalkerOpts = GlobWalkerOpts\n> extends GlobUtil {\n results: O extends GWOFileTypesTrue\n ? Minipass\n : O extends GWOFileTypesFalse\n ? Minipass\n : O extends GWOFileTypesUnset\n ? 
Minipass\n : Minipass\n\n constructor(patterns: Pattern[], path: Path, opts: O) {\n super(patterns, path, opts)\n this.results = new Minipass({\n signal: this.signal,\n objectMode: true,\n }) as MatchStream\n this.results.on('drain', () => this.resume())\n this.results.on('resume', () => this.resume())\n }\n\n matchEmit(e: Result): void\n matchEmit(e: Path | string): void {\n this.results.write(e)\n if (!this.results.flowing) this.pause()\n }\n\n stream(): MatchStream {\n const target = this.path\n if (target.isUnknown()) {\n target.lstat().then(() => {\n this.walkCB(target, this.patterns, () => this.results.end())\n })\n } else {\n this.walkCB(target, this.patterns, () => this.results.end())\n }\n return this.results\n }\n\n streamSync(): MatchStream {\n if (this.path.isUnknown()) {\n this.path.lstatSync()\n }\n this.walkCBSync(this.path, this.patterns, () => this.results.end())\n return this.results\n }\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/package.json b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/package.json new file mode 100644 index 00000000000000..2d25985d2bbb5d --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/package.json @@ -0,0 +1,98 @@ +{ + "author": "Isaac Z. Schlueter (https://blog.izs.me/)", + "name": "glob", + "description": "the most correct and second fastest glob implementation in JavaScript", + "version": "10.3.3", + "bin": "./dist/cjs/src/bin.js", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/node-glob.git" + }, + "main": "./dist/cjs/src/index.js", + "module": "./dist/mjs/index.js", + "types": "./dist/mjs/index.d.ts", + "exports": { + ".": { + "import": { + "types": "./dist/mjs/index.d.ts", + "default": "./dist/mjs/index.js" + }, + "require": { + "types": "./dist/cjs/src/index.d.ts", + "default": "./dist/cjs/src/index.js" + } + } + }, + "files": [ + "dist" + ], + "scripts": { + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "preprepare": "rm -rf dist", + "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json && bash fixup.sh", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "test": "c8 tap", + "snap": "c8 tap", + "format": "prettier --write . 
--loglevel warn", + "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts", + "prepublish": "npm run benchclean", + "profclean": "rm -f v8.log profile.txt", + "test-regen": "npm run profclean && TEST_REGEN=1 node --no-warnings --loader ts-node/esm test/00-setup.ts", + "prebench": "npm run prepare", + "bench": "bash benchmark.sh", + "preprof": "npm run prepare", + "prof": "bash prof.sh", + "benchclean": "node benchclean.js" + }, + "prettier": { + "semi": false, + "printWidth": 75, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "dependencies": { + "foreground-child": "^3.1.0", + "jackspeak": "^2.0.3", + "minimatch": "^9.0.1", + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0", + "path-scurry": "^1.10.1" + }, + "devDependencies": { + "@types/node": "^20.3.2", + "@types/tap": "^15.0.7", + "c8": "^7.12.0", + "memfs": "^3.4.13", + "mkdirp": "^2.1.4", + "prettier": "^2.8.3", + "rimraf": "^4.1.3", + "tap": "^16.3.4", + "ts-node": "^10.9.1", + "typedoc": "^0.23.24", + "typescript": "^4.9.4" + }, + "tap": { + "before": "test/00-setup.ts", + "coverage": false, + "node-arg": [ + "--no-warnings", + "--loader", + "ts-node/esm" + ], + "ts": false + }, + "license": "ISC", + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + } +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/LICENSE new file mode 100644 index 00000000000000..1493534e60dce4 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2011-2023 Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/assert-valid-pattern.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/assert-valid-pattern.js new file mode 100644 index 00000000000000..5fc86bbd0116c9 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/assert-valid-pattern.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.assertValidPattern = void 0; +const MAX_PATTERN_LENGTH = 1024 * 64; +const assertValidPattern = (pattern) => { + if (typeof pattern !== 'string') { + throw new TypeError('invalid pattern'); + } + if (pattern.length > MAX_PATTERN_LENGTH) { + throw new TypeError('pattern is too long'); + } +}; +exports.assertValidPattern = assertValidPattern; +//# sourceMappingURL=assert-valid-pattern.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/ast.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/ast.js new file mode 100644 index 00000000000000..0b0cc8f3c50b3d --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/ast.js @@ -0,0 +1,589 @@ +"use strict"; +// parse a single path portion +Object.defineProperty(exports, "__esModule", { value: true }); +exports.AST = void 0; +const brace_expressions_js_1 = require("./brace-expressions.js"); +const unescape_js_1 = require("./unescape.js"); +const types = new Set(['!', '?', '+', '*', '@']); +const isExtglobType = (c) => types.has(c); +// Patterns that get prepended to bind to the start of either the +// entire string, or just a single path portion, to prevent dots +// and/or traversal patterns, when needed. +// Exts don't need the ^ or / bit, because the root binds that already. +const startNoTraversal = '(?!(?:^|/)\\.\\.?(?:$|/))'; +const startNoDot = '(?!\\.)'; +// characters that indicate a start of pattern needs the "no dots" bit, +// because a dot *might* be matched. ( is not in the list, because in +// the case of a child extglob, it will handle the prevention itself. +const addPatternStart = new Set(['[', '.']); +// cases where traversal is A-OK, no dot prevention needed +const justDots = new Set(['..', '.']); +const reSpecials = new Set('().*{}+?[]^$\\!'); +const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&'); +// any single thing other than / +const qmark = '[^/]'; +// * => any number of characters +const star = qmark + '*?'; +// use + when we need to ensure that *something* matches, because the * is +// the only thing in the path portion. +const starNoEmpty = qmark + '+?'; +// remove the \ chars that we added if we end up doing a nonmagic compare +// const deslash = (s: string) => s.replace(/\\(.)/g, '$1') +class AST { + type; + #root; + #hasMagic; + #uflag = false; + #parts = []; + #parent; + #parentIndex; + #negs; + #filledNegs = false; + #options; + #toString; + // set to true if it's an extglob with no children + // (which really means one child of '') + #emptyExt = false; + constructor(type, parent, options = {}) { + this.type = type; + // extglobs are inherently magical + if (type) + this.#hasMagic = true; + this.#parent = parent; + this.#root = this.#parent ? this.#parent.#root : this; + this.#options = this.#root === this ? options : this.#root.#options; + this.#negs = this.#root === this ? [] : this.#root.#negs; + if (type === '!' 
&& !this.#root.#filledNegs) + this.#negs.push(this); + this.#parentIndex = this.#parent ? this.#parent.#parts.length : 0; + } + get hasMagic() { + /* c8 ignore start */ + if (this.#hasMagic !== undefined) + return this.#hasMagic; + /* c8 ignore stop */ + for (const p of this.#parts) { + if (typeof p === 'string') + continue; + if (p.type || p.hasMagic) + return (this.#hasMagic = true); + } + // note: will be undefined until we generate the regexp src and find out + return this.#hasMagic; + } + // reconstructs the pattern + toString() { + if (this.#toString !== undefined) + return this.#toString; + if (!this.type) { + return (this.#toString = this.#parts.map(p => String(p)).join('')); + } + else { + return (this.#toString = + this.type + '(' + this.#parts.map(p => String(p)).join('|') + ')'); + } + } + #fillNegs() { + /* c8 ignore start */ + if (this !== this.#root) + throw new Error('should only call on root'); + if (this.#filledNegs) + return this; + /* c8 ignore stop */ + // call toString() once to fill this out + this.toString(); + this.#filledNegs = true; + let n; + while ((n = this.#negs.pop())) { + if (n.type !== '!') + continue; + // walk up the tree, appending everthing that comes AFTER parentIndex + let p = n; + let pp = p.#parent; + while (pp) { + for (let i = p.#parentIndex + 1; !pp.type && i < pp.#parts.length; i++) { + for (const part of n.#parts) { + /* c8 ignore start */ + if (typeof part === 'string') { + throw new Error('string part in extglob AST??'); + } + /* c8 ignore stop */ + part.copyIn(pp.#parts[i]); + } + } + p = pp; + pp = p.#parent; + } + } + return this; + } + push(...parts) { + for (const p of parts) { + if (p === '') + continue; + /* c8 ignore start */ + if (typeof p !== 'string' && !(p instanceof AST && p.#parent === this)) { + throw new Error('invalid part: ' + p); + } + /* c8 ignore stop */ + this.#parts.push(p); + } + } + toJSON() { + const ret = this.type === null + ? this.#parts.slice().map(p => (typeof p === 'string' ? p : p.toJSON())) + : [this.type, ...this.#parts.map(p => p.toJSON())]; + if (this.isStart() && !this.type) + ret.unshift([]); + if (this.isEnd() && + (this === this.#root || + (this.#root.#filledNegs && this.#parent?.type === '!'))) { + ret.push({}); + } + return ret; + } + isStart() { + if (this.#root === this) + return true; + // if (this.type) return !!this.#parent?.isStart() + if (!this.#parent?.isStart()) + return false; + if (this.#parentIndex === 0) + return true; + // if everything AHEAD of this is a negation, then it's still the "start" + const p = this.#parent; + for (let i = 0; i < this.#parentIndex; i++) { + const pp = p.#parts[i]; + if (!(pp instanceof AST && pp.type === '!')) { + return false; + } + } + return true; + } + isEnd() { + if (this.#root === this) + return true; + if (this.#parent?.type === '!') + return true; + if (!this.#parent?.isEnd()) + return false; + if (!this.type) + return this.#parent?.isEnd(); + // if not root, it'll always have a parent + /* c8 ignore start */ + const pl = this.#parent ? 
this.#parent.#parts.length : 0; + /* c8 ignore stop */ + return this.#parentIndex === pl - 1; + } + copyIn(part) { + if (typeof part === 'string') + this.push(part); + else + this.push(part.clone(this)); + } + clone(parent) { + const c = new AST(this.type, parent); + for (const p of this.#parts) { + c.copyIn(p); + } + return c; + } + static #parseAST(str, ast, pos, opt) { + let escaping = false; + let inBrace = false; + let braceStart = -1; + let braceNeg = false; + if (ast.type === null) { + // outside of a extglob, append until we find a start + let i = pos; + let acc = ''; + while (i < str.length) { + const c = str.charAt(i++); + // still accumulate escapes at this point, but we do ignore + // starts that are escaped + if (escaping || c === '\\') { + escaping = !escaping; + acc += c; + continue; + } + if (inBrace) { + if (i === braceStart + 1) { + if (c === '^' || c === '!') { + braceNeg = true; + } + } + else if (c === ']' && !(i === braceStart + 2 && braceNeg)) { + inBrace = false; + } + acc += c; + continue; + } + else if (c === '[') { + inBrace = true; + braceStart = i; + braceNeg = false; + acc += c; + continue; + } + if (!opt.noext && isExtglobType(c) && str.charAt(i) === '(') { + ast.push(acc); + acc = ''; + const ext = new AST(c, ast); + i = AST.#parseAST(str, ext, i, opt); + ast.push(ext); + continue; + } + acc += c; + } + ast.push(acc); + return i; + } + // some kind of extglob, pos is at the ( + // find the next | or ) + let i = pos + 1; + let part = new AST(null, ast); + const parts = []; + let acc = ''; + while (i < str.length) { + const c = str.charAt(i++); + // still accumulate escapes at this point, but we do ignore + // starts that are escaped + if (escaping || c === '\\') { + escaping = !escaping; + acc += c; + continue; + } + if (inBrace) { + if (i === braceStart + 1) { + if (c === '^' || c === '!') { + braceNeg = true; + } + } + else if (c === ']' && !(i === braceStart + 2 && braceNeg)) { + inBrace = false; + } + acc += c; + continue; + } + else if (c === '[') { + inBrace = true; + braceStart = i; + braceNeg = false; + acc += c; + continue; + } + if (isExtglobType(c) && str.charAt(i) === '(') { + part.push(acc); + acc = ''; + const ext = new AST(c, part); + part.push(ext); + i = AST.#parseAST(str, ext, i, opt); + continue; + } + if (c === '|') { + part.push(acc); + acc = ''; + parts.push(part); + part = new AST(null, ast); + continue; + } + if (c === ')') { + if (acc === '' && ast.#parts.length === 0) { + ast.#emptyExt = true; + } + part.push(acc); + acc = ''; + ast.push(...parts, part); + return i; + } + acc += c; + } + // unfinished extglob + // if we got here, it was a malformed extglob! not an extglob, but + // maybe something else in there. + ast.type = null; + ast.#hasMagic = undefined; + ast.#parts = [str.substring(pos - 1)]; + return i; + } + static fromGlob(pattern, options = {}) { + const ast = new AST(null, undefined, options); + AST.#parseAST(pattern, ast, 0, options); + return ast; + } + // returns the regular expression if there's magic, or the unescaped + // string if not. + toMMPattern() { + // should only be called on root + /* c8 ignore start */ + if (this !== this.#root) + return this.#root.toMMPattern(); + /* c8 ignore stop */ + const glob = this.toString(); + const [re, body, hasMagic, uflag] = this.toRegExpSource(); + // if we're in nocase mode, and not nocaseMagicOnly, then we do + // still need a regular expression if we have to case-insensitively + // match capital/lowercase characters. 
+ const anyMagic = hasMagic || + this.#hasMagic || + (this.#options.nocase && + !this.#options.nocaseMagicOnly && + glob.toUpperCase() !== glob.toLowerCase()); + if (!anyMagic) { + return body; + } + const flags = (this.#options.nocase ? 'i' : '') + (uflag ? 'u' : ''); + return Object.assign(new RegExp(`^${re}$`, flags), { + _src: re, + _glob: glob, + }); + } + // returns the string match, the regexp source, whether there's magic + // in the regexp (so a regular expression is required) and whether or + // not the uflag is needed for the regular expression (for posix classes) + // TODO: instead of injecting the start/end at this point, just return + // the BODY of the regexp, along with the start/end portions suitable + // for binding the start/end in either a joined full-path makeRe context + // (where we bind to (^|/), or a standalone matchPart context (where + // we bind to ^, and not /). Otherwise slashes get duped! + // + // In part-matching mode, the start is: + // - if not isStart: nothing + // - if traversal possible, but not allowed: ^(?!\.\.?$) + // - if dots allowed or not possible: ^ + // - if dots possible and not allowed: ^(?!\.) + // end is: + // - if not isEnd(): nothing + // - else: $ + // + // In full-path matching mode, we put the slash at the START of the + // pattern, so start is: + // - if first pattern: same as part-matching mode + // - if not isStart(): nothing + // - if traversal possible, but not allowed: /(?!\.\.?(?:$|/)) + // - if dots allowed or not possible: / + // - if dots possible and not allowed: /(?!\.) + // end is: + // - if last pattern, same as part-matching mode + // - else nothing + // + // Always put the (?:$|/) on negated tails, though, because that has to be + // there to bind the end of the negated pattern portion, and it's easier to + // just stick it in now rather than try to inject it later in the middle of + // the pattern. + // + // We can just always return the same end, and leave it up to the caller + // to know whether it's going to be used joined or in parts. + // And, if the start is adjusted slightly, can do the same there: + // - if not isStart: nothing + // - if traversal possible, but not allowed: (?:/|^)(?!\.\.?$) + // - if dots allowed or not possible: (?:/|^) + // - if dots possible and not allowed: (?:/|^)(?!\.) + // + // But it's better to have a simpler binding without a conditional, for + // performance, so probably better to return both start options. + // + // Then the caller just ignores the end if it's not the first pattern, + // and the start always gets applied. + // + // But that's always going to be $ if it's the ending pattern, or nothing, + // so the caller can just attach $ at the end of the pattern when building. + // + // So the todo is: + // - better detect what kind of start is needed + // - return both flavors of starting pattern + // - attach $ at the end of the pattern when creating the actual RegExp + // + // Ah, but wait, no, that all only applies to the root when the first pattern + // is not an extglob. If the first pattern IS an extglob, then we need all + // that dot prevention biz to live in the extglob portions, because eg + // +(*|.x*) can match .xy but not .yx. + // + // So, return the two flavors if it's #root and the first child is not an + // AST, otherwise leave it to the child AST to handle it, and there, + // use the (?:^|/) style of start binding. + // + // Even simplified further: + // - Since the start for a join is eg /(?!\.) 
and the start for a part + // is ^(?!\.), we can just prepend (?!\.) to the pattern (either root + // or start or whatever) and prepend ^ or / at the Regexp construction. + toRegExpSource(allowDot) { + const dot = allowDot ?? !!this.#options.dot; + if (this.#root === this) + this.#fillNegs(); + if (!this.type) { + const noEmpty = this.isStart() && this.isEnd(); + const src = this.#parts + .map(p => { + const [re, _, hasMagic, uflag] = typeof p === 'string' + ? AST.#parseGlob(p, this.#hasMagic, noEmpty) + : p.toRegExpSource(allowDot); + this.#hasMagic = this.#hasMagic || hasMagic; + this.#uflag = this.#uflag || uflag; + return re; + }) + .join(''); + let start = ''; + if (this.isStart()) { + if (typeof this.#parts[0] === 'string') { + // this is the string that will match the start of the pattern, + // so we need to protect against dots and such. + // '.' and '..' cannot match unless the pattern is that exactly, + // even if it starts with . or dot:true is set. + const dotTravAllowed = this.#parts.length === 1 && justDots.has(this.#parts[0]); + if (!dotTravAllowed) { + const aps = addPatternStart; + // check if we have a possibility of matching . or .., + // and prevent that. + const needNoTrav = + // dots are allowed, and the pattern starts with [ or . + (dot && aps.has(src.charAt(0))) || + // the pattern starts with \., and then [ or . + (src.startsWith('\\.') && aps.has(src.charAt(2))) || + // the pattern starts with \.\., and then [ or . + (src.startsWith('\\.\\.') && aps.has(src.charAt(4))); + // no need to prevent dots if it can't match a dot, or if a + // sub-pattern will be preventing it anyway. + const needNoDot = !dot && !allowDot && aps.has(src.charAt(0)); + start = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : ''; + } + } + } + // append the "end of path portion" pattern to negation tails + let end = ''; + if (this.isEnd() && + this.#root.#filledNegs && + this.#parent?.type === '!') { + end = '(?:$|\\/)'; + } + const final = start + src + end; + return [ + final, + (0, unescape_js_1.unescape)(src), + (this.#hasMagic = !!this.#hasMagic), + this.#uflag, + ]; + } + // We need to calculate the body *twice* if it's a repeat pattern + // at the start, once in nodot mode, then again in dot mode, so a + // pattern like *(?) can match 'x.y' + const repeated = this.type === '*' || this.type === '+'; + // some kind of extglob + const start = this.type === '!' ? '(?:(?!(?:' : '(?:'; + let body = this.#partsToRegExp(dot); + if (this.isStart() && this.isEnd() && !body && this.type !== '!') { + // invalid extglob, has to at least be *something* present, if it's + // the entire path portion. + const s = this.toString(); + this.#parts = [s]; + this.type = null; + this.#hasMagic = undefined; + return [s, (0, unescape_js_1.unescape)(this.toString()), false, false]; + } + // XXX abstract out this map method + let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot + ? '' + : this.#partsToRegExp(true); + if (bodyDotAllowed === body) { + bodyDotAllowed = ''; + } + if (bodyDotAllowed) { + body = `(?:${body})(?:${bodyDotAllowed})*?`; + } + // an empty !() is exactly equivalent to a starNoEmpty + let final = ''; + if (this.type === '!' && this.#emptyExt) { + final = (this.isStart() && !dot ? startNoDot : '') + starNoEmpty; + } + else { + const close = this.type === '!' + ? // !() must match something,but !(x) can match '' + '))' + + (this.isStart() && !dot && !allowDot ? startNoDot : '') + + star + + ')' + : this.type === '@' + ? ')' + : this.type === '?' + ? ')?' 
+ : this.type === '+' && bodyDotAllowed + ? ')' + : this.type === '*' && bodyDotAllowed + ? `)?` + : `)${this.type}`; + final = start + body + close; + } + return [ + final, + (0, unescape_js_1.unescape)(body), + (this.#hasMagic = !!this.#hasMagic), + this.#uflag, + ]; + } + #partsToRegExp(dot) { + return this.#parts + .map(p => { + // extglob ASTs should only contain parent ASTs + /* c8 ignore start */ + if (typeof p === 'string') { + throw new Error('string type in extglob ast??'); + } + /* c8 ignore stop */ + // can ignore hasMagic, because extglobs are already always magic + const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot); + this.#uflag = this.#uflag || uflag; + return re; + }) + .filter(p => !(this.isStart() && this.isEnd()) || !!p) + .join('|'); + } + static #parseGlob(glob, hasMagic, noEmpty = false) { + let escaping = false; + let re = ''; + let uflag = false; + for (let i = 0; i < glob.length; i++) { + const c = glob.charAt(i); + if (escaping) { + escaping = false; + re += (reSpecials.has(c) ? '\\' : '') + c; + continue; + } + if (c === '\\') { + if (i === glob.length - 1) { + re += '\\\\'; + } + else { + escaping = true; + } + continue; + } + if (c === '[') { + const [src, needUflag, consumed, magic] = (0, brace_expressions_js_1.parseClass)(glob, i); + if (consumed) { + re += src; + uflag = uflag || needUflag; + i += consumed - 1; + hasMagic = hasMagic || magic; + continue; + } + } + if (c === '*') { + if (noEmpty && glob === '*') + re += starNoEmpty; + else + re += star; + hasMagic = true; + continue; + } + if (c === '?') { + re += qmark; + hasMagic = true; + continue; + } + re += regExpEscape(c); + } + return [re, (0, unescape_js_1.unescape)(glob), !!hasMagic, uflag]; + } +} +exports.AST = AST; +//# sourceMappingURL=ast.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/brace-expressions.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/brace-expressions.js new file mode 100644 index 00000000000000..0e13eefc4cfee2 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/brace-expressions.js @@ -0,0 +1,152 @@ +"use strict"; +// translate the various posix character classes into unicode properties +// this works across all unicode locales +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseClass = void 0; +// { : [, /u flag required, negated] +const posixClasses = { + '[:alnum:]': ['\\p{L}\\p{Nl}\\p{Nd}', true], + '[:alpha:]': ['\\p{L}\\p{Nl}', true], + '[:ascii:]': ['\\x' + '00-\\x' + '7f', false], + '[:blank:]': ['\\p{Zs}\\t', true], + '[:cntrl:]': ['\\p{Cc}', true], + '[:digit:]': ['\\p{Nd}', true], + '[:graph:]': ['\\p{Z}\\p{C}', true, true], + '[:lower:]': ['\\p{Ll}', true], + '[:print:]': ['\\p{C}', true], + '[:punct:]': ['\\p{P}', true], + '[:space:]': ['\\p{Z}\\t\\r\\n\\v\\f', true], + '[:upper:]': ['\\p{Lu}', true], + '[:word:]': ['\\p{L}\\p{Nl}\\p{Nd}\\p{Pc}', true], + '[:xdigit:]': ['A-Fa-f0-9', false], +}; +// only need to escape a few things inside of brace expressions +// escapes: [ \ ] - +const braceEscape = (s) => s.replace(/[[\]\\-]/g, '\\$&'); +// escape all regexp magic characters +const regexpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&'); +// everything has already been escaped, we just have to join +const rangesToString = (ranges) => ranges.join(''); +// takes a glob string at a posix brace expression, and returns +// an equivalent regular 
expression source, and boolean indicating +// whether the /u flag needs to be applied, and the number of chars +// consumed to parse the character class. +// This also removes out of order ranges, and returns ($.) if the +// entire class just no good. +const parseClass = (glob, position) => { + const pos = position; + /* c8 ignore start */ + if (glob.charAt(pos) !== '[') { + throw new Error('not in a brace expression'); + } + /* c8 ignore stop */ + const ranges = []; + const negs = []; + let i = pos + 1; + let sawStart = false; + let uflag = false; + let escaping = false; + let negate = false; + let endPos = pos; + let rangeStart = ''; + WHILE: while (i < glob.length) { + const c = glob.charAt(i); + if ((c === '!' || c === '^') && i === pos + 1) { + negate = true; + i++; + continue; + } + if (c === ']' && sawStart && !escaping) { + endPos = i + 1; + break; + } + sawStart = true; + if (c === '\\') { + if (!escaping) { + escaping = true; + i++; + continue; + } + // escaped \ char, fall through and treat like normal char + } + if (c === '[' && !escaping) { + // either a posix class, a collation equivalent, or just a [ + for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) { + if (glob.startsWith(cls, i)) { + // invalid, [a-[] is fine, but not [a-[:alpha]] + if (rangeStart) { + return ['$.', false, glob.length - pos, true]; + } + i += cls.length; + if (neg) + negs.push(unip); + else + ranges.push(unip); + uflag = uflag || u; + continue WHILE; + } + } + } + // now it's just a normal character, effectively + escaping = false; + if (rangeStart) { + // throw this range away if it's not valid, but others + // can still match. + if (c > rangeStart) { + ranges.push(braceEscape(rangeStart) + '-' + braceEscape(c)); + } + else if (c === rangeStart) { + ranges.push(braceEscape(c)); + } + rangeStart = ''; + i++; + continue; + } + // now might be the start of a range. + // can be either c-d or c-] or c] or c] at this point + if (glob.startsWith('-]', i + 1)) { + ranges.push(braceEscape(c + '-')); + i += 2; + continue; + } + if (glob.startsWith('-', i + 1)) { + rangeStart = c; + i += 2; + continue; + } + // not the start of a range, just a single character + ranges.push(braceEscape(c)); + i++; + } + if (endPos < i) { + // didn't see the end of the class, not a valid class, + // but might still be valid as a literal match. + return ['', false, 0, false]; + } + // if we got no ranges and no negates, then we have a range that + // cannot possibly match anything, and that poisons the whole glob + if (!ranges.length && !negs.length) { + return ['$.', false, glob.length - pos, true]; + } + // if we got one positive range, and it's a single character, then that's + // not actually a magic pattern, it's just that one literal character. + // we should not treat that as "magic", we should just return the literal + // character. [_] is a perfectly valid way to escape glob magic chars. + if (negs.length === 0 && + ranges.length === 1 && + /^\\?.$/.test(ranges[0]) && + !negate) { + const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0]; + return [regexpEscape(r), false, endPos - pos, false]; + } + const sranges = '[' + (negate ? '^' : '') + rangesToString(ranges) + ']'; + const snegs = '[' + (negate ? '' : '^') + rangesToString(negs) + ']'; + const comb = ranges.length && negs.length + ? '(' + sranges + '|' + snegs + ')' + : ranges.length + ? 
sranges + : snegs; + return [comb, uflag, endPos - pos, true]; +}; +exports.parseClass = parseClass; +//# sourceMappingURL=brace-expressions.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/escape.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/escape.js new file mode 100644 index 00000000000000..02a4f8a8e0a588 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/escape.js @@ -0,0 +1,22 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.escape = void 0; +/** + * Escape all magic characters in a glob pattern. + * + * If the {@link windowsPathsNoEscape | GlobOptions.windowsPathsNoEscape} + * option is used, then characters are escaped by wrapping in `[]`, because + * a magic character wrapped in a character class can only be satisfied by + * that exact character. In this mode, `\` is _not_ escaped, because it is + * not interpreted as a magic character, but instead as a path separator. + */ +const escape = (s, { windowsPathsNoEscape = false, } = {}) => { + // don't need to escape +@! because we escape the parens + // that make those magic, and escaping ! as [!] isn't valid, + // because [!]] is a valid glob class meaning not ']'. + return windowsPathsNoEscape + ? s.replace(/[?*()[\]]/g, '[$&]') + : s.replace(/[?*()[\]\\]/g, '\\$&'); +}; +exports.escape = escape; +//# sourceMappingURL=escape.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/index.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/index.js new file mode 100644 index 00000000000000..d70e681fef5d7d --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/index.js @@ -0,0 +1,1011 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.unescape = exports.escape = exports.AST = exports.Minimatch = exports.match = exports.makeRe = exports.braceExpand = exports.defaults = exports.filter = exports.GLOBSTAR = exports.sep = exports.minimatch = void 0; +const brace_expansion_1 = __importDefault(require("brace-expansion")); +const assert_valid_pattern_js_1 = require("./assert-valid-pattern.js"); +const ast_js_1 = require("./ast.js"); +const escape_js_1 = require("./escape.js"); +const unescape_js_1 = require("./unescape.js"); +const minimatch = (p, pattern, options = {}) => { + (0, assert_valid_pattern_js_1.assertValidPattern)(pattern); + // shortcut: comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + return false; + } + return new Minimatch(pattern, options).match(p); +}; +exports.minimatch = minimatch; +// Optimized checking for the most common glob patterns. 
+const starDotExtRE = /^\*+([^+@!?\*\[\(]*)$/; +const starDotExtTest = (ext) => (f) => !f.startsWith('.') && f.endsWith(ext); +const starDotExtTestDot = (ext) => (f) => f.endsWith(ext); +const starDotExtTestNocase = (ext) => { + ext = ext.toLowerCase(); + return (f) => !f.startsWith('.') && f.toLowerCase().endsWith(ext); +}; +const starDotExtTestNocaseDot = (ext) => { + ext = ext.toLowerCase(); + return (f) => f.toLowerCase().endsWith(ext); +}; +const starDotStarRE = /^\*+\.\*+$/; +const starDotStarTest = (f) => !f.startsWith('.') && f.includes('.'); +const starDotStarTestDot = (f) => f !== '.' && f !== '..' && f.includes('.'); +const dotStarRE = /^\.\*+$/; +const dotStarTest = (f) => f !== '.' && f !== '..' && f.startsWith('.'); +const starRE = /^\*+$/; +const starTest = (f) => f.length !== 0 && !f.startsWith('.'); +const starTestDot = (f) => f.length !== 0 && f !== '.' && f !== '..'; +const qmarksRE = /^\?+([^+@!?\*\[\(]*)?$/; +const qmarksTestNocase = ([$0, ext = '']) => { + const noext = qmarksTestNoExt([$0]); + if (!ext) + return noext; + ext = ext.toLowerCase(); + return (f) => noext(f) && f.toLowerCase().endsWith(ext); +}; +const qmarksTestNocaseDot = ([$0, ext = '']) => { + const noext = qmarksTestNoExtDot([$0]); + if (!ext) + return noext; + ext = ext.toLowerCase(); + return (f) => noext(f) && f.toLowerCase().endsWith(ext); +}; +const qmarksTestDot = ([$0, ext = '']) => { + const noext = qmarksTestNoExtDot([$0]); + return !ext ? noext : (f) => noext(f) && f.endsWith(ext); +}; +const qmarksTest = ([$0, ext = '']) => { + const noext = qmarksTestNoExt([$0]); + return !ext ? noext : (f) => noext(f) && f.endsWith(ext); +}; +const qmarksTestNoExt = ([$0]) => { + const len = $0.length; + return (f) => f.length === len && !f.startsWith('.'); +}; +const qmarksTestNoExtDot = ([$0]) => { + const len = $0.length; + return (f) => f.length === len && f !== '.' && f !== '..'; +}; +/* c8 ignore start */ +const defaultPlatform = (typeof process === 'object' && process + ? (typeof process.env === 'object' && + process.env && + process.env.__MINIMATCH_TESTING_PLATFORM__) || + process.platform + : 'posix'); +const path = { + win32: { sep: '\\' }, + posix: { sep: '/' }, +}; +/* c8 ignore stop */ +exports.sep = defaultPlatform === 'win32' ? path.win32.sep : path.posix.sep; +exports.minimatch.sep = exports.sep; +exports.GLOBSTAR = Symbol('globstar **'); +exports.minimatch.GLOBSTAR = exports.GLOBSTAR; +// any single thing other than / +// don't need to escape / when using new RegExp() +const qmark = '[^/]'; +// * => any number of characters +const star = qmark + '*?'; +// ** when dots are allowed. Anything goes, except .. and . +// not (^ or / followed by one or two dots followed by $ or /), +// followed by anything, any number of times. +const twoStarDot = '(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?'; +// not a ^ or / followed by a dot, +// followed by anything, any number of times. 
+const twoStarNoDot = '(?:(?!(?:\\/|^)\\.).)*?'; +const filter = (pattern, options = {}) => (p) => (0, exports.minimatch)(p, pattern, options); +exports.filter = filter; +exports.minimatch.filter = exports.filter; +const ext = (a, b = {}) => Object.assign({}, a, b); +const defaults = (def) => { + if (!def || typeof def !== 'object' || !Object.keys(def).length) { + return exports.minimatch; + } + const orig = exports.minimatch; + const m = (p, pattern, options = {}) => orig(p, pattern, ext(def, options)); + return Object.assign(m, { + Minimatch: class Minimatch extends orig.Minimatch { + constructor(pattern, options = {}) { + super(pattern, ext(def, options)); + } + static defaults(options) { + return orig.defaults(ext(def, options)).Minimatch; + } + }, + AST: class AST extends orig.AST { + /* c8 ignore start */ + constructor(type, parent, options = {}) { + super(type, parent, ext(def, options)); + } + /* c8 ignore stop */ + static fromGlob(pattern, options = {}) { + return orig.AST.fromGlob(pattern, ext(def, options)); + } + }, + unescape: (s, options = {}) => orig.unescape(s, ext(def, options)), + escape: (s, options = {}) => orig.escape(s, ext(def, options)), + filter: (pattern, options = {}) => orig.filter(pattern, ext(def, options)), + defaults: (options) => orig.defaults(ext(def, options)), + makeRe: (pattern, options = {}) => orig.makeRe(pattern, ext(def, options)), + braceExpand: (pattern, options = {}) => orig.braceExpand(pattern, ext(def, options)), + match: (list, pattern, options = {}) => orig.match(list, pattern, ext(def, options)), + sep: orig.sep, + GLOBSTAR: exports.GLOBSTAR, + }); +}; +exports.defaults = defaults; +exports.minimatch.defaults = exports.defaults; +// Brace expansion: +// a{b,c}d -> abd acd +// a{b,}c -> abc ac +// a{0..3}d -> a0d a1d a2d a3d +// a{b,c{d,e}f}g -> abg acdfg acefg +// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg +// +// Invalid sets are not expanded. +// a{2..}b -> a{2..}b +// a{b}c -> a{b}c +const braceExpand = (pattern, options = {}) => { + (0, assert_valid_pattern_js_1.assertValidPattern)(pattern); + // Thanks to Yeting Li for + // improving this regexp to avoid a ReDOS vulnerability. + if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) { + // shortcut. no need to expand. + return [pattern]; + } + return (0, brace_expansion_1.default)(pattern); +}; +exports.braceExpand = braceExpand; +exports.minimatch.braceExpand = exports.braceExpand; +// parse a component of the expanded set. +// At this point, no pattern may contain "/" in it +// so we're going to return a 2d array, where each entry is the full +// pattern, split on '/', and then turned into a regular expression. +// A regexp is made at the end which joins each array with an +// escaped /, and another full one which joins each regexp with |. +// +// Following the lead of Bash 4.1, note that "**" only has special meaning +// when it is the *only* thing in a path portion. Otherwise, any series +// of * is equivalent to a single *. Globstar behavior is enabled by +// default, and can be disabled by setting options.noglobstar. 
+const makeRe = (pattern, options = {}) => new Minimatch(pattern, options).makeRe(); +exports.makeRe = makeRe; +exports.minimatch.makeRe = exports.makeRe; +const match = (list, pattern, options = {}) => { + const mm = new Minimatch(pattern, options); + list = list.filter(f => mm.match(f)); + if (mm.options.nonull && !list.length) { + list.push(pattern); + } + return list; +}; +exports.match = match; +exports.minimatch.match = exports.match; +// replace stuff like \* with * +const globMagic = /[?*]|[+@!]\(.*?\)|\[|\]/; +const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&'); +class Minimatch { + options; + set; + pattern; + windowsPathsNoEscape; + nonegate; + negate; + comment; + empty; + preserveMultipleSlashes; + partial; + globSet; + globParts; + nocase; + isWindows; + platform; + windowsNoMagicRoot; + regexp; + constructor(pattern, options = {}) { + (0, assert_valid_pattern_js_1.assertValidPattern)(pattern); + options = options || {}; + this.options = options; + this.pattern = pattern; + this.platform = options.platform || defaultPlatform; + this.isWindows = this.platform === 'win32'; + this.windowsPathsNoEscape = + !!options.windowsPathsNoEscape || options.allowWindowsEscape === false; + if (this.windowsPathsNoEscape) { + this.pattern = this.pattern.replace(/\\/g, '/'); + } + this.preserveMultipleSlashes = !!options.preserveMultipleSlashes; + this.regexp = null; + this.negate = false; + this.nonegate = !!options.nonegate; + this.comment = false; + this.empty = false; + this.partial = !!options.partial; + this.nocase = !!this.options.nocase; + this.windowsNoMagicRoot = + options.windowsNoMagicRoot !== undefined + ? options.windowsNoMagicRoot + : !!(this.isWindows && this.nocase); + this.globSet = []; + this.globParts = []; + this.set = []; + // make the set of regexps etc. + this.make(); + } + hasMagic() { + if (this.options.magicalBraces && this.set.length > 1) { + return true; + } + for (const pattern of this.set) { + for (const part of pattern) { + if (typeof part !== 'string') + return true; + } + } + return false; + } + debug(..._) { } + make() { + const pattern = this.pattern; + const options = this.options; + // empty patterns and comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + this.comment = true; + return; + } + if (!pattern) { + this.empty = true; + return; + } + // step 1: figure out negation, etc. + this.parseNegate(); + // step 2: expand braces + this.globSet = [...new Set(this.braceExpand())]; + if (options.debug) { + this.debug = (...args) => console.error(...args); + } + this.debug(this.pattern, this.globSet); + // step 3: now we have a set, so turn each one into a series of + // path-portion matching patterns. + // These will be regexps, except in the case of "**", which is + // set to the GLOBSTAR object for globstar behavior, + // and will not contain any / characters + // + // First, we preprocess to make the glob pattern sets a bit simpler + // and deduped. There are some perf-killing patterns that can cause + // problems with a glob walk, but we can simplify them down a bit. + const rawGlobParts = this.globSet.map(s => this.slashSplit(s)); + this.globParts = this.preprocess(rawGlobParts); + this.debug(this.pattern, this.globParts); + // glob --> regexps + let set = this.globParts.map((s, _, __) => { + if (this.isWindows && this.windowsNoMagicRoot) { + // check if it's a drive or unc path. + const isUNC = s[0] === '' && + s[1] === '' && + (s[2] === '?' 
|| !globMagic.test(s[2])) && + !globMagic.test(s[3]); + const isDrive = /^[a-z]:/i.test(s[0]); + if (isUNC) { + return [...s.slice(0, 4), ...s.slice(4).map(ss => this.parse(ss))]; + } + else if (isDrive) { + return [s[0], ...s.slice(1).map(ss => this.parse(ss))]; + } + } + return s.map(ss => this.parse(ss)); + }); + this.debug(this.pattern, set); + // filter out everything that didn't compile properly. + this.set = set.filter(s => s.indexOf(false) === -1); + // do not treat the ? in UNC paths as magic + if (this.isWindows) { + for (let i = 0; i < this.set.length; i++) { + const p = this.set[i]; + if (p[0] === '' && + p[1] === '' && + this.globParts[i][2] === '?' && + typeof p[3] === 'string' && + /^[a-z]:$/i.test(p[3])) { + p[2] = '?'; + } + } + } + this.debug(this.pattern, this.set); + } + // various transforms to equivalent pattern sets that are + // faster to process in a filesystem walk. The goal is to + // eliminate what we can, and push all ** patterns as far + // to the right as possible, even if it increases the number + // of patterns that we have to process. + preprocess(globParts) { + // if we're not in globstar mode, then turn all ** into * + if (this.options.noglobstar) { + for (let i = 0; i < globParts.length; i++) { + for (let j = 0; j < globParts[i].length; j++) { + if (globParts[i][j] === '**') { + globParts[i][j] = '*'; + } + } + } + } + const { optimizationLevel = 1 } = this.options; + if (optimizationLevel >= 2) { + // aggressive optimization for the purpose of fs walking + globParts = this.firstPhasePreProcess(globParts); + globParts = this.secondPhasePreProcess(globParts); + } + else if (optimizationLevel >= 1) { + // just basic optimizations to remove some .. parts + globParts = this.levelOneOptimize(globParts); + } + else { + globParts = this.adjascentGlobstarOptimize(globParts); + } + return globParts; + } + // just get rid of adjascent ** portions + adjascentGlobstarOptimize(globParts) { + return globParts.map(parts => { + let gs = -1; + while (-1 !== (gs = parts.indexOf('**', gs + 1))) { + let i = gs; + while (parts[i + 1] === '**') { + i++; + } + if (i !== gs) { + parts.splice(gs, i - gs); + } + } + return parts; + }); + } + // get rid of adjascent ** and resolve .. portions + levelOneOptimize(globParts) { + return globParts.map(parts => { + parts = parts.reduce((set, part) => { + const prev = set[set.length - 1]; + if (part === '**' && prev === '**') { + return set; + } + if (part === '..') { + if (prev && prev !== '..' && prev !== '.' && prev !== '**') { + set.pop(); + return set; + } + } + set.push(part); + return set; + }, []); + return parts.length === 0 ? [''] : parts; + }); + } + levelTwoFileOptimize(parts) { + if (!Array.isArray(parts)) { + parts = this.slashSplit(parts); + } + let didSomething = false; + do { + didSomething = false; + //
<pre>/<e>/<rest> -> <pre>/<rest>
      +            if (!this.preserveMultipleSlashes) {
      +                for (let i = 1; i < parts.length - 1; i++) {
      +                    const p = parts[i];
      +                    // don't squeeze out UNC patterns
      +                    if (i === 1 && p === '' && parts[0] === '')
      +                        continue;
      +                    if (p === '.' || p === '') {
      +                        didSomething = true;
      +                        parts.splice(i, 1);
      +                        i--;
      +                    }
      +                }
      +                if (parts[0] === '.' &&
      +                    parts.length === 2 &&
      +                    (parts[1] === '.' || parts[1] === '')) {
      +                    didSomething = true;
      +                    parts.pop();
      +                }
      +            }
+            // <pre>/<p>/../<rest> -> <pre>/<rest>
      +            let dd = 0;
      +            while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
      +                const p = parts[dd - 1];
      +                if (p && p !== '.' && p !== '..' && p !== '**') {
      +                    didSomething = true;
      +                    parts.splice(dd - 1, 2);
      +                    dd -= 2;
      +                }
      +            }
      +        } while (didSomething);
      +        return parts.length === 0 ? [''] : parts;
      +    }
      +    // First phase: single-pattern processing
+    // <pre> is 1 or more portions
+    // <rest> is 1 or more portions
+    // <p> is any portion other than ., .., '', or **
+    // <e> is . or ''
+    //
+    // **/.. is *brutal* for filesystem walking performance, because
+    // it effectively resets the recursive walk each time it occurs,
+    // and ** cannot be reduced out by a .. pattern part like a regexp
+    // or most strings (other than .., ., and '') can be.
+    //
+    // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+    // <pre>/<e>/<rest> -> <pre>/<rest>
+    // <pre>/<p>/../<rest> -> <pre>/<rest>
+    // **/**/<rest> -> **/<rest>
+    //
+    // **/*/<rest> -> */**/<rest> <== not valid because ** doesn't follow
      +    // this WOULD be allowed if ** did follow symlinks, or * didn't
      +    firstPhasePreProcess(globParts) {
      +        let didSomething = false;
      +        do {
      +            didSomething = false;
+            // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+            for (let parts of globParts) {
+                let gs = -1;
+                while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
+                    let gss = gs;
+                    while (parts[gss + 1] === '**') {
+                        // <pre>/**/**/<rest> -> <pre>/**/<rest>
      +                        gss++;
      +                    }
      +                    // eg, if gs is 2 and gss is 4, that means we have 3 **
      +                    // parts, and can remove 2 of them.
      +                    if (gss > gs) {
      +                        parts.splice(gs + 1, gss - gs);
      +                    }
      +                    let next = parts[gs + 1];
      +                    const p = parts[gs + 2];
      +                    const p2 = parts[gs + 3];
      +                    if (next !== '..')
      +                        continue;
      +                    if (!p ||
      +                        p === '.' ||
      +                        p === '..' ||
      +                        !p2 ||
      +                        p2 === '.' ||
      +                        p2 === '..') {
      +                        continue;
      +                    }
      +                    didSomething = true;
      +                    // edit parts in place, and push the new one
      +                    parts.splice(gs, 1);
      +                    const other = parts.slice(0);
      +                    other[gs] = '**';
      +                    globParts.push(other);
      +                    gs--;
      +                }
+                // <pre>/<e>/<rest> -> <pre>/<rest>
      +                if (!this.preserveMultipleSlashes) {
      +                    for (let i = 1; i < parts.length - 1; i++) {
      +                        const p = parts[i];
      +                        // don't squeeze out UNC patterns
      +                        if (i === 1 && p === '' && parts[0] === '')
      +                            continue;
      +                        if (p === '.' || p === '') {
      +                            didSomething = true;
      +                            parts.splice(i, 1);
      +                            i--;
      +                        }
      +                    }
      +                    if (parts[0] === '.' &&
      +                        parts.length === 2 &&
      +                        (parts[1] === '.' || parts[1] === '')) {
      +                        didSomething = true;
      +                        parts.pop();
      +                    }
      +                }
+                // <pre>/<p>/../<rest> -> <pre>/<rest>
      +                let dd = 0;
      +                while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
      +                    const p = parts[dd - 1];
      +                    if (p && p !== '.' && p !== '..' && p !== '**') {
      +                        didSomething = true;
      +                        const needDot = dd === 1 && parts[dd + 1] === '**';
      +                        const splin = needDot ? ['.'] : [];
      +                        parts.splice(dd - 1, 2, ...splin);
      +                        if (parts.length === 0)
      +                            parts.push('');
      +                        dd -= 2;
      +                    }
      +                }
      +            }
      +        } while (didSomething);
      +        return globParts;
      +    }
      +    // second phase: multi-pattern dedupes
+    // {<pre>/*/<rest>,<pre>/<p>/<rest>} -> <pre>/*/<rest>
+    // {<pre>/<rest>,<pre>/<rest>} -> <pre>/<rest>
+    // {<pre>/**/<rest>,<pre>/<rest>} -> <pre>/**/<rest>
+    //
+    // {<pre>/**/<rest>,<pre>/**/<p>/<rest>} -> <pre>/**/<rest>
      +    // ^-- not valid because ** doens't follow symlinks
      +    secondPhasePreProcess(globParts) {
      +        for (let i = 0; i < globParts.length - 1; i++) {
      +            for (let j = i + 1; j < globParts.length; j++) {
      +                const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);
      +                if (!matched)
      +                    continue;
      +                globParts[i] = matched;
      +                globParts[j] = [];
      +            }
      +        }
      +        return globParts.filter(gs => gs.length);
      +    }
      +    partsMatch(a, b, emptyGSMatch = false) {
      +        let ai = 0;
      +        let bi = 0;
      +        let result = [];
      +        let which = '';
      +        while (ai < a.length && bi < b.length) {
      +            if (a[ai] === b[bi]) {
      +                result.push(which === 'b' ? b[bi] : a[ai]);
      +                ai++;
      +                bi++;
      +            }
      +            else if (emptyGSMatch && a[ai] === '**' && b[bi] === a[ai + 1]) {
      +                result.push(a[ai]);
      +                ai++;
      +            }
      +            else if (emptyGSMatch && b[bi] === '**' && a[ai] === b[bi + 1]) {
      +                result.push(b[bi]);
      +                bi++;
      +            }
      +            else if (a[ai] === '*' &&
      +                b[bi] &&
      +                (this.options.dot || !b[bi].startsWith('.')) &&
      +                b[bi] !== '**') {
      +                if (which === 'b')
      +                    return false;
      +                which = 'a';
      +                result.push(a[ai]);
      +                ai++;
      +                bi++;
      +            }
      +            else if (b[bi] === '*' &&
      +                a[ai] &&
      +                (this.options.dot || !a[ai].startsWith('.')) &&
      +                a[ai] !== '**') {
      +                if (which === 'a')
      +                    return false;
      +                which = 'b';
      +                result.push(b[bi]);
      +                ai++;
      +                bi++;
      +            }
      +            else {
      +                return false;
      +            }
      +        }
      +        // if we fall out of the loop, it means they two are identical
      +        // as long as their lengths match
      +        return a.length === b.length && result;
      +    }
      +    parseNegate() {
      +        if (this.nonegate)
      +            return;
      +        const pattern = this.pattern;
      +        let negate = false;
      +        let negateOffset = 0;
      +        for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {
      +            negate = !negate;
      +            negateOffset++;
      +        }
      +        if (negateOffset)
      +            this.pattern = pattern.slice(negateOffset);
      +        this.negate = negate;
      +    }
      +    // set partial to true to test if, for example,
      +    // "/a/b" matches the start of "/*/b/*/d"
      +    // Partial means, if you run out of file before you run
      +    // out of pattern, then that's fine, as long as all
      +    // the parts match.
      +    matchOne(file, pattern, partial = false) {
      +        const options = this.options;
      +        // UNC paths like //?/X:/... can match X:/... and vice versa
      +        // Drive letters in absolute drive or unc paths are always compared
      +        // case-insensitively.
      +        if (this.isWindows) {
      +            const fileDrive = typeof file[0] === 'string' && /^[a-z]:$/i.test(file[0]);
      +            const fileUNC = !fileDrive &&
      +                file[0] === '' &&
      +                file[1] === '' &&
      +                file[2] === '?' &&
      +                /^[a-z]:$/i.test(file[3]);
      +            const patternDrive = typeof pattern[0] === 'string' && /^[a-z]:$/i.test(pattern[0]);
      +            const patternUNC = !patternDrive &&
      +                pattern[0] === '' &&
      +                pattern[1] === '' &&
      +                pattern[2] === '?' &&
      +                typeof pattern[3] === 'string' &&
      +                /^[a-z]:$/i.test(pattern[3]);
      +            const fdi = fileUNC ? 3 : fileDrive ? 0 : undefined;
      +            const pdi = patternUNC ? 3 : patternDrive ? 0 : undefined;
      +            if (typeof fdi === 'number' && typeof pdi === 'number') {
      +                const [fd, pd] = [file[fdi], pattern[pdi]];
      +                if (fd.toLowerCase() === pd.toLowerCase()) {
      +                    pattern[pdi] = fd;
      +                    if (pdi > fdi) {
      +                        pattern = pattern.slice(pdi);
      +                    }
      +                    else if (fdi > pdi) {
      +                        file = file.slice(fdi);
      +                    }
      +                }
      +            }
      +        }
      +        // resolve and reduce . and .. portions in the file as well.
      +        // dont' need to do the second phase, because it's only one string[]
      +        const { optimizationLevel = 1 } = this.options;
      +        if (optimizationLevel >= 2) {
      +            file = this.levelTwoFileOptimize(file);
      +        }
      +        this.debug('matchOne', this, { file, pattern });
      +        this.debug('matchOne', file.length, pattern.length);
      +        for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
      +            this.debug('matchOne loop');
      +            var p = pattern[pi];
      +            var f = file[fi];
      +            this.debug(pattern, p, f);
      +            // should be impossible.
      +            // some invalid regexp stuff in the set.
      +            /* c8 ignore start */
      +            if (p === false) {
      +                return false;
      +            }
      +            /* c8 ignore stop */
      +            if (p === exports.GLOBSTAR) {
      +                this.debug('GLOBSTAR', [pattern, p, f]);
      +                // "**"
      +                // a/**/b/**/c would match the following:
      +                // a/b/x/y/z/c
      +                // a/x/y/z/b/c
      +                // a/b/x/b/x/c
      +                // a/b/c
      +                // To do this, take the rest of the pattern after
      +                // the **, and see if it would match the file remainder.
      +                // If so, return success.
      +                // If not, the ** "swallows" a segment, and try again.
      +                // This is recursively awful.
      +                //
      +                // a/**/b/**/c matching a/b/x/y/z/c
      +                // - a matches a
      +                // - doublestar
      +                //   - matchOne(b/x/y/z/c, b/**/c)
      +                //     - b matches b
      +                //     - doublestar
      +                //       - matchOne(x/y/z/c, c) -> no
      +                //       - matchOne(y/z/c, c) -> no
      +                //       - matchOne(z/c, c) -> no
      +                //       - matchOne(c, c) yes, hit
      +                var fr = fi;
      +                var pr = pi + 1;
      +                if (pr === pl) {
      +                    this.debug('** at the end');
      +                    // a ** at the end will just swallow the rest.
      +                    // We have found a match.
      +                    // however, it will not swallow /.x, unless
      +                    // options.dot is set.
      +                    // . and .. are *never* matched by **, for explosively
      +                    // exponential reasons.
      +                    for (; fi < fl; fi++) {
      +                        if (file[fi] === '.' ||
      +                            file[fi] === '..' ||
      +                            (!options.dot && file[fi].charAt(0) === '.'))
      +                            return false;
      +                    }
      +                    return true;
      +                }
      +                // ok, let's see if we can swallow whatever we can.
      +                while (fr < fl) {
      +                    var swallowee = file[fr];
      +                    this.debug('\nglobstar while', file, fr, pattern, pr, swallowee);
      +                    // XXX remove this slice.  Just pass the start index.
      +                    if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
      +                        this.debug('globstar found match!', fr, fl, swallowee);
      +                        // found a match.
      +                        return true;
      +                    }
      +                    else {
      +                        // can't swallow "." or ".." ever.
      +                        // can only swallow ".foo" when explicitly asked.
      +                        if (swallowee === '.' ||
      +                            swallowee === '..' ||
      +                            (!options.dot && swallowee.charAt(0) === '.')) {
      +                            this.debug('dot detected!', file, fr, pattern, pr);
      +                            break;
      +                        }
      +                        // ** swallows a segment, and continue.
      +                        this.debug('globstar swallow a segment, and continue');
      +                        fr++;
      +                    }
      +                }
      +                // no match was found.
      +                // However, in partial mode, we can't say this is necessarily over.
      +                /* c8 ignore start */
      +                if (partial) {
      +                    // ran out of file
      +                    this.debug('\n>>> no match, partial?', file, fr, pattern, pr);
      +                    if (fr === fl) {
      +                        return true;
      +                    }
      +                }
      +                /* c8 ignore stop */
      +                return false;
      +            }
      +            // something other than **
      +            // non-magic patterns just have to match exactly
      +            // patterns with magic have been turned into regexps.
      +            let hit;
      +            if (typeof p === 'string') {
      +                hit = f === p;
      +                this.debug('string match', p, f, hit);
      +            }
      +            else {
      +                hit = p.test(f);
      +                this.debug('pattern match', p, f, hit);
      +            }
      +            if (!hit)
      +                return false;
      +        }
      +        // Note: ending in / means that we'll get a final ""
      +        // at the end of the pattern.  This can only match a
      +        // corresponding "" at the end of the file.
      +        // If the file ends in /, then it can only match a
      +        // a pattern that ends in /, unless the pattern just
      +        // doesn't have any more for it. But, a/b/ should *not*
      +        // match "a/b/*", even though "" matches against the
      +        // [^/]*? pattern, except in partial mode, where it might
      +        // simply not be reached yet.
      +        // However, a/b/ should still satisfy a/*
      +        // now either we fell off the end of the pattern, or we're done.
      +        if (fi === fl && pi === pl) {
      +            // ran out of pattern and filename at the same time.
      +            // an exact hit!
      +            return true;
      +        }
      +        else if (fi === fl) {
      +            // ran out of file, but still had pattern left.
      +            // this is ok if we're doing the match as part of
      +            // a glob fs traversal.
      +            return partial;
      +        }
      +        else if (pi === pl) {
      +            // ran out of pattern, still have file left.
      +            // this is only acceptable if we're on the very last
      +            // empty segment of a file with a trailing slash.
      +            // a/* should match a/b/
      +            return fi === fl - 1 && file[fi] === '';
      +            /* c8 ignore start */
      +        }
      +        else {
      +            // should be unreachable.
      +            throw new Error('wtf?');
      +        }
      +        /* c8 ignore stop */
      +    }
      +    braceExpand() {
      +        return (0, exports.braceExpand)(this.pattern, this.options);
      +    }
      +    parse(pattern) {
      +        (0, assert_valid_pattern_js_1.assertValidPattern)(pattern);
      +        const options = this.options;
      +        // shortcuts
      +        if (pattern === '**')
      +            return exports.GLOBSTAR;
      +        if (pattern === '')
      +            return '';
      +        // far and away, the most common glob pattern parts are
      +        // *, *.*, and *.  Add a fast check method for those.
      +        let m;
      +        let fastTest = null;
      +        if ((m = pattern.match(starRE))) {
      +            fastTest = options.dot ? starTestDot : starTest;
      +        }
      +        else if ((m = pattern.match(starDotExtRE))) {
      +            fastTest = (options.nocase
      +                ? options.dot
      +                    ? starDotExtTestNocaseDot
      +                    : starDotExtTestNocase
      +                : options.dot
      +                    ? starDotExtTestDot
      +                    : starDotExtTest)(m[1]);
      +        }
      +        else if ((m = pattern.match(qmarksRE))) {
      +            fastTest = (options.nocase
      +                ? options.dot
      +                    ? qmarksTestNocaseDot
      +                    : qmarksTestNocase
      +                : options.dot
      +                    ? qmarksTestDot
      +                    : qmarksTest)(m);
      +        }
      +        else if ((m = pattern.match(starDotStarRE))) {
      +            fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
      +        }
      +        else if ((m = pattern.match(dotStarRE))) {
      +            fastTest = dotStarTest;
      +        }
      +        const re = ast_js_1.AST.fromGlob(pattern, this.options).toMMPattern();
      +        return fastTest ? Object.assign(re, { test: fastTest }) : re;
      +    }
      +    makeRe() {
      +        if (this.regexp || this.regexp === false)
      +            return this.regexp;
      +        // at this point, this.set is a 2d array of partial
      +        // pattern strings, or "**".
      +        //
      +        // It's better to use .match().  This function shouldn't
      +        // be used, really, but it's pretty convenient sometimes,
      +        // when you just want to work with a regex.
      +        const set = this.set;
      +        if (!set.length) {
      +            this.regexp = false;
      +            return this.regexp;
      +        }
      +        const options = this.options;
      +        const twoStar = options.noglobstar
      +            ? star
      +            : options.dot
      +                ? twoStarDot
      +                : twoStarNoDot;
      +        const flags = new Set(options.nocase ? ['i'] : []);
      +        // regexpify non-globstar patterns
      +        // if ** is only item, then we just do one twoStar
      +        // if ** is first, and there are more, prepend (\/|twoStar\/)? to next
      +        // if ** is last, append (\/twoStar|) to previous
      +        // if ** is in the middle, append (\/|\/twoStar\/) to previous
      +        // then filter out GLOBSTAR symbols
      +        let re = set
      +            .map(pattern => {
      +            const pp = pattern.map(p => {
      +                if (p instanceof RegExp) {
      +                    for (const f of p.flags.split(''))
      +                        flags.add(f);
      +                }
      +                return typeof p === 'string'
      +                    ? regExpEscape(p)
      +                    : p === exports.GLOBSTAR
      +                        ? exports.GLOBSTAR
      +                        : p._src;
      +            });
      +            pp.forEach((p, i) => {
      +                const next = pp[i + 1];
      +                const prev = pp[i - 1];
      +                if (p !== exports.GLOBSTAR || prev === exports.GLOBSTAR) {
      +                    return;
      +                }
      +                if (prev === undefined) {
      +                    if (next !== undefined && next !== exports.GLOBSTAR) {
      +                        pp[i + 1] = '(?:\\/|' + twoStar + '\\/)?' + next;
      +                    }
      +                    else {
      +                        pp[i] = twoStar;
      +                    }
      +                }
      +                else if (next === undefined) {
      +                    pp[i - 1] = prev + '(?:\\/|' + twoStar + ')?';
      +                }
      +                else if (next !== exports.GLOBSTAR) {
      +                    pp[i - 1] = prev + '(?:\\/|\\/' + twoStar + '\\/)' + next;
      +                    pp[i + 1] = exports.GLOBSTAR;
      +                }
      +            });
      +            return pp.filter(p => p !== exports.GLOBSTAR).join('/');
      +        })
      +            .join('|');
      +        // need to wrap in parens if we had more than one thing with |,
      +        // otherwise only the first will be anchored to ^ and the last to $
      +        const [open, close] = set.length > 1 ? ['(?:', ')'] : ['', ''];
      +        // must match entire pattern
      +        // ending in a * or ** will make it less strict.
      +        re = '^' + open + re + close + '$';
      +        // can match anything, as long as it's not this.
      +        if (this.negate)
      +            re = '^(?!' + re + ').+$';
      +        try {
      +            this.regexp = new RegExp(re, [...flags].join(''));
      +            /* c8 ignore start */
      +        }
      +        catch (ex) {
      +            // should be impossible
      +            this.regexp = false;
      +        }
      +        /* c8 ignore stop */
      +        return this.regexp;
      +    }
      +    slashSplit(p) {
      +        // if p starts with // on windows, we preserve that
      +        // so that UNC paths aren't broken.  Otherwise, any number of
      +        // / characters are coalesced into one, unless
      +        // preserveMultipleSlashes is set to true.
      +        if (this.preserveMultipleSlashes) {
      +            return p.split('/');
      +        }
      +        else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
      +            // add an extra '' for the one we lose
      +            return ['', ...p.split(/\/+/)];
      +        }
      +        else {
      +            return p.split(/\/+/);
      +        }
      +    }
      +    match(f, partial = this.partial) {
      +        this.debug('match', f, this.pattern);
      +        // short-circuit in the case of busted things.
      +        // comments, etc.
      +        if (this.comment) {
      +            return false;
      +        }
      +        if (this.empty) {
      +            return f === '';
      +        }
      +        if (f === '/' && partial) {
      +            return true;
      +        }
      +        const options = this.options;
      +        // windows: need to use /, not \
      +        if (this.isWindows) {
      +            f = f.split('\\').join('/');
      +        }
      +        // treat the test path as a set of pathparts.
      +        const ff = this.slashSplit(f);
      +        this.debug(this.pattern, 'split', ff);
      +        // just ONE of the pattern sets in this.set needs to match
      +        // in order for it to be valid.  If negating, then just one
      +        // match means that we have failed.
      +        // Either way, return on the first hit.
      +        const set = this.set;
      +        this.debug(this.pattern, 'set', set);
      +        // Find the basename of the path by looking for the last non-empty segment
      +        let filename = ff[ff.length - 1];
      +        if (!filename) {
      +            for (let i = ff.length - 2; !filename && i >= 0; i--) {
      +                filename = ff[i];
      +            }
      +        }
      +        for (let i = 0; i < set.length; i++) {
      +            const pattern = set[i];
      +            let file = ff;
      +            if (options.matchBase && pattern.length === 1) {
      +                file = [filename];
      +            }
      +            const hit = this.matchOne(file, pattern, partial);
      +            if (hit) {
      +                if (options.flipNegate) {
      +                    return true;
      +                }
      +                return !this.negate;
      +            }
      +        }
      +        // didn't get any hits.  this is success if it's a negative
      +        // pattern, failure otherwise.
      +        if (options.flipNegate) {
      +            return false;
      +        }
      +        return this.negate;
      +    }
      +    static defaults(def) {
      +        return exports.minimatch.defaults(def).Minimatch;
      +    }
      +}
      +exports.Minimatch = Minimatch;
      +/* c8 ignore start */
      +var ast_js_2 = require("./ast.js");
      +Object.defineProperty(exports, "AST", { enumerable: true, get: function () { return ast_js_2.AST; } });
      +var escape_js_2 = require("./escape.js");
      +Object.defineProperty(exports, "escape", { enumerable: true, get: function () { return escape_js_2.escape; } });
      +var unescape_js_2 = require("./unescape.js");
      +Object.defineProperty(exports, "unescape", { enumerable: true, get: function () { return unescape_js_2.unescape; } });
      +/* c8 ignore stop */
      +exports.minimatch.AST = ast_js_1.AST;
      +exports.minimatch.Minimatch = Minimatch;
      +exports.minimatch.escape = escape_js_1.escape;
      +exports.minimatch.unescape = unescape_js_1.unescape;
      +//# sourceMappingURL=index.js.map
      \ No newline at end of file
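The file above is the compiled CommonJS entry point of the vendored minimatch. A minimal usage sketch of the exports it defines, assuming the build resolves as require('minimatch') (the sketch itself is not taken from this patch):

    // assumes the vendored build resolves as require('minimatch')
    const { minimatch, Minimatch, makeRe } = require('minimatch');

    // globstar (**) spans any number of path portions (see matchOne above)
    console.log(minimatch('src/a/b/c.js', 'src/**/*.js'));   // true

    // dotfiles are skipped unless options.dot is set (starTest vs starTestDot)
    console.log(minimatch('.env', '*'));                      // false
    console.log(minimatch('.env', '*', { dot: true }));       // true

    // a leading '!' negates the pattern (handled by parseNegate)
    console.log(minimatch('readme.md', '!*.js'));             // true

    // reuse a compiled pattern, or get the equivalent anchored RegExp
    const mm = new Minimatch('a/{b,c}/*.txt');                // braces expand first (braceExpand)
    console.log(mm.match('a/b/x.txt'));                       // true
    console.log(makeRe('*.js'));                              // RegExp, or false for an invalid pattern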
      diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/package.json b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/package.json
      new file mode 100644
      index 00000000000000..5bbefffbabee39
      --- /dev/null
      +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/package.json
      @@ -0,0 +1,3 @@
      +{
      +  "type": "commonjs"
      +}
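This nested package.json marks everything under dist/cjs/ as CommonJS: Node picks the nearest package.json "type" field, so the cjs build stays require()-able even if the package root or the dist/mjs/ tree declares "type": "module" (the root package.json is not shown in this hunk, so that pairing is an assumption). A short sketch of the two entry styles under that assumption:

    // CJS consumers load the dist/cjs build (assumption: root "exports" maps both builds)
    const { minimatch } = require('minimatch');

    // ESM consumers would load the dist/mjs build instead:
    // import { minimatch } from 'minimatch'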
      diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/unescape.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/unescape.js
      new file mode 100644
      index 00000000000000..47c36bcee5a02a
      --- /dev/null
      +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/unescape.js
      @@ -0,0 +1,24 @@
      +"use strict";
      +Object.defineProperty(exports, "__esModule", { value: true });
      +exports.unescape = void 0;
      +/**
      + * Un-escape a string that has been escaped with {@link escape}.
      + *
      + * If the {@link windowsPathsNoEscape} option is used, then square-brace
      + * escapes are removed, but not backslash escapes.  For example, it will turn
      + * the string `'[*]'` into `*`, but it will not turn `'\\*'` into `'*'`,
      + * becuase `\` is a path separator in `windowsPathsNoEscape` mode.
      + *
      + * When `windowsPathsNoEscape` is not set, then both brace escapes and
      + * backslash escapes are removed.
      + *
      + * Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot be escaped
      + * or unescaped.
      + */
      +const unescape = (s, { windowsPathsNoEscape = false, } = {}) => {
      +    return windowsPathsNoEscape
      +        ? s.replace(/\[([^\/\\])\]/g, '$1')
      +        : s.replace(/((?!\\).|^)\[([^\/\\])\]/g, '$1$2').replace(/\\([^\/])/g, '$1');
      +};
      +exports.unescape = unescape;
      +//# sourceMappingURL=unescape.js.map
      \ No newline at end of file
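The escape()/unescape() pair added in this dependency (escape.js appears earlier in the patch) are designed to round-trip: escape() backslash-escapes the glob magic characters, or wraps them in [] under windowsPathsNoEscape where \ is a path separator, and unescape() reverses both forms. A small sketch, assuming the helpers resolve via require('minimatch'):

    // assumes the vendored build resolves as require('minimatch')
    const { escape, unescape, minimatch } = require('minimatch');

    const literal = 'lib/[id]/*.js';
    const escaped = escape(literal);                  // 'lib/\\[id\\]/\\*.js'
    console.log(minimatch(literal, escaped));         // true: magic chars now match only themselves
    console.log(unescape(escaped) === literal);       // true

    // windowsPathsNoEscape wraps in [] instead of backslash-escaping
    console.log(escape('*.js', { windowsPathsNoEscape: true }));   // '[*].js'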
      diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/assert-valid-pattern.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/assert-valid-pattern.js
      new file mode 100644
      index 00000000000000..7b534fc30200bb
      --- /dev/null
      +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/assert-valid-pattern.js
      @@ -0,0 +1,10 @@
      +const MAX_PATTERN_LENGTH = 1024 * 64;
      +export const assertValidPattern = (pattern) => {
      +    if (typeof pattern !== 'string') {
      +        throw new TypeError('invalid pattern');
      +    }
      +    if (pattern.length > MAX_PATTERN_LENGTH) {
      +        throw new TypeError('pattern is too long');
      +    }
      +};
      +//# sourceMappingURL=assert-valid-pattern.js.map
      \ No newline at end of file
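assertValidPattern() is the input guard the index.js shown earlier requires before any parsing: patterns must be strings of at most 64 KiB, and anything else throws. A short sketch against the ESM file just added (illustrative only, not part of the patch):

    import { assertValidPattern } from './assert-valid-pattern.js';

    assertValidPattern('src/**/*.ts');   // ok: a string under the 64 KiB limit

    for (const bad of [42, 'a'.repeat(64 * 1024 + 1)]) {
      try {
        assertValidPattern(bad);
      } catch (er) {
        console.error(er.message);       // 'invalid pattern', then 'pattern is too long'
      }
    }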
      diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/ast.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/ast.js
      new file mode 100644
      index 00000000000000..7fb1f83e6182a0
      --- /dev/null
      +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/ast.js
      @@ -0,0 +1,585 @@
      +// parse a single path portion
      +import { parseClass } from './brace-expressions.js';
      +import { unescape } from './unescape.js';
      +const types = new Set(['!', '?', '+', '*', '@']);
      +const isExtglobType = (c) => types.has(c);
      +// Patterns that get prepended to bind to the start of either the
      +// entire string, or just a single path portion, to prevent dots
      +// and/or traversal patterns, when needed.
      +// Exts don't need the ^ or / bit, because the root binds that already.
      +const startNoTraversal = '(?!(?:^|/)\\.\\.?(?:$|/))';
      +const startNoDot = '(?!\\.)';
      +// characters that indicate a start of pattern needs the "no dots" bit,
      +// because a dot *might* be matched. ( is not in the list, because in
      +// the case of a child extglob, it will handle the prevention itself.
      +const addPatternStart = new Set(['[', '.']);
      +// cases where traversal is A-OK, no dot prevention needed
      +const justDots = new Set(['..', '.']);
      +const reSpecials = new Set('().*{}+?[]^$\\!');
      +const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
      +// any single thing other than /
      +const qmark = '[^/]';
      +// * => any number of characters
      +const star = qmark + '*?';
      +// use + when we need to ensure that *something* matches, because the * is
      +// the only thing in the path portion.
      +const starNoEmpty = qmark + '+?';
      +// remove the \ chars that we added if we end up doing a nonmagic compare
      +// const deslash = (s: string) => s.replace(/\\(.)/g, '$1')
      +export class AST {
      +    type;
      +    #root;
      +    #hasMagic;
      +    #uflag = false;
      +    #parts = [];
      +    #parent;
      +    #parentIndex;
      +    #negs;
      +    #filledNegs = false;
      +    #options;
      +    #toString;
      +    // set to true if it's an extglob with no children
      +    // (which really means one child of '')
      +    #emptyExt = false;
      +    constructor(type, parent, options = {}) {
      +        this.type = type;
      +        // extglobs are inherently magical
      +        if (type)
      +            this.#hasMagic = true;
      +        this.#parent = parent;
      +        this.#root = this.#parent ? this.#parent.#root : this;
      +        this.#options = this.#root === this ? options : this.#root.#options;
      +        this.#negs = this.#root === this ? [] : this.#root.#negs;
      +        if (type === '!' && !this.#root.#filledNegs)
      +            this.#negs.push(this);
      +        this.#parentIndex = this.#parent ? this.#parent.#parts.length : 0;
      +    }
      +    get hasMagic() {
      +        /* c8 ignore start */
      +        if (this.#hasMagic !== undefined)
      +            return this.#hasMagic;
      +        /* c8 ignore stop */
      +        for (const p of this.#parts) {
      +            if (typeof p === 'string')
      +                continue;
      +            if (p.type || p.hasMagic)
      +                return (this.#hasMagic = true);
      +        }
      +        // note: will be undefined until we generate the regexp src and find out
      +        return this.#hasMagic;
      +    }
      +    // reconstructs the pattern
      +    toString() {
      +        if (this.#toString !== undefined)
      +            return this.#toString;
      +        if (!this.type) {
      +            return (this.#toString = this.#parts.map(p => String(p)).join(''));
      +        }
      +        else {
      +            return (this.#toString =
      +                this.type + '(' + this.#parts.map(p => String(p)).join('|') + ')');
      +        }
      +    }
      +    #fillNegs() {
      +        /* c8 ignore start */
      +        if (this !== this.#root)
      +            throw new Error('should only call on root');
      +        if (this.#filledNegs)
      +            return this;
      +        /* c8 ignore stop */
      +        // call toString() once to fill this out
      +        this.toString();
      +        this.#filledNegs = true;
      +        let n;
      +        while ((n = this.#negs.pop())) {
      +            if (n.type !== '!')
      +                continue;
      +            // walk up the tree, appending everthing that comes AFTER parentIndex
      +            let p = n;
      +            let pp = p.#parent;
      +            while (pp) {
      +                for (let i = p.#parentIndex + 1; !pp.type && i < pp.#parts.length; i++) {
      +                    for (const part of n.#parts) {
      +                        /* c8 ignore start */
      +                        if (typeof part === 'string') {
      +                            throw new Error('string part in extglob AST??');
      +                        }
      +                        /* c8 ignore stop */
      +                        part.copyIn(pp.#parts[i]);
      +                    }
      +                }
      +                p = pp;
      +                pp = p.#parent;
      +            }
      +        }
      +        return this;
      +    }
      +    push(...parts) {
      +        for (const p of parts) {
      +            if (p === '')
      +                continue;
      +            /* c8 ignore start */
      +            if (typeof p !== 'string' && !(p instanceof AST && p.#parent === this)) {
      +                throw new Error('invalid part: ' + p);
      +            }
      +            /* c8 ignore stop */
      +            this.#parts.push(p);
      +        }
      +    }
      +    toJSON() {
      +        const ret = this.type === null
      +            ? this.#parts.slice().map(p => (typeof p === 'string' ? p : p.toJSON()))
      +            : [this.type, ...this.#parts.map(p => p.toJSON())];
      +        if (this.isStart() && !this.type)
      +            ret.unshift([]);
      +        if (this.isEnd() &&
      +            (this === this.#root ||
      +                (this.#root.#filledNegs && this.#parent?.type === '!'))) {
      +            ret.push({});
      +        }
      +        return ret;
      +    }
      +    isStart() {
      +        if (this.#root === this)
      +            return true;
      +        // if (this.type) return !!this.#parent?.isStart()
      +        if (!this.#parent?.isStart())
      +            return false;
      +        if (this.#parentIndex === 0)
      +            return true;
      +        // if everything AHEAD of this is a negation, then it's still the "start"
      +        const p = this.#parent;
      +        for (let i = 0; i < this.#parentIndex; i++) {
      +            const pp = p.#parts[i];
      +            if (!(pp instanceof AST && pp.type === '!')) {
      +                return false;
      +            }
      +        }
      +        return true;
      +    }
      +    isEnd() {
      +        if (this.#root === this)
      +            return true;
      +        if (this.#parent?.type === '!')
      +            return true;
      +        if (!this.#parent?.isEnd())
      +            return false;
      +        if (!this.type)
      +            return this.#parent?.isEnd();
      +        // if not root, it'll always have a parent
      +        /* c8 ignore start */
      +        const pl = this.#parent ? this.#parent.#parts.length : 0;
      +        /* c8 ignore stop */
      +        return this.#parentIndex === pl - 1;
      +    }
      +    copyIn(part) {
      +        if (typeof part === 'string')
      +            this.push(part);
      +        else
      +            this.push(part.clone(this));
      +    }
      +    clone(parent) {
      +        const c = new AST(this.type, parent);
      +        for (const p of this.#parts) {
      +            c.copyIn(p);
      +        }
      +        return c;
      +    }
      +    static #parseAST(str, ast, pos, opt) {
      +        let escaping = false;
      +        let inBrace = false;
      +        let braceStart = -1;
      +        let braceNeg = false;
      +        if (ast.type === null) {
      +            // outside of a extglob, append until we find a start
      +            let i = pos;
      +            let acc = '';
      +            while (i < str.length) {
      +                const c = str.charAt(i++);
      +                // still accumulate escapes at this point, but we do ignore
      +                // starts that are escaped
      +                if (escaping || c === '\\') {
      +                    escaping = !escaping;
      +                    acc += c;
      +                    continue;
      +                }
      +                if (inBrace) {
      +                    if (i === braceStart + 1) {
      +                        if (c === '^' || c === '!') {
      +                            braceNeg = true;
      +                        }
      +                    }
      +                    else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
      +                        inBrace = false;
      +                    }
      +                    acc += c;
      +                    continue;
      +                }
      +                else if (c === '[') {
      +                    inBrace = true;
      +                    braceStart = i;
      +                    braceNeg = false;
      +                    acc += c;
      +                    continue;
      +                }
      +                if (!opt.noext && isExtglobType(c) && str.charAt(i) === '(') {
      +                    ast.push(acc);
      +                    acc = '';
      +                    const ext = new AST(c, ast);
      +                    i = AST.#parseAST(str, ext, i, opt);
      +                    ast.push(ext);
      +                    continue;
      +                }
      +                acc += c;
      +            }
      +            ast.push(acc);
      +            return i;
      +        }
      +        // some kind of extglob, pos is at the (
      +        // find the next | or )
      +        let i = pos + 1;
      +        let part = new AST(null, ast);
      +        const parts = [];
      +        let acc = '';
      +        while (i < str.length) {
      +            const c = str.charAt(i++);
      +            // still accumulate escapes at this point, but we do ignore
      +            // starts that are escaped
      +            if (escaping || c === '\\') {
      +                escaping = !escaping;
      +                acc += c;
      +                continue;
      +            }
      +            if (inBrace) {
      +                if (i === braceStart + 1) {
      +                    if (c === '^' || c === '!') {
      +                        braceNeg = true;
      +                    }
      +                }
      +                else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
      +                    inBrace = false;
      +                }
      +                acc += c;
      +                continue;
      +            }
      +            else if (c === '[') {
      +                inBrace = true;
      +                braceStart = i;
      +                braceNeg = false;
      +                acc += c;
      +                continue;
      +            }
      +            if (isExtglobType(c) && str.charAt(i) === '(') {
      +                part.push(acc);
      +                acc = '';
      +                const ext = new AST(c, part);
      +                part.push(ext);
      +                i = AST.#parseAST(str, ext, i, opt);
      +                continue;
      +            }
      +            if (c === '|') {
      +                part.push(acc);
      +                acc = '';
      +                parts.push(part);
      +                part = new AST(null, ast);
      +                continue;
      +            }
      +            if (c === ')') {
      +                if (acc === '' && ast.#parts.length === 0) {
      +                    ast.#emptyExt = true;
      +                }
      +                part.push(acc);
      +                acc = '';
      +                ast.push(...parts, part);
      +                return i;
      +            }
      +            acc += c;
      +        }
      +        // unfinished extglob
      +        // if we got here, it was a malformed extglob! not an extglob, but
      +        // maybe something else in there.
      +        ast.type = null;
      +        ast.#hasMagic = undefined;
      +        ast.#parts = [str.substring(pos - 1)];
      +        return i;
      +    }
      +    static fromGlob(pattern, options = {}) {
      +        const ast = new AST(null, undefined, options);
      +        AST.#parseAST(pattern, ast, 0, options);
      +        return ast;
      +    }
      +    // returns the regular expression if there's magic, or the unescaped
      +    // string if not.
      +    toMMPattern() {
      +        // should only be called on root
      +        /* c8 ignore start */
      +        if (this !== this.#root)
      +            return this.#root.toMMPattern();
      +        /* c8 ignore stop */
      +        const glob = this.toString();
      +        const [re, body, hasMagic, uflag] = this.toRegExpSource();
      +        // if we're in nocase mode, and not nocaseMagicOnly, then we do
      +        // still need a regular expression if we have to case-insensitively
      +        // match capital/lowercase characters.
      +        const anyMagic = hasMagic ||
      +            this.#hasMagic ||
      +            (this.#options.nocase &&
      +                !this.#options.nocaseMagicOnly &&
      +                glob.toUpperCase() !== glob.toLowerCase());
      +        if (!anyMagic) {
      +            return body;
      +        }
      +        const flags = (this.#options.nocase ? 'i' : '') + (uflag ? 'u' : '');
      +        return Object.assign(new RegExp(`^${re}$`, flags), {
      +            _src: re,
      +            _glob: glob,
      +        });
      +    }
      +    // returns the string match, the regexp source, whether there's magic
      +    // in the regexp (so a regular expression is required) and whether or
      +    // not the uflag is needed for the regular expression (for posix classes)
      +    // TODO: instead of injecting the start/end at this point, just return
      +    // the BODY of the regexp, along with the start/end portions suitable
      +    // for binding the start/end in either a joined full-path makeRe context
      +    // (where we bind to (^|/), or a standalone matchPart context (where
      +    // we bind to ^, and not /).  Otherwise slashes get duped!
      +    //
      +    // In part-matching mode, the start is:
      +    // - if not isStart: nothing
      +    // - if traversal possible, but not allowed: ^(?!\.\.?$)
      +    // - if dots allowed or not possible: ^
      +    // - if dots possible and not allowed: ^(?!\.)
      +    // end is:
      +    // - if not isEnd(): nothing
      +    // - else: $
      +    //
      +    // In full-path matching mode, we put the slash at the START of the
      +    // pattern, so start is:
      +    // - if first pattern: same as part-matching mode
      +    // - if not isStart(): nothing
      +    // - if traversal possible, but not allowed: /(?!\.\.?(?:$|/))
      +    // - if dots allowed or not possible: /
      +    // - if dots possible and not allowed: /(?!\.)
      +    // end is:
      +    // - if last pattern, same as part-matching mode
      +    // - else nothing
      +    //
      +    // Always put the (?:$|/) on negated tails, though, because that has to be
      +    // there to bind the end of the negated pattern portion, and it's easier to
      +    // just stick it in now rather than try to inject it later in the middle of
      +    // the pattern.
      +    //
      +    // We can just always return the same end, and leave it up to the caller
      +    // to know whether it's going to be used joined or in parts.
      +    // And, if the start is adjusted slightly, can do the same there:
      +    // - if not isStart: nothing
      +    // - if traversal possible, but not allowed: (?:/|^)(?!\.\.?$)
      +    // - if dots allowed or not possible: (?:/|^)
      +    // - if dots possible and not allowed: (?:/|^)(?!\.)
      +    //
      +    // But it's better to have a simpler binding without a conditional, for
      +    // performance, so probably better to return both start options.
      +    //
      +    // Then the caller just ignores the end if it's not the first pattern,
      +    // and the start always gets applied.
      +    //
      +    // But that's always going to be $ if it's the ending pattern, or nothing,
      +    // so the caller can just attach $ at the end of the pattern when building.
      +    //
      +    // So the todo is:
      +    // - better detect what kind of start is needed
      +    // - return both flavors of starting pattern
      +    // - attach $ at the end of the pattern when creating the actual RegExp
      +    //
      +    // Ah, but wait, no, that all only applies to the root when the first pattern
      +    // is not an extglob. If the first pattern IS an extglob, then we need all
      +    // that dot prevention biz to live in the extglob portions, because eg
      +    // +(*|.x*) can match .xy but not .yx.
      +    //
      +    // So, return the two flavors if it's #root and the first child is not an
      +    // AST, otherwise leave it to the child AST to handle it, and there,
      +    // use the (?:^|/) style of start binding.
      +    //
      +    // Even simplified further:
      +    // - Since the start for a join is eg /(?!\.) and the start for a part
      +    // is ^(?!\.), we can just prepend (?!\.) to the pattern (either root
      +    // or start or whatever) and prepend ^ or / at the Regexp construction.
      +    toRegExpSource(allowDot) {
      +        const dot = allowDot ?? !!this.#options.dot;
      +        if (this.#root === this)
      +            this.#fillNegs();
      +        if (!this.type) {
      +            const noEmpty = this.isStart() && this.isEnd();
      +            const src = this.#parts
      +                .map(p => {
      +                const [re, _, hasMagic, uflag] = typeof p === 'string'
      +                    ? AST.#parseGlob(p, this.#hasMagic, noEmpty)
      +                    : p.toRegExpSource(allowDot);
      +                this.#hasMagic = this.#hasMagic || hasMagic;
      +                this.#uflag = this.#uflag || uflag;
      +                return re;
      +            })
      +                .join('');
      +            let start = '';
      +            if (this.isStart()) {
      +                if (typeof this.#parts[0] === 'string') {
      +                    // this is the string that will match the start of the pattern,
      +                    // so we need to protect against dots and such.
      +                    // '.' and '..' cannot match unless the pattern is that exactly,
      +                    // even if it starts with . or dot:true is set.
      +                    const dotTravAllowed = this.#parts.length === 1 && justDots.has(this.#parts[0]);
      +                    if (!dotTravAllowed) {
      +                        const aps = addPatternStart;
      +                        // check if we have a possibility of matching . or ..,
      +                        // and prevent that.
      +                        const needNoTrav =
      +                        // dots are allowed, and the pattern starts with [ or .
      +                        (dot && aps.has(src.charAt(0))) ||
      +                            // the pattern starts with \., and then [ or .
      +                            (src.startsWith('\\.') && aps.has(src.charAt(2))) ||
      +                            // the pattern starts with \.\., and then [ or .
      +                            (src.startsWith('\\.\\.') && aps.has(src.charAt(4)));
      +                        // no need to prevent dots if it can't match a dot, or if a
      +                        // sub-pattern will be preventing it anyway.
      +                        const needNoDot = !dot && !allowDot && aps.has(src.charAt(0));
      +                        start = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : '';
      +                    }
      +                }
      +            }
      +            // append the "end of path portion" pattern to negation tails
      +            let end = '';
      +            if (this.isEnd() &&
      +                this.#root.#filledNegs &&
      +                this.#parent?.type === '!') {
      +                end = '(?:$|\\/)';
      +            }
      +            const final = start + src + end;
      +            return [
      +                final,
      +                unescape(src),
      +                (this.#hasMagic = !!this.#hasMagic),
      +                this.#uflag,
      +            ];
      +        }
      +        // We need to calculate the body *twice* if it's a repeat pattern
      +        // at the start, once in nodot mode, then again in dot mode, so a
      +        // pattern like *(?) can match 'x.y'
      +        const repeated = this.type === '*' || this.type === '+';
      +        // some kind of extglob
      +        const start = this.type === '!' ? '(?:(?!(?:' : '(?:';
      +        let body = this.#partsToRegExp(dot);
      +        if (this.isStart() && this.isEnd() && !body && this.type !== '!') {
      +            // invalid extglob, has to at least be *something* present, if it's
      +            // the entire path portion.
      +            const s = this.toString();
      +            this.#parts = [s];
      +            this.type = null;
      +            this.#hasMagic = undefined;
      +            return [s, unescape(this.toString()), false, false];
      +        }
      +        // XXX abstract out this map method
      +        let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot
      +            ? ''
      +            : this.#partsToRegExp(true);
      +        if (bodyDotAllowed === body) {
      +            bodyDotAllowed = '';
      +        }
      +        if (bodyDotAllowed) {
      +            body = `(?:${body})(?:${bodyDotAllowed})*?`;
      +        }
      +        // an empty !() is exactly equivalent to a starNoEmpty
      +        let final = '';
      +        if (this.type === '!' && this.#emptyExt) {
      +            final = (this.isStart() && !dot ? startNoDot : '') + starNoEmpty;
      +        }
      +        else {
      +            const close = this.type === '!'
+                ? // !() must match something, but !(x) can match ''
      +                    '))' +
      +                        (this.isStart() && !dot && !allowDot ? startNoDot : '') +
      +                        star +
      +                        ')'
      +                : this.type === '@'
      +                    ? ')'
      +                    : this.type === '?'
      +                        ? ')?'
      +                        : this.type === '+' && bodyDotAllowed
      +                            ? ')'
      +                            : this.type === '*' && bodyDotAllowed
      +                                ? `)?`
      +                                : `)${this.type}`;
      +            final = start + body + close;
      +        }
      +        return [
      +            final,
      +            unescape(body),
      +            (this.#hasMagic = !!this.#hasMagic),
      +            this.#uflag,
      +        ];
      +    }
      +    #partsToRegExp(dot) {
      +        return this.#parts
      +            .map(p => {
      +            // extglob ASTs should only contain parent ASTs
      +            /* c8 ignore start */
      +            if (typeof p === 'string') {
      +                throw new Error('string type in extglob ast??');
      +            }
      +            /* c8 ignore stop */
      +            // can ignore hasMagic, because extglobs are already always magic
      +            const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot);
      +            this.#uflag = this.#uflag || uflag;
      +            return re;
      +        })
      +            .filter(p => !(this.isStart() && this.isEnd()) || !!p)
      +            .join('|');
      +    }
      +    static #parseGlob(glob, hasMagic, noEmpty = false) {
      +        let escaping = false;
      +        let re = '';
      +        let uflag = false;
      +        for (let i = 0; i < glob.length; i++) {
      +            const c = glob.charAt(i);
      +            if (escaping) {
      +                escaping = false;
      +                re += (reSpecials.has(c) ? '\\' : '') + c;
      +                continue;
      +            }
      +            if (c === '\\') {
      +                if (i === glob.length - 1) {
      +                    re += '\\\\';
      +                }
      +                else {
      +                    escaping = true;
      +                }
      +                continue;
      +            }
      +            if (c === '[') {
      +                const [src, needUflag, consumed, magic] = parseClass(glob, i);
      +                if (consumed) {
      +                    re += src;
      +                    uflag = uflag || needUflag;
      +                    i += consumed - 1;
      +                    hasMagic = hasMagic || magic;
      +                    continue;
      +                }
      +            }
      +            if (c === '*') {
      +                if (noEmpty && glob === '*')
      +                    re += starNoEmpty;
      +                else
      +                    re += star;
      +                hasMagic = true;
      +                continue;
      +            }
      +            if (c === '?') {
      +                re += qmark;
      +                hasMagic = true;
      +                continue;
      +            }
      +            re += regExpEscape(c);
      +        }
      +        return [re, unescape(glob), !!hasMagic, uflag];
      +    }
      +}
      +//# sourceMappingURL=ast.js.map
      \ No newline at end of file
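A minimal usage sketch of the AST compiler vendored above (illustrative only, not part of the patch; it assumes the dist/mjs files can be imported by relative path, and the sample pattern and filename are arbitrary):

    import { AST } from './ast.js'

    // parse an extglob pattern into an AST, then compile it
    const ast = AST.fromGlob('+(foo|bar)*.js', {})
    const mm = ast.toMMPattern()
    // with magic present, toMMPattern() returns a RegExp; otherwise it
    // returns the unescaped literal string for a plain equality match
    console.log(mm instanceof RegExp && mm.test('foo-util.js'))  // true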
      diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/brace-expressions.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/brace-expressions.js
      new file mode 100644
      index 00000000000000..c629d6ae816e27
      --- /dev/null
      +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/brace-expressions.js
      @@ -0,0 +1,148 @@
      +// translate the various posix character classes into unicode properties
      +// this works across all unicode locales
+// { <posix class>: [<translation>, /u flag required, negated]
      +const posixClasses = {
      +    '[:alnum:]': ['\\p{L}\\p{Nl}\\p{Nd}', true],
      +    '[:alpha:]': ['\\p{L}\\p{Nl}', true],
      +    '[:ascii:]': ['\\x' + '00-\\x' + '7f', false],
      +    '[:blank:]': ['\\p{Zs}\\t', true],
      +    '[:cntrl:]': ['\\p{Cc}', true],
      +    '[:digit:]': ['\\p{Nd}', true],
      +    '[:graph:]': ['\\p{Z}\\p{C}', true, true],
      +    '[:lower:]': ['\\p{Ll}', true],
      +    '[:print:]': ['\\p{C}', true],
      +    '[:punct:]': ['\\p{P}', true],
      +    '[:space:]': ['\\p{Z}\\t\\r\\n\\v\\f', true],
      +    '[:upper:]': ['\\p{Lu}', true],
      +    '[:word:]': ['\\p{L}\\p{Nl}\\p{Nd}\\p{Pc}', true],
      +    '[:xdigit:]': ['A-Fa-f0-9', false],
      +};
      +// only need to escape a few things inside of brace expressions
      +// escapes: [ \ ] -
      +const braceEscape = (s) => s.replace(/[[\]\\-]/g, '\\$&');
      +// escape all regexp magic characters
      +const regexpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
      +// everything has already been escaped, we just have to join
      +const rangesToString = (ranges) => ranges.join('');
      +// takes a glob string at a posix brace expression, and returns
      +// an equivalent regular expression source, and boolean indicating
      +// whether the /u flag needs to be applied, and the number of chars
      +// consumed to parse the character class.
      +// This also removes out of order ranges, and returns ($.) if the
+// entire class is just no good.
      +export const parseClass = (glob, position) => {
      +    const pos = position;
      +    /* c8 ignore start */
      +    if (glob.charAt(pos) !== '[') {
      +        throw new Error('not in a brace expression');
      +    }
      +    /* c8 ignore stop */
      +    const ranges = [];
      +    const negs = [];
      +    let i = pos + 1;
      +    let sawStart = false;
      +    let uflag = false;
      +    let escaping = false;
      +    let negate = false;
      +    let endPos = pos;
      +    let rangeStart = '';
      +    WHILE: while (i < glob.length) {
      +        const c = glob.charAt(i);
      +        if ((c === '!' || c === '^') && i === pos + 1) {
      +            negate = true;
      +            i++;
      +            continue;
      +        }
      +        if (c === ']' && sawStart && !escaping) {
      +            endPos = i + 1;
      +            break;
      +        }
      +        sawStart = true;
      +        if (c === '\\') {
      +            if (!escaping) {
      +                escaping = true;
      +                i++;
      +                continue;
      +            }
      +            // escaped \ char, fall through and treat like normal char
      +        }
      +        if (c === '[' && !escaping) {
      +            // either a posix class, a collation equivalent, or just a [
      +            for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) {
      +                if (glob.startsWith(cls, i)) {
      +                    // invalid, [a-[] is fine, but not [a-[:alpha]]
      +                    if (rangeStart) {
      +                        return ['$.', false, glob.length - pos, true];
      +                    }
      +                    i += cls.length;
      +                    if (neg)
      +                        negs.push(unip);
      +                    else
      +                        ranges.push(unip);
      +                    uflag = uflag || u;
      +                    continue WHILE;
      +                }
      +            }
      +        }
      +        // now it's just a normal character, effectively
      +        escaping = false;
      +        if (rangeStart) {
      +            // throw this range away if it's not valid, but others
      +            // can still match.
      +            if (c > rangeStart) {
      +                ranges.push(braceEscape(rangeStart) + '-' + braceEscape(c));
      +            }
      +            else if (c === rangeStart) {
      +                ranges.push(braceEscape(c));
      +            }
      +            rangeStart = '';
      +            i++;
      +            continue;
      +        }
      +        // now might be the start of a range.
+        // can be either c-d or c-] or c] at this point
      +        if (glob.startsWith('-]', i + 1)) {
      +            ranges.push(braceEscape(c + '-'));
      +            i += 2;
      +            continue;
      +        }
      +        if (glob.startsWith('-', i + 1)) {
      +            rangeStart = c;
      +            i += 2;
      +            continue;
      +        }
      +        // not the start of a range, just a single character
      +        ranges.push(braceEscape(c));
      +        i++;
      +    }
      +    if (endPos < i) {
      +        // didn't see the end of the class, not a valid class,
      +        // but might still be valid as a literal match.
      +        return ['', false, 0, false];
      +    }
      +    // if we got no ranges and no negates, then we have a range that
      +    // cannot possibly match anything, and that poisons the whole glob
      +    if (!ranges.length && !negs.length) {
      +        return ['$.', false, glob.length - pos, true];
      +    }
      +    // if we got one positive range, and it's a single character, then that's
      +    // not actually a magic pattern, it's just that one literal character.
      +    // we should not treat that as "magic", we should just return the literal
      +    // character. [_] is a perfectly valid way to escape glob magic chars.
      +    if (negs.length === 0 &&
      +        ranges.length === 1 &&
      +        /^\\?.$/.test(ranges[0]) &&
      +        !negate) {
      +        const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0];
      +        return [regexpEscape(r), false, endPos - pos, false];
      +    }
      +    const sranges = '[' + (negate ? '^' : '') + rangesToString(ranges) + ']';
      +    const snegs = '[' + (negate ? '' : '^') + rangesToString(negs) + ']';
      +    const comb = ranges.length && negs.length
      +        ? '(' + sranges + '|' + snegs + ')'
      +        : ranges.length
      +            ? sranges
      +            : snegs;
      +    return [comb, uflag, endPos - pos, true];
      +};
      +//# sourceMappingURL=brace-expressions.js.map
      \ No newline at end of file
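A quick sketch of how parseClass is consumed (illustrative only, not part of the patch; the return values were hand-traced from the code above, so treat them as approximate):

    import { parseClass } from './brace-expressions.js'

    // returns [regexp source, /u flag needed, characters consumed, has magic]
    console.log(parseClass('[a-c]', 0))        // [ '[a-c]', false, 5, true ]
    console.log(parseClass('[[:digit:]]x', 0)) // [ '[\\p{Nd}]', true, 11, true ]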
      diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/escape.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/escape.js
      new file mode 100644
      index 00000000000000..16f7c8c7bdc646
      --- /dev/null
      +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/escape.js
      @@ -0,0 +1,18 @@
      +/**
      + * Escape all magic characters in a glob pattern.
      + *
      + * If the {@link windowsPathsNoEscape | GlobOptions.windowsPathsNoEscape}
      + * option is used, then characters are escaped by wrapping in `[]`, because
      + * a magic character wrapped in a character class can only be satisfied by
      + * that exact character.  In this mode, `\` is _not_ escaped, because it is
      + * not interpreted as a magic character, but instead as a path separator.
      + */
      +export const escape = (s, { windowsPathsNoEscape = false, } = {}) => {
      +    // don't need to escape +@! because we escape the parens
      +    // that make those magic, and escaping ! as [!] isn't valid,
      +    // because [!]] is a valid glob class meaning not ']'.
      +    return windowsPathsNoEscape
      +        ? s.replace(/[?*()[\]]/g, '[$&]')
      +        : s.replace(/[?*()[\]\\]/g, '\\$&');
      +};
      +//# sourceMappingURL=escape.js.map
      \ No newline at end of file
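A short illustration of the two escaping modes (illustrative sketch, not part of the patch; assumes a relative import of the dist file):

    import { escape } from './escape.js'

    // default mode: backslash-escape every glob magic character
    console.log(escape('a?b*'))                                  // a\?b\*
    // windowsPathsNoEscape: wrap in [] instead, since \ is a path separator there
    console.log(escape('a?b*', { windowsPathsNoEscape: true }))  // a[?]b[*]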
      diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/index.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/index.js
      new file mode 100644
      index 00000000000000..831b6a67f63fb4
      --- /dev/null
      +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/index.js
      @@ -0,0 +1,995 @@
      +import expand from 'brace-expansion';
      +import { assertValidPattern } from './assert-valid-pattern.js';
      +import { AST } from './ast.js';
      +import { escape } from './escape.js';
      +import { unescape } from './unescape.js';
      +export const minimatch = (p, pattern, options = {}) => {
      +    assertValidPattern(pattern);
      +    // shortcut: comments match nothing.
      +    if (!options.nocomment && pattern.charAt(0) === '#') {
      +        return false;
      +    }
      +    return new Minimatch(pattern, options).match(p);
      +};
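Basic behaviour of the exported minimatch() function, including the comment shortcut above (illustrative sketch, not part of the patch; filenames are arbitrary):

    import { minimatch } from './index.js'

    minimatch('bar.foo', '*.foo')                     // true
    minimatch('.hidden.foo', '*.foo')                 // false: * does not match a leading dot
    minimatch('.hidden.foo', '*.foo', { dot: true })  // true
    minimatch('anything', '#a comment')               // false: comments match nothing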
      +// Optimized checking for the most common glob patterns.
      +const starDotExtRE = /^\*+([^+@!?\*\[\(]*)$/;
      +const starDotExtTest = (ext) => (f) => !f.startsWith('.') && f.endsWith(ext);
      +const starDotExtTestDot = (ext) => (f) => f.endsWith(ext);
      +const starDotExtTestNocase = (ext) => {
      +    ext = ext.toLowerCase();
      +    return (f) => !f.startsWith('.') && f.toLowerCase().endsWith(ext);
      +};
      +const starDotExtTestNocaseDot = (ext) => {
      +    ext = ext.toLowerCase();
      +    return (f) => f.toLowerCase().endsWith(ext);
      +};
      +const starDotStarRE = /^\*+\.\*+$/;
      +const starDotStarTest = (f) => !f.startsWith('.') && f.includes('.');
      +const starDotStarTestDot = (f) => f !== '.' && f !== '..' && f.includes('.');
      +const dotStarRE = /^\.\*+$/;
      +const dotStarTest = (f) => f !== '.' && f !== '..' && f.startsWith('.');
      +const starRE = /^\*+$/;
      +const starTest = (f) => f.length !== 0 && !f.startsWith('.');
      +const starTestDot = (f) => f.length !== 0 && f !== '.' && f !== '..';
      +const qmarksRE = /^\?+([^+@!?\*\[\(]*)?$/;
      +const qmarksTestNocase = ([$0, ext = '']) => {
      +    const noext = qmarksTestNoExt([$0]);
      +    if (!ext)
      +        return noext;
      +    ext = ext.toLowerCase();
      +    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
      +};
      +const qmarksTestNocaseDot = ([$0, ext = '']) => {
      +    const noext = qmarksTestNoExtDot([$0]);
      +    if (!ext)
      +        return noext;
      +    ext = ext.toLowerCase();
      +    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
      +};
      +const qmarksTestDot = ([$0, ext = '']) => {
      +    const noext = qmarksTestNoExtDot([$0]);
      +    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
      +};
      +const qmarksTest = ([$0, ext = '']) => {
      +    const noext = qmarksTestNoExt([$0]);
      +    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
      +};
      +const qmarksTestNoExt = ([$0]) => {
      +    const len = $0.length;
      +    return (f) => f.length === len && !f.startsWith('.');
      +};
      +const qmarksTestNoExtDot = ([$0]) => {
      +    const len = $0.length;
      +    return (f) => f.length === len && f !== '.' && f !== '..';
      +};
      +/* c8 ignore start */
      +const defaultPlatform = (typeof process === 'object' && process
      +    ? (typeof process.env === 'object' &&
      +        process.env &&
      +        process.env.__MINIMATCH_TESTING_PLATFORM__) ||
      +        process.platform
      +    : 'posix');
      +const path = {
      +    win32: { sep: '\\' },
      +    posix: { sep: '/' },
      +};
      +/* c8 ignore stop */
      +export const sep = defaultPlatform === 'win32' ? path.win32.sep : path.posix.sep;
      +minimatch.sep = sep;
      +export const GLOBSTAR = Symbol('globstar **');
      +minimatch.GLOBSTAR = GLOBSTAR;
      +// any single thing other than /
      +// don't need to escape / when using new RegExp()
      +const qmark = '[^/]';
      +// * => any number of characters
      +const star = qmark + '*?';
      +// ** when dots are allowed.  Anything goes, except .. and .
      +// not (^ or / followed by one or two dots followed by $ or /),
      +// followed by anything, any number of times.
      +const twoStarDot = '(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?';
      +// not a ^ or / followed by a dot,
      +// followed by anything, any number of times.
      +const twoStarNoDot = '(?:(?!(?:\\/|^)\\.).)*?';
      +export const filter = (pattern, options = {}) => (p) => minimatch(p, pattern, options);
      +minimatch.filter = filter;
      +const ext = (a, b = {}) => Object.assign({}, a, b);
      +export const defaults = (def) => {
      +    if (!def || typeof def !== 'object' || !Object.keys(def).length) {
      +        return minimatch;
      +    }
      +    const orig = minimatch;
      +    const m = (p, pattern, options = {}) => orig(p, pattern, ext(def, options));
      +    return Object.assign(m, {
      +        Minimatch: class Minimatch extends orig.Minimatch {
      +            constructor(pattern, options = {}) {
      +                super(pattern, ext(def, options));
      +            }
      +            static defaults(options) {
      +                return orig.defaults(ext(def, options)).Minimatch;
      +            }
      +        },
      +        AST: class AST extends orig.AST {
      +            /* c8 ignore start */
      +            constructor(type, parent, options = {}) {
      +                super(type, parent, ext(def, options));
      +            }
      +            /* c8 ignore stop */
      +            static fromGlob(pattern, options = {}) {
      +                return orig.AST.fromGlob(pattern, ext(def, options));
      +            }
      +        },
      +        unescape: (s, options = {}) => orig.unescape(s, ext(def, options)),
      +        escape: (s, options = {}) => orig.escape(s, ext(def, options)),
      +        filter: (pattern, options = {}) => orig.filter(pattern, ext(def, options)),
      +        defaults: (options) => orig.defaults(ext(def, options)),
      +        makeRe: (pattern, options = {}) => orig.makeRe(pattern, ext(def, options)),
      +        braceExpand: (pattern, options = {}) => orig.braceExpand(pattern, ext(def, options)),
      +        match: (list, pattern, options = {}) => orig.match(list, pattern, ext(def, options)),
      +        sep: orig.sep,
      +        GLOBSTAR: GLOBSTAR,
      +    });
      +};
      +minimatch.defaults = defaults;
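The defaults() factory above returns a fully re-bound API; a brief sketch (illustrative only, not part of the patch):

    import { minimatch } from './index.js'

    // every method and the Minimatch/AST classes inherit the provided defaults
    const insensitive = minimatch.defaults({ nocase: true })
    insensitive('README.MD', '*.md')                      // true
    new insensitive.Minimatch('*.md').match('Readme.MD')  // true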
      +// Brace expansion:
      +// a{b,c}d -> abd acd
      +// a{b,}c -> abc ac
      +// a{0..3}d -> a0d a1d a2d a3d
      +// a{b,c{d,e}f}g -> abg acdfg acefg
+// a{b,c}d{e,f}g -> abdeg abdfg acdeg acdfg
      +//
      +// Invalid sets are not expanded.
      +// a{2..}b -> a{2..}b
      +// a{b}c -> a{b}c
      +export const braceExpand = (pattern, options = {}) => {
      +    assertValidPattern(pattern);
+    // Thanks to Yeting Li for
      +    // improving this regexp to avoid a ReDOS vulnerability.
      +    if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
      +        // shortcut. no need to expand.
      +        return [pattern];
      +    }
      +    return expand(pattern);
      +};
      +minimatch.braceExpand = braceExpand;
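The expansions described in the comment block above, shown as calls (illustrative sketch, not part of the patch):

    import { braceExpand } from './index.js'

    braceExpand('a{b,c}d')                     // [ 'abd', 'acd' ]
    braceExpand('a{0..2}d')                    // [ 'a0d', 'a1d', 'a2d' ]
    braceExpand('a{b}c')                       // [ 'a{b}c' ]  (invalid set, not expanded)
    braceExpand('a{b,c}d', { nobrace: true })  // [ 'a{b,c}d' ]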
      +// parse a component of the expanded set.
      +// At this point, no pattern may contain "/" in it
      +// so we're going to return a 2d array, where each entry is the full
      +// pattern, split on '/', and then turned into a regular expression.
      +// A regexp is made at the end which joins each array with an
      +// escaped /, and another full one which joins each regexp with |.
      +//
      +// Following the lead of Bash 4.1, note that "**" only has special meaning
      +// when it is the *only* thing in a path portion.  Otherwise, any series
      +// of * is equivalent to a single *.  Globstar behavior is enabled by
      +// default, and can be disabled by setting options.noglobstar.
      +export const makeRe = (pattern, options = {}) => new Minimatch(pattern, options).makeRe();
      +minimatch.makeRe = makeRe;
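makeRe() compiles the whole pattern set into one anchored RegExp, or false if nothing compiles; a small sketch (illustrative only, not part of the patch):

    import { makeRe } from './index.js'

    const re = makeRe('*.js')
    console.log(re && re.test('index.js'))    // true
    console.log(re && re.test('.hidden.js'))  // false: leading dots are still protected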
      +export const match = (list, pattern, options = {}) => {
      +    const mm = new Minimatch(pattern, options);
      +    list = list.filter(f => mm.match(f));
      +    if (mm.options.nonull && !list.length) {
      +        list.push(pattern);
      +    }
      +    return list;
      +};
      +minimatch.match = match;
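And the list-filtering helper, including the nonull fallback (illustrative sketch, not part of the patch):

    import { match } from './index.js'

    match(['foo.md', 'bar.js', 'baz.md'], '*.md')  // [ 'foo.md', 'baz.md' ]
    match(['bar.js'], '*.md', { nonull: true })    // [ '*.md' ]: the pattern is returned when nothing matches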
      +// replace stuff like \* with *
      +const globMagic = /[?*]|[+@!]\(.*?\)|\[|\]/;
      +const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
      +export class Minimatch {
      +    options;
      +    set;
      +    pattern;
      +    windowsPathsNoEscape;
      +    nonegate;
      +    negate;
      +    comment;
      +    empty;
      +    preserveMultipleSlashes;
      +    partial;
      +    globSet;
      +    globParts;
      +    nocase;
      +    isWindows;
      +    platform;
      +    windowsNoMagicRoot;
      +    regexp;
      +    constructor(pattern, options = {}) {
      +        assertValidPattern(pattern);
      +        options = options || {};
      +        this.options = options;
      +        this.pattern = pattern;
      +        this.platform = options.platform || defaultPlatform;
      +        this.isWindows = this.platform === 'win32';
      +        this.windowsPathsNoEscape =
      +            !!options.windowsPathsNoEscape || options.allowWindowsEscape === false;
      +        if (this.windowsPathsNoEscape) {
      +            this.pattern = this.pattern.replace(/\\/g, '/');
      +        }
      +        this.preserveMultipleSlashes = !!options.preserveMultipleSlashes;
      +        this.regexp = null;
      +        this.negate = false;
      +        this.nonegate = !!options.nonegate;
      +        this.comment = false;
      +        this.empty = false;
      +        this.partial = !!options.partial;
      +        this.nocase = !!this.options.nocase;
      +        this.windowsNoMagicRoot =
      +            options.windowsNoMagicRoot !== undefined
      +                ? options.windowsNoMagicRoot
      +                : !!(this.isWindows && this.nocase);
      +        this.globSet = [];
      +        this.globParts = [];
      +        this.set = [];
      +        // make the set of regexps etc.
      +        this.make();
      +    }
      +    hasMagic() {
      +        if (this.options.magicalBraces && this.set.length > 1) {
      +            return true;
      +        }
      +        for (const pattern of this.set) {
      +            for (const part of pattern) {
      +                if (typeof part !== 'string')
      +                    return true;
      +            }
      +        }
      +        return false;
      +    }
      +    debug(..._) { }
      +    make() {
      +        const pattern = this.pattern;
      +        const options = this.options;
      +        // empty patterns and comments match nothing.
      +        if (!options.nocomment && pattern.charAt(0) === '#') {
      +            this.comment = true;
      +            return;
      +        }
      +        if (!pattern) {
      +            this.empty = true;
      +            return;
      +        }
      +        // step 1: figure out negation, etc.
      +        this.parseNegate();
      +        // step 2: expand braces
      +        this.globSet = [...new Set(this.braceExpand())];
      +        if (options.debug) {
      +            this.debug = (...args) => console.error(...args);
      +        }
      +        this.debug(this.pattern, this.globSet);
      +        // step 3: now we have a set, so turn each one into a series of
      +        // path-portion matching patterns.
      +        // These will be regexps, except in the case of "**", which is
      +        // set to the GLOBSTAR object for globstar behavior,
      +        // and will not contain any / characters
      +        //
      +        // First, we preprocess to make the glob pattern sets a bit simpler
      +        // and deduped.  There are some perf-killing patterns that can cause
      +        // problems with a glob walk, but we can simplify them down a bit.
      +        const rawGlobParts = this.globSet.map(s => this.slashSplit(s));
      +        this.globParts = this.preprocess(rawGlobParts);
      +        this.debug(this.pattern, this.globParts);
      +        // glob --> regexps
      +        let set = this.globParts.map((s, _, __) => {
      +            if (this.isWindows && this.windowsNoMagicRoot) {
      +                // check if it's a drive or unc path.
      +                const isUNC = s[0] === '' &&
      +                    s[1] === '' &&
      +                    (s[2] === '?' || !globMagic.test(s[2])) &&
      +                    !globMagic.test(s[3]);
      +                const isDrive = /^[a-z]:/i.test(s[0]);
      +                if (isUNC) {
      +                    return [...s.slice(0, 4), ...s.slice(4).map(ss => this.parse(ss))];
      +                }
      +                else if (isDrive) {
      +                    return [s[0], ...s.slice(1).map(ss => this.parse(ss))];
      +                }
      +            }
      +            return s.map(ss => this.parse(ss));
      +        });
      +        this.debug(this.pattern, set);
      +        // filter out everything that didn't compile properly.
      +        this.set = set.filter(s => s.indexOf(false) === -1);
      +        // do not treat the ? in UNC paths as magic
      +        if (this.isWindows) {
      +            for (let i = 0; i < this.set.length; i++) {
      +                const p = this.set[i];
      +                if (p[0] === '' &&
      +                    p[1] === '' &&
      +                    this.globParts[i][2] === '?' &&
      +                    typeof p[3] === 'string' &&
      +                    /^[a-z]:$/i.test(p[3])) {
      +                    p[2] = '?';
      +                }
      +            }
      +        }
      +        this.debug(this.pattern, this.set);
      +    }
      +    // various transforms to equivalent pattern sets that are
      +    // faster to process in a filesystem walk.  The goal is to
      +    // eliminate what we can, and push all ** patterns as far
      +    // to the right as possible, even if it increases the number
      +    // of patterns that we have to process.
      +    preprocess(globParts) {
      +        // if we're not in globstar mode, then turn all ** into *
      +        if (this.options.noglobstar) {
      +            for (let i = 0; i < globParts.length; i++) {
      +                for (let j = 0; j < globParts[i].length; j++) {
      +                    if (globParts[i][j] === '**') {
      +                        globParts[i][j] = '*';
      +                    }
      +                }
      +            }
      +        }
      +        const { optimizationLevel = 1 } = this.options;
      +        if (optimizationLevel >= 2) {
      +            // aggressive optimization for the purpose of fs walking
      +            globParts = this.firstPhasePreProcess(globParts);
      +            globParts = this.secondPhasePreProcess(globParts);
      +        }
      +        else if (optimizationLevel >= 1) {
      +            // just basic optimizations to remove some .. parts
      +            globParts = this.levelOneOptimize(globParts);
      +        }
      +        else {
      +            globParts = this.adjascentGlobstarOptimize(globParts);
      +        }
      +        return globParts;
      +    }
+    // just get rid of adjacent ** portions
      +    adjascentGlobstarOptimize(globParts) {
      +        return globParts.map(parts => {
      +            let gs = -1;
      +            while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
      +                let i = gs;
      +                while (parts[i + 1] === '**') {
      +                    i++;
      +                }
      +                if (i !== gs) {
      +                    parts.splice(gs, i - gs);
      +                }
      +            }
      +            return parts;
      +        });
      +    }
+    // get rid of adjacent ** and resolve .. portions
      +    levelOneOptimize(globParts) {
      +        return globParts.map(parts => {
      +            parts = parts.reduce((set, part) => {
      +                const prev = set[set.length - 1];
      +                if (part === '**' && prev === '**') {
      +                    return set;
      +                }
      +                if (part === '..') {
      +                    if (prev && prev !== '..' && prev !== '.' && prev !== '**') {
      +                        set.pop();
      +                        return set;
      +                    }
      +                }
      +                set.push(part);
      +                return set;
      +            }, []);
      +            return parts.length === 0 ? [''] : parts;
      +        });
      +    }
      +    levelTwoFileOptimize(parts) {
      +        if (!Array.isArray(parts)) {
      +            parts = this.slashSplit(parts);
      +        }
      +        let didSomething = false;
      +        do {
      +            didSomething = false;
+            // <pre>/<e>/<rest> -> <pre>/<rest>
      +            if (!this.preserveMultipleSlashes) {
      +                for (let i = 1; i < parts.length - 1; i++) {
      +                    const p = parts[i];
      +                    // don't squeeze out UNC patterns
      +                    if (i === 1 && p === '' && parts[0] === '')
      +                        continue;
      +                    if (p === '.' || p === '') {
      +                        didSomething = true;
      +                        parts.splice(i, 1);
      +                        i--;
      +                    }
      +                }
      +                if (parts[0] === '.' &&
      +                    parts.length === 2 &&
      +                    (parts[1] === '.' || parts[1] === '')) {
      +                    didSomething = true;
      +                    parts.pop();
      +                }
      +            }
+            // <pre>/<p>/../<rest> -> <pre>/<rest>
      +            let dd = 0;
      +            while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
      +                const p = parts[dd - 1];
      +                if (p && p !== '.' && p !== '..' && p !== '**') {
      +                    didSomething = true;
      +                    parts.splice(dd - 1, 2);
      +                    dd -= 2;
      +                }
      +            }
      +        } while (didSomething);
      +        return parts.length === 0 ? [''] : parts;
      +    }
      +    // First phase: single-pattern processing
+    // <pre> is 1 or more portions
+    // <rest> is 1 or more portions
+    // <p> is any portion other than ., .., '', or **
+    // <e> is . or ''
+    //
+    // **/.. is *brutal* for filesystem walking performance, because
+    // it effectively resets the recursive walk each time it occurs,
+    // and ** cannot be reduced out by a .. pattern part like a regexp
+    // or most strings (other than .., ., and '') can be.
+    //
+    // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+    // <pre>/<e>/<rest> -> <pre>/<rest>
+    // <pre>/<p>/../<rest> -> <pre>/<rest>
+    // **/**/<rest> -> **/<rest>
+    //
+    // **/*/<rest> -> */**/<rest> <== not valid because ** doesn't follow
      +    // this WOULD be allowed if ** did follow symlinks, or * didn't
      +    firstPhasePreProcess(globParts) {
      +        let didSomething = false;
      +        do {
      +            didSomething = false;
+            // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+            for (let parts of globParts) {
+                let gs = -1;
+                while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
+                    let gss = gs;
+                    while (parts[gss + 1] === '**') {
+                        // <pre>/**/**/<rest> -> <pre>/**/<rest>
      +                        gss++;
      +                    }
      +                    // eg, if gs is 2 and gss is 4, that means we have 3 **
      +                    // parts, and can remove 2 of them.
      +                    if (gss > gs) {
      +                        parts.splice(gs + 1, gss - gs);
      +                    }
      +                    let next = parts[gs + 1];
      +                    const p = parts[gs + 2];
      +                    const p2 = parts[gs + 3];
      +                    if (next !== '..')
      +                        continue;
      +                    if (!p ||
      +                        p === '.' ||
      +                        p === '..' ||
      +                        !p2 ||
      +                        p2 === '.' ||
      +                        p2 === '..') {
      +                        continue;
      +                    }
      +                    didSomething = true;
      +                    // edit parts in place, and push the new one
      +                    parts.splice(gs, 1);
      +                    const other = parts.slice(0);
      +                    other[gs] = '**';
      +                    globParts.push(other);
      +                    gs--;
      +                }
+                // <pre>/<e>/<rest> -> <pre>/<rest>
      +                if (!this.preserveMultipleSlashes) {
      +                    for (let i = 1; i < parts.length - 1; i++) {
      +                        const p = parts[i];
      +                        // don't squeeze out UNC patterns
      +                        if (i === 1 && p === '' && parts[0] === '')
      +                            continue;
      +                        if (p === '.' || p === '') {
      +                            didSomething = true;
      +                            parts.splice(i, 1);
      +                            i--;
      +                        }
      +                    }
      +                    if (parts[0] === '.' &&
      +                        parts.length === 2 &&
      +                        (parts[1] === '.' || parts[1] === '')) {
      +                        didSomething = true;
      +                        parts.pop();
      +                    }
      +                }
+                // <pre>/<p>/../<rest> -> <pre>/<rest>
      +                let dd = 0;
      +                while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
      +                    const p = parts[dd - 1];
      +                    if (p && p !== '.' && p !== '..' && p !== '**') {
      +                        didSomething = true;
      +                        const needDot = dd === 1 && parts[dd + 1] === '**';
      +                        const splin = needDot ? ['.'] : [];
      +                        parts.splice(dd - 1, 2, ...splin);
      +                        if (parts.length === 0)
      +                            parts.push('');
      +                        dd -= 2;
      +                    }
      +                }
      +            }
      +        } while (didSomething);
      +        return globParts;
      +    }
      +    // second phase: multi-pattern dedupes
+    // {<pre>/*/<rest>,<pre>/<p>/<rest>} -> <pre>/*/<rest>
+    // {<pre>/<rest>,<pre>/<rest>} -> <pre>/<rest>
+    // {<pre>/**/<rest>,<pre>/<rest>} -> <pre>/**/<rest>
+    //
+    // {<pre>/**/<rest>,<pre>/**/<p>/<rest>} -> <pre>/**/<rest>
+    // ^-- not valid because ** doesn't follow symlinks
      +    secondPhasePreProcess(globParts) {
      +        for (let i = 0; i < globParts.length - 1; i++) {
      +            for (let j = i + 1; j < globParts.length; j++) {
      +                const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);
      +                if (!matched)
      +                    continue;
      +                globParts[i] = matched;
      +                globParts[j] = [];
      +            }
      +        }
      +        return globParts.filter(gs => gs.length);
      +    }
      +    partsMatch(a, b, emptyGSMatch = false) {
      +        let ai = 0;
      +        let bi = 0;
      +        let result = [];
      +        let which = '';
      +        while (ai < a.length && bi < b.length) {
      +            if (a[ai] === b[bi]) {
      +                result.push(which === 'b' ? b[bi] : a[ai]);
      +                ai++;
      +                bi++;
      +            }
      +            else if (emptyGSMatch && a[ai] === '**' && b[bi] === a[ai + 1]) {
      +                result.push(a[ai]);
      +                ai++;
      +            }
      +            else if (emptyGSMatch && b[bi] === '**' && a[ai] === b[bi + 1]) {
      +                result.push(b[bi]);
      +                bi++;
      +            }
      +            else if (a[ai] === '*' &&
      +                b[bi] &&
      +                (this.options.dot || !b[bi].startsWith('.')) &&
      +                b[bi] !== '**') {
      +                if (which === 'b')
      +                    return false;
      +                which = 'a';
      +                result.push(a[ai]);
      +                ai++;
      +                bi++;
      +            }
      +            else if (b[bi] === '*' &&
      +                a[ai] &&
      +                (this.options.dot || !a[ai].startsWith('.')) &&
      +                a[ai] !== '**') {
      +                if (which === 'a')
      +                    return false;
      +                which = 'b';
      +                result.push(b[bi]);
      +                ai++;
      +                bi++;
      +            }
      +            else {
      +                return false;
      +            }
      +        }
+        // if we fall out of the loop, it means the two are identical
      +        // as long as their lengths match
      +        return a.length === b.length && result;
      +    }
      +    parseNegate() {
      +        if (this.nonegate)
      +            return;
      +        const pattern = this.pattern;
      +        let negate = false;
      +        let negateOffset = 0;
      +        for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {
      +            negate = !negate;
      +            negateOffset++;
      +        }
      +        if (negateOffset)
      +            this.pattern = pattern.slice(negateOffset);
      +        this.negate = negate;
      +    }
      +    // set partial to true to test if, for example,
      +    // "/a/b" matches the start of "/*/b/*/d"
      +    // Partial means, if you run out of file before you run
      +    // out of pattern, then that's fine, as long as all
      +    // the parts match.
      +    matchOne(file, pattern, partial = false) {
      +        const options = this.options;
      +        // UNC paths like //?/X:/... can match X:/... and vice versa
      +        // Drive letters in absolute drive or unc paths are always compared
      +        // case-insensitively.
      +        if (this.isWindows) {
      +            const fileDrive = typeof file[0] === 'string' && /^[a-z]:$/i.test(file[0]);
      +            const fileUNC = !fileDrive &&
      +                file[0] === '' &&
      +                file[1] === '' &&
      +                file[2] === '?' &&
      +                /^[a-z]:$/i.test(file[3]);
      +            const patternDrive = typeof pattern[0] === 'string' && /^[a-z]:$/i.test(pattern[0]);
      +            const patternUNC = !patternDrive &&
      +                pattern[0] === '' &&
      +                pattern[1] === '' &&
      +                pattern[2] === '?' &&
      +                typeof pattern[3] === 'string' &&
      +                /^[a-z]:$/i.test(pattern[3]);
      +            const fdi = fileUNC ? 3 : fileDrive ? 0 : undefined;
      +            const pdi = patternUNC ? 3 : patternDrive ? 0 : undefined;
      +            if (typeof fdi === 'number' && typeof pdi === 'number') {
      +                const [fd, pd] = [file[fdi], pattern[pdi]];
      +                if (fd.toLowerCase() === pd.toLowerCase()) {
      +                    pattern[pdi] = fd;
      +                    if (pdi > fdi) {
      +                        pattern = pattern.slice(pdi);
      +                    }
      +                    else if (fdi > pdi) {
      +                        file = file.slice(fdi);
      +                    }
      +                }
      +            }
      +        }
      +        // resolve and reduce . and .. portions in the file as well.
+        // don't need to do the second phase, because it's only one string[]
      +        const { optimizationLevel = 1 } = this.options;
      +        if (optimizationLevel >= 2) {
      +            file = this.levelTwoFileOptimize(file);
      +        }
      +        this.debug('matchOne', this, { file, pattern });
      +        this.debug('matchOne', file.length, pattern.length);
      +        for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
      +            this.debug('matchOne loop');
      +            var p = pattern[pi];
      +            var f = file[fi];
      +            this.debug(pattern, p, f);
      +            // should be impossible.
      +            // some invalid regexp stuff in the set.
      +            /* c8 ignore start */
      +            if (p === false) {
      +                return false;
      +            }
      +            /* c8 ignore stop */
      +            if (p === GLOBSTAR) {
      +                this.debug('GLOBSTAR', [pattern, p, f]);
      +                // "**"
      +                // a/**/b/**/c would match the following:
      +                // a/b/x/y/z/c
      +                // a/x/y/z/b/c
      +                // a/b/x/b/x/c
      +                // a/b/c
      +                // To do this, take the rest of the pattern after
      +                // the **, and see if it would match the file remainder.
      +                // If so, return success.
      +                // If not, the ** "swallows" a segment, and try again.
      +                // This is recursively awful.
      +                //
      +                // a/**/b/**/c matching a/b/x/y/z/c
      +                // - a matches a
      +                // - doublestar
      +                //   - matchOne(b/x/y/z/c, b/**/c)
      +                //     - b matches b
      +                //     - doublestar
      +                //       - matchOne(x/y/z/c, c) -> no
      +                //       - matchOne(y/z/c, c) -> no
      +                //       - matchOne(z/c, c) -> no
      +                //       - matchOne(c, c) yes, hit
      +                var fr = fi;
      +                var pr = pi + 1;
      +                if (pr === pl) {
      +                    this.debug('** at the end');
      +                    // a ** at the end will just swallow the rest.
      +                    // We have found a match.
      +                    // however, it will not swallow /.x, unless
      +                    // options.dot is set.
      +                    // . and .. are *never* matched by **, for explosively
      +                    // exponential reasons.
      +                    for (; fi < fl; fi++) {
      +                        if (file[fi] === '.' ||
      +                            file[fi] === '..' ||
      +                            (!options.dot && file[fi].charAt(0) === '.'))
      +                            return false;
      +                    }
      +                    return true;
      +                }
      +                // ok, let's see if we can swallow whatever we can.
      +                while (fr < fl) {
      +                    var swallowee = file[fr];
      +                    this.debug('\nglobstar while', file, fr, pattern, pr, swallowee);
      +                    // XXX remove this slice.  Just pass the start index.
      +                    if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
      +                        this.debug('globstar found match!', fr, fl, swallowee);
      +                        // found a match.
      +                        return true;
      +                    }
      +                    else {
      +                        // can't swallow "." or ".." ever.
      +                        // can only swallow ".foo" when explicitly asked.
      +                        if (swallowee === '.' ||
      +                            swallowee === '..' ||
      +                            (!options.dot && swallowee.charAt(0) === '.')) {
      +                            this.debug('dot detected!', file, fr, pattern, pr);
      +                            break;
      +                        }
      +                        // ** swallows a segment, and continue.
      +                        this.debug('globstar swallow a segment, and continue');
      +                        fr++;
      +                    }
      +                }
      +                // no match was found.
      +                // However, in partial mode, we can't say this is necessarily over.
      +                /* c8 ignore start */
      +                if (partial) {
      +                    // ran out of file
      +                    this.debug('\n>>> no match, partial?', file, fr, pattern, pr);
      +                    if (fr === fl) {
      +                        return true;
      +                    }
      +                }
      +                /* c8 ignore stop */
      +                return false;
      +            }
      +            // something other than **
      +            // non-magic patterns just have to match exactly
      +            // patterns with magic have been turned into regexps.
      +            let hit;
      +            if (typeof p === 'string') {
      +                hit = f === p;
      +                this.debug('string match', p, f, hit);
      +            }
      +            else {
      +                hit = p.test(f);
      +                this.debug('pattern match', p, f, hit);
      +            }
      +            if (!hit)
      +                return false;
      +        }
      +        // Note: ending in / means that we'll get a final ""
      +        // at the end of the pattern.  This can only match a
      +        // corresponding "" at the end of the file.
+        // If the file ends in /, then it can only match
+        // a pattern that ends in /, unless the pattern just
      +        // doesn't have any more for it. But, a/b/ should *not*
      +        // match "a/b/*", even though "" matches against the
      +        // [^/]*? pattern, except in partial mode, where it might
      +        // simply not be reached yet.
      +        // However, a/b/ should still satisfy a/*
      +        // now either we fell off the end of the pattern, or we're done.
      +        if (fi === fl && pi === pl) {
      +            // ran out of pattern and filename at the same time.
      +            // an exact hit!
      +            return true;
      +        }
      +        else if (fi === fl) {
      +            // ran out of file, but still had pattern left.
      +            // this is ok if we're doing the match as part of
      +            // a glob fs traversal.
      +            return partial;
      +        }
      +        else if (pi === pl) {
      +            // ran out of pattern, still have file left.
      +            // this is only acceptable if we're on the very last
      +            // empty segment of a file with a trailing slash.
      +            // a/* should match a/b/
      +            return fi === fl - 1 && file[fi] === '';
      +            /* c8 ignore start */
      +        }
      +        else {
      +            // should be unreachable.
      +            throw new Error('wtf?');
      +        }
      +        /* c8 ignore stop */
      +    }
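+    // A minimal illustration of the globstar backtracking above (illustrative
+    // sketch, using the package's public entry point):
+    //
+    //   import { minimatch } from 'minimatch'
+    //   minimatch('a/b/x/y/z/c', 'a/**/b/**/c')            // true
+    //   minimatch('a/.hidden/c', 'a/**/c')                 // false
+    //   minimatch('a/.hidden/c', 'a/**/c', { dot: true })  // true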
      +    braceExpand() {
      +        return braceExpand(this.pattern, this.options);
      +    }
      +    parse(pattern) {
      +        assertValidPattern(pattern);
      +        const options = this.options;
      +        // shortcuts
      +        if (pattern === '**')
      +            return GLOBSTAR;
      +        if (pattern === '')
      +            return '';
      +        // far and away, the most common glob pattern parts are
+        // *, *.*, and *.<ext>  Add a fast check method for those.
      +        let m;
      +        let fastTest = null;
      +        if ((m = pattern.match(starRE))) {
      +            fastTest = options.dot ? starTestDot : starTest;
      +        }
      +        else if ((m = pattern.match(starDotExtRE))) {
      +            fastTest = (options.nocase
      +                ? options.dot
      +                    ? starDotExtTestNocaseDot
      +                    : starDotExtTestNocase
      +                : options.dot
      +                    ? starDotExtTestDot
      +                    : starDotExtTest)(m[1]);
      +        }
      +        else if ((m = pattern.match(qmarksRE))) {
      +            fastTest = (options.nocase
      +                ? options.dot
      +                    ? qmarksTestNocaseDot
      +                    : qmarksTestNocase
      +                : options.dot
      +                    ? qmarksTestDot
      +                    : qmarksTest)(m);
      +        }
      +        else if ((m = pattern.match(starDotStarRE))) {
      +            fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
      +        }
      +        else if ((m = pattern.match(dotStarRE))) {
      +            fastTest = dotStarTest;
      +        }
      +        const re = AST.fromGlob(pattern, this.options).toMMPattern();
      +        return fastTest ? Object.assign(re, { test: fastTest }) : re;
      +    }
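+    // Illustrative sketch: the fast paths above only swap in a hand-written
+    // .test() on the compiled pattern, so common globs stay cheap per segment:
+    //
+    //   import { Minimatch } from 'minimatch'
+    //   new Minimatch('*.js').match('index.js')                     // true
+    //   new Minimatch('*.js').match('.eslintrc.js')                 // false (dotfile)
+    //   new Minimatch('*.js', { dot: true }).match('.eslintrc.js')  // true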
      +    makeRe() {
      +        if (this.regexp || this.regexp === false)
      +            return this.regexp;
      +        // at this point, this.set is a 2d array of partial
      +        // pattern strings, or "**".
      +        //
      +        // It's better to use .match().  This function shouldn't
      +        // be used, really, but it's pretty convenient sometimes,
      +        // when you just want to work with a regex.
      +        const set = this.set;
      +        if (!set.length) {
      +            this.regexp = false;
      +            return this.regexp;
      +        }
      +        const options = this.options;
      +        const twoStar = options.noglobstar
      +            ? star
      +            : options.dot
      +                ? twoStarDot
      +                : twoStarNoDot;
      +        const flags = new Set(options.nocase ? ['i'] : []);
      +        // regexpify non-globstar patterns
      +        // if ** is only item, then we just do one twoStar
      +        // if ** is first, and there are more, prepend (\/|twoStar\/)? to next
      +        // if ** is last, append (\/twoStar|) to previous
      +        // if ** is in the middle, append (\/|\/twoStar\/) to previous
      +        // then filter out GLOBSTAR symbols
      +        let re = set
      +            .map(pattern => {
      +            const pp = pattern.map(p => {
      +                if (p instanceof RegExp) {
      +                    for (const f of p.flags.split(''))
      +                        flags.add(f);
      +                }
      +                return typeof p === 'string'
      +                    ? regExpEscape(p)
      +                    : p === GLOBSTAR
      +                        ? GLOBSTAR
      +                        : p._src;
      +            });
      +            pp.forEach((p, i) => {
      +                const next = pp[i + 1];
      +                const prev = pp[i - 1];
      +                if (p !== GLOBSTAR || prev === GLOBSTAR) {
      +                    return;
      +                }
      +                if (prev === undefined) {
      +                    if (next !== undefined && next !== GLOBSTAR) {
      +                        pp[i + 1] = '(?:\\/|' + twoStar + '\\/)?' + next;
      +                    }
      +                    else {
      +                        pp[i] = twoStar;
      +                    }
      +                }
      +                else if (next === undefined) {
      +                    pp[i - 1] = prev + '(?:\\/|' + twoStar + ')?';
      +                }
      +                else if (next !== GLOBSTAR) {
      +                    pp[i - 1] = prev + '(?:\\/|\\/' + twoStar + '\\/)' + next;
      +                    pp[i + 1] = GLOBSTAR;
      +                }
      +            });
      +            return pp.filter(p => p !== GLOBSTAR).join('/');
      +        })
      +            .join('|');
      +        // need to wrap in parens if we had more than one thing with |,
      +        // otherwise only the first will be anchored to ^ and the last to $
      +        const [open, close] = set.length > 1 ? ['(?:', ')'] : ['', ''];
      +        // must match entire pattern
      +        // ending in a * or ** will make it less strict.
      +        re = '^' + open + re + close + '$';
      +        // can match anything, as long as it's not this.
      +        if (this.negate)
      +            re = '^(?!' + re + ').+$';
      +        try {
      +            this.regexp = new RegExp(re, [...flags].join(''));
      +            /* c8 ignore start */
      +        }
      +        catch (ex) {
      +            // should be impossible
      +            this.regexp = false;
      +        }
      +        /* c8 ignore stop */
      +        return this.regexp;
      +    }
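+    // Illustrative sketch: makeRe() collapses the whole pattern set into one
+    // anchored RegExp, handy when an API only accepts a regexp:
+    //
+    //   import { Minimatch } from 'minimatch'
+    //   const re = new Minimatch('a/**/c').makeRe()
+    //   re.test('a/c')       // true
+    //   re.test('a/b/c')     // true
+    //   re.test('a/b/c/d')   // false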
      +    slashSplit(p) {
      +        // if p starts with // on windows, we preserve that
      +        // so that UNC paths aren't broken.  Otherwise, any number of
      +        // / characters are coalesced into one, unless
      +        // preserveMultipleSlashes is set to true.
      +        if (this.preserveMultipleSlashes) {
      +            return p.split('/');
      +        }
      +        else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
      +            // add an extra '' for the one we lose
      +            return ['', ...p.split(/\/+/)];
      +        }
      +        else {
      +            return p.split(/\/+/);
      +        }
      +    }
      +    match(f, partial = this.partial) {
      +        this.debug('match', f, this.pattern);
      +        // short-circuit in the case of busted things.
      +        // comments, etc.
      +        if (this.comment) {
      +            return false;
      +        }
      +        if (this.empty) {
      +            return f === '';
      +        }
      +        if (f === '/' && partial) {
      +            return true;
      +        }
      +        const options = this.options;
      +        // windows: need to use /, not \
      +        if (this.isWindows) {
      +            f = f.split('\\').join('/');
      +        }
      +        // treat the test path as a set of pathparts.
      +        const ff = this.slashSplit(f);
      +        this.debug(this.pattern, 'split', ff);
      +        // just ONE of the pattern sets in this.set needs to match
      +        // in order for it to be valid.  If negating, then just one
      +        // match means that we have failed.
      +        // Either way, return on the first hit.
      +        const set = this.set;
      +        this.debug(this.pattern, 'set', set);
      +        // Find the basename of the path by looking for the last non-empty segment
      +        let filename = ff[ff.length - 1];
      +        if (!filename) {
      +            for (let i = ff.length - 2; !filename && i >= 0; i--) {
      +                filename = ff[i];
      +            }
      +        }
      +        for (let i = 0; i < set.length; i++) {
      +            const pattern = set[i];
      +            let file = ff;
      +            if (options.matchBase && pattern.length === 1) {
      +                file = [filename];
      +            }
      +            const hit = this.matchOne(file, pattern, partial);
      +            if (hit) {
      +                if (options.flipNegate) {
      +                    return true;
      +                }
      +                return !this.negate;
      +            }
      +        }
      +        // didn't get any hits.  this is success if it's a negative
      +        // pattern, failure otherwise.
      +        if (options.flipNegate) {
      +            return false;
      +        }
      +        return this.negate;
      +    }
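+    // Illustrative sketch of the options handled above:
+    //
+    //   import { minimatch } from 'minimatch'
+    //   minimatch('src/lib/util.js', 'util.js', { matchBase: true })  // true
+    //   minimatch('src/lib/util.js', '!**/*.test.js')                 // true (negated)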
      +    static defaults(def) {
      +        return minimatch.defaults(def).Minimatch;
      +    }
      +}
      +/* c8 ignore start */
      +export { AST } from './ast.js';
      +export { escape } from './escape.js';
      +export { unescape } from './unescape.js';
      +/* c8 ignore stop */
      +minimatch.AST = AST;
      +minimatch.Minimatch = Minimatch;
      +minimatch.escape = escape;
      +minimatch.unescape = unescape;
      +//# sourceMappingURL=index.js.map
      \ No newline at end of file
      diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/package.json b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/package.json
      new file mode 100644
      index 00000000000000..3dbc1ca591c055
      --- /dev/null
      +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/package.json
      @@ -0,0 +1,3 @@
      +{
      +  "type": "module"
      +}
      diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/unescape.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/unescape.js
      new file mode 100644
      index 00000000000000..0faf9a2b7306f7
      --- /dev/null
      +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/unescape.js
      @@ -0,0 +1,20 @@
      +/**
      + * Un-escape a string that has been escaped with {@link escape}.
      + *
      + * If the {@link windowsPathsNoEscape} option is used, then square-brace
      + * escapes are removed, but not backslash escapes.  For example, it will turn
      + * the string `'[*]'` into `*`, but it will not turn `'\\*'` into `'*'`,
+ * because `\` is a path separator in `windowsPathsNoEscape` mode.
      + *
      + * When `windowsPathsNoEscape` is not set, then both brace escapes and
      + * backslash escapes are removed.
      + *
      + * Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot be escaped
      + * or unescaped.
      + */
      +export const unescape = (s, { windowsPathsNoEscape = false, } = {}) => {
      +    return windowsPathsNoEscape
      +        ? s.replace(/\[([^\/\\])\]/g, '$1')
      +        : s.replace(/((?!\\).|^)\[([^\/\\])\]/g, '$1$2').replace(/\\([^\/])/g, '$1');
      +};
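+// Illustrative examples:
+//   unescape('[*]')                                  // '*'
+//   unescape('\\*')                                  // '*'
+//   unescape('\\*', { windowsPathsNoEscape: true })  // '\\*' (backslash kept)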
      +//# sourceMappingURL=unescape.js.map
      \ No newline at end of file
      diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/package.json b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/package.json
      new file mode 100644
      index 00000000000000..061c3b9f343306
      --- /dev/null
      +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/package.json
      @@ -0,0 +1,86 @@
      +{
      +  "author": "Isaac Z. Schlueter  (http://blog.izs.me)",
      +  "name": "minimatch",
      +  "description": "a glob matcher in javascript",
      +  "version": "9.0.3",
      +  "repository": {
      +    "type": "git",
      +    "url": "git://github.com/isaacs/minimatch.git"
      +  },
      +  "main": "./dist/cjs/index.js",
      +  "module": "./dist/mjs/index.js",
      +  "types": "./dist/cjs/index.d.ts",
      +  "exports": {
      +    ".": {
      +      "import": {
      +        "types": "./dist/mjs/index.d.ts",
      +        "default": "./dist/mjs/index.js"
      +      },
      +      "require": {
      +        "types": "./dist/cjs/index.d.ts",
      +        "default": "./dist/cjs/index.js"
      +      }
      +    }
      +  },
      +  "files": [
      +    "dist"
      +  ],
      +  "scripts": {
      +    "preversion": "npm test",
      +    "postversion": "npm publish",
      +    "prepublishOnly": "git push origin --follow-tags",
      +    "preprepare": "rm -rf dist",
      +    "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json",
      +    "postprepare": "bash fixup.sh",
      +    "pretest": "npm run prepare",
      +    "presnap": "npm run prepare",
      +    "test": "c8 tap",
      +    "snap": "c8 tap",
      +    "format": "prettier --write . --loglevel warn",
      +    "benchmark": "node benchmark/index.js",
      +    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts"
      +  },
      +  "prettier": {
      +    "semi": false,
      +    "printWidth": 80,
      +    "tabWidth": 2,
      +    "useTabs": false,
      +    "singleQuote": true,
      +    "jsxSingleQuote": false,
      +    "bracketSameLine": true,
      +    "arrowParens": "avoid",
      +    "endOfLine": "lf"
      +  },
      +  "engines": {
      +    "node": ">=16 || 14 >=14.17"
      +  },
      +  "dependencies": {
      +    "brace-expansion": "^2.0.1"
      +  },
      +  "devDependencies": {
      +    "@types/brace-expansion": "^1.1.0",
      +    "@types/node": "^18.15.11",
      +    "@types/tap": "^15.0.8",
      +    "c8": "^7.12.0",
      +    "eslint-config-prettier": "^8.6.0",
      +    "mkdirp": "1",
      +    "prettier": "^2.8.2",
      +    "tap": "^16.3.7",
      +    "ts-node": "^10.9.1",
      +    "typedoc": "^0.23.21",
      +    "typescript": "^4.9.3"
      +  },
      +  "tap": {
      +    "coverage": false,
      +    "node-arg": [
      +      "--no-warnings",
      +      "--loader",
      +      "ts-node/esm"
      +    ],
      +    "ts": false
      +  },
      +  "funding": {
      +    "url": "https://github.com/sponsors/isaacs"
      +  },
      +  "license": "ISC"
      +}
      diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/LICENSE
      new file mode 100644
      index 00000000000000..97f8e32ed82e4c
      --- /dev/null
      +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/LICENSE
      @@ -0,0 +1,15 @@
      +The ISC License
      +
      +Copyright (c) 2017-2023 npm, Inc., Isaac Z. Schlueter, and Contributors
      +
      +Permission to use, copy, modify, and/or distribute this software for any
      +purpose with or without fee is hereby granted, provided that the above
      +copyright notice and this permission notice appear in all copies.
      +
      +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
      +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
      +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
      +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
      +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
      +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
      +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
      diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/cjs/index.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/cjs/index.js
      new file mode 100644
      index 00000000000000..b6cdae8eb514b8
      --- /dev/null
      +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/cjs/index.js
      @@ -0,0 +1,1028 @@
      +"use strict";
      +var __importDefault = (this && this.__importDefault) || function (mod) {
      +    return (mod && mod.__esModule) ? mod : { "default": mod };
      +};
      +Object.defineProperty(exports, "__esModule", { value: true });
      +exports.Minipass = exports.isWritable = exports.isReadable = exports.isStream = void 0;
      +const proc = typeof process === 'object' && process
      +    ? process
      +    : {
      +        stdout: null,
      +        stderr: null,
      +    };
      +const events_1 = require("events");
      +const stream_1 = __importDefault(require("stream"));
      +const string_decoder_1 = require("string_decoder");
      +/**
      + * Return true if the argument is a Minipass stream, Node stream, or something
      + * else that Minipass can interact with.
      + */
      +const isStream = (s) => !!s &&
      +    typeof s === 'object' &&
      +    (s instanceof Minipass ||
      +        s instanceof stream_1.default ||
      +        (0, exports.isReadable)(s) ||
      +        (0, exports.isWritable)(s));
      +exports.isStream = isStream;
      +/**
      + * Return true if the argument is a valid {@link Minipass.Readable}
      + */
      +const isReadable = (s) => !!s &&
      +    typeof s === 'object' &&
      +    s instanceof events_1.EventEmitter &&
      +    typeof s.pipe === 'function' &&
      +    // node core Writable streams have a pipe() method, but it throws
      +    s.pipe !== stream_1.default.Writable.prototype.pipe;
      +exports.isReadable = isReadable;
      +/**
      + * Return true if the argument is a valid {@link Minipass.Writable}
      + */
      +const isWritable = (s) => !!s &&
      +    typeof s === 'object' &&
      +    s instanceof events_1.EventEmitter &&
      +    typeof s.write === 'function' &&
      +    typeof s.end === 'function';
      +exports.isWritable = isWritable;
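+// Illustrative sketch of the guards above (node core streams used for contrast):
+//
+//   const { Writable } = require('stream')
+//   isStream(new Minipass())    // true
+//   isReadable(new Minipass())  // true
+//   isWritable(new Writable())  // true
+//   isReadable(new Writable())  // false (core Writable#pipe only throws)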
      +const EOF = Symbol('EOF');
      +const MAYBE_EMIT_END = Symbol('maybeEmitEnd');
      +const EMITTED_END = Symbol('emittedEnd');
      +const EMITTING_END = Symbol('emittingEnd');
      +const EMITTED_ERROR = Symbol('emittedError');
      +const CLOSED = Symbol('closed');
      +const READ = Symbol('read');
      +const FLUSH = Symbol('flush');
      +const FLUSHCHUNK = Symbol('flushChunk');
      +const ENCODING = Symbol('encoding');
      +const DECODER = Symbol('decoder');
      +const FLOWING = Symbol('flowing');
      +const PAUSED = Symbol('paused');
      +const RESUME = Symbol('resume');
      +const BUFFER = Symbol('buffer');
      +const PIPES = Symbol('pipes');
      +const BUFFERLENGTH = Symbol('bufferLength');
      +const BUFFERPUSH = Symbol('bufferPush');
      +const BUFFERSHIFT = Symbol('bufferShift');
      +const OBJECTMODE = Symbol('objectMode');
      +// internal event when stream is destroyed
      +const DESTROYED = Symbol('destroyed');
      +// internal event when stream has an error
      +const ERROR = Symbol('error');
      +const EMITDATA = Symbol('emitData');
      +const EMITEND = Symbol('emitEnd');
      +const EMITEND2 = Symbol('emitEnd2');
      +const ASYNC = Symbol('async');
      +const ABORT = Symbol('abort');
      +const ABORTED = Symbol('aborted');
      +const SIGNAL = Symbol('signal');
      +const DATALISTENERS = Symbol('dataListeners');
      +const DISCARDED = Symbol('discarded');
      +const defer = (fn) => Promise.resolve().then(fn);
      +const nodefer = (fn) => fn();
      +const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish';
      +const isArrayBufferLike = (b) => b instanceof ArrayBuffer ||
      +    (!!b &&
      +        typeof b === 'object' &&
      +        b.constructor &&
      +        b.constructor.name === 'ArrayBuffer' &&
      +        b.byteLength >= 0);
      +const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b);
      +/**
      + * Internal class representing a pipe to a destination stream.
      + *
      + * @internal
      + */
      +class Pipe {
      +    src;
      +    dest;
      +    opts;
      +    ondrain;
      +    constructor(src, dest, opts) {
      +        this.src = src;
      +        this.dest = dest;
      +        this.opts = opts;
      +        this.ondrain = () => src[RESUME]();
      +        this.dest.on('drain', this.ondrain);
      +    }
      +    unpipe() {
      +        this.dest.removeListener('drain', this.ondrain);
      +    }
      +    // only here for the prototype
      +    /* c8 ignore start */
      +    proxyErrors(_er) { }
      +    /* c8 ignore stop */
      +    end() {
      +        this.unpipe();
      +        if (this.opts.end)
      +            this.dest.end();
      +    }
      +}
      +/**
      + * Internal class representing a pipe to a destination stream where
      + * errors are proxied.
      + *
      + * @internal
      + */
      +class PipeProxyErrors extends Pipe {
      +    unpipe() {
      +        this.src.removeListener('error', this.proxyErrors);
      +        super.unpipe();
      +    }
      +    constructor(src, dest, opts) {
      +        super(src, dest, opts);
      +        this.proxyErrors = er => dest.emit('error', er);
      +        src.on('error', this.proxyErrors);
      +    }
      +}
      +const isObjectModeOptions = (o) => !!o.objectMode;
      +const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer';
      +/**
      + * Main export, the Minipass class
      + *
      + * `RType` is the type of data emitted, defaults to Buffer
      + *
+ * `WType` is the type of data to be written. If RType is buffer or string,
      + * then any {@link Minipass.ContiguousData} is allowed.
      + *
      + * `Events` is the set of event handler signatures that this object
      + * will emit, see {@link Minipass.Events}
      + */
      +class Minipass extends events_1.EventEmitter {
      +    [FLOWING] = false;
      +    [PAUSED] = false;
      +    [PIPES] = [];
      +    [BUFFER] = [];
      +    [OBJECTMODE];
      +    [ENCODING];
      +    [ASYNC];
      +    [DECODER];
      +    [EOF] = false;
      +    [EMITTED_END] = false;
      +    [EMITTING_END] = false;
      +    [CLOSED] = false;
      +    [EMITTED_ERROR] = null;
      +    [BUFFERLENGTH] = 0;
      +    [DESTROYED] = false;
      +    [SIGNAL];
      +    [ABORTED] = false;
      +    [DATALISTENERS] = 0;
      +    [DISCARDED] = false;
      +    /**
      +     * true if the stream can be written
      +     */
      +    writable = true;
      +    /**
      +     * true if the stream can be read
      +     */
      +    readable = true;
      +    /**
      +     * If `RType` is Buffer, then options do not need to be provided.
      +     * Otherwise, an options object must be provided to specify either
      +     * {@link Minipass.SharedOptions.objectMode} or
      +     * {@link Minipass.SharedOptions.encoding}, as appropriate.
      +     */
      +    constructor(...args) {
      +        const options = (args[0] ||
      +            {});
      +        super();
      +        if (options.objectMode && typeof options.encoding === 'string') {
      +            throw new TypeError('Encoding and objectMode may not be used together');
      +        }
      +        if (isObjectModeOptions(options)) {
      +            this[OBJECTMODE] = true;
      +            this[ENCODING] = null;
      +        }
      +        else if (isEncodingOptions(options)) {
      +            this[ENCODING] = options.encoding;
      +            this[OBJECTMODE] = false;
      +        }
      +        else {
      +            this[OBJECTMODE] = false;
      +            this[ENCODING] = null;
      +        }
      +        this[ASYNC] = !!options.async;
      +        this[DECODER] = this[ENCODING]
      +            ? new string_decoder_1.StringDecoder(this[ENCODING])
      +            : null;
      +        //@ts-ignore - private option for debugging and testing
      +        if (options && options.debugExposeBuffer === true) {
      +            Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] });
      +        }
      +        //@ts-ignore - private option for debugging and testing
      +        if (options && options.debugExposePipes === true) {
      +            Object.defineProperty(this, 'pipes', { get: () => this[PIPES] });
      +        }
      +        const { signal } = options;
      +        if (signal) {
      +            this[SIGNAL] = signal;
      +            if (signal.aborted) {
      +                this[ABORT]();
      +            }
      +            else {
      +                signal.addEventListener('abort', () => this[ABORT]());
      +            }
      +        }
      +    }
      +    /**
      +     * The amount of data stored in the buffer waiting to be read.
      +     *
+     * For Buffer streams, this will be the total byte length.
      +     * For string encoding streams, this will be the string character length,
      +     * according to JavaScript's `string.length` logic.
      +     * For objectMode streams, this is a count of the items waiting to be
      +     * emitted.
      +     */
      +    get bufferLength() {
      +        return this[BUFFERLENGTH];
      +    }
      +    /**
      +     * The `BufferEncoding` currently in use, or `null`
      +     */
      +    get encoding() {
      +        return this[ENCODING];
      +    }
      +    /**
+     * @deprecated - This is a read-only property
      +     */
      +    set encoding(_enc) {
      +        throw new Error('Encoding must be set at instantiation time');
      +    }
      +    /**
      +     * @deprecated - Encoding may only be set at instantiation time
      +     */
      +    setEncoding(_enc) {
      +        throw new Error('Encoding must be set at instantiation time');
      +    }
      +    /**
      +     * True if this is an objectMode stream
      +     */
      +    get objectMode() {
      +        return this[OBJECTMODE];
      +    }
      +    /**
      +     * @deprecated - This is a read-only property
      +     */
      +    set objectMode(_om) {
      +        throw new Error('objectMode must be set at instantiation time');
      +    }
      +    /**
      +     * true if this is an async stream
      +     */
      +    get ['async']() {
      +        return this[ASYNC];
      +    }
      +    /**
      +     * Set to true to make this stream async.
      +     *
      +     * Once set, it cannot be unset, as this would potentially cause incorrect
      +     * behavior.  Ie, a sync stream can be made async, but an async stream
      +     * cannot be safely made sync.
      +     */
      +    set ['async'](a) {
      +        this[ASYNC] = this[ASYNC] || !!a;
      +    }
      +    // drop everything and get out of the flow completely
      +    [ABORT]() {
      +        this[ABORTED] = true;
      +        this.emit('abort', this[SIGNAL]?.reason);
      +        this.destroy(this[SIGNAL]?.reason);
      +    }
      +    /**
      +     * True if the stream has been aborted.
      +     */
      +    get aborted() {
      +        return this[ABORTED];
      +    }
      +    /**
      +     * No-op setter. Stream aborted status is set via the AbortSignal provided
      +     * in the constructor options.
      +     */
      +    set aborted(_) { }
      +    write(chunk, encoding, cb) {
      +        if (this[ABORTED])
      +            return false;
      +        if (this[EOF])
      +            throw new Error('write after end');
      +        if (this[DESTROYED]) {
      +            this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' }));
      +            return true;
      +        }
      +        if (typeof encoding === 'function') {
      +            cb = encoding;
      +            encoding = 'utf8';
      +        }
      +        if (!encoding)
      +            encoding = 'utf8';
      +        const fn = this[ASYNC] ? defer : nodefer;
      +        // convert array buffers and typed array views into buffers
      +        // at some point in the future, we may want to do the opposite!
      +        // leave strings and buffers as-is
+        // anything else is only allowed in object mode, so throw
      +        if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
      +            if (isArrayBufferView(chunk)) {
      +                //@ts-ignore - sinful unsafe type changing
      +                chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength);
      +            }
      +            else if (isArrayBufferLike(chunk)) {
      +                //@ts-ignore - sinful unsafe type changing
      +                chunk = Buffer.from(chunk);
      +            }
      +            else if (typeof chunk !== 'string') {
      +                throw new Error('Non-contiguous data written to non-objectMode stream');
      +            }
      +        }
      +        // handle object mode up front, since it's simpler
      +        // this yields better performance, fewer checks later.
      +        if (this[OBJECTMODE]) {
      +            // maybe impossible?
      +            /* c8 ignore start */
      +            if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
      +                this[FLUSH](true);
      +            /* c8 ignore stop */
      +            if (this[FLOWING])
      +                this.emit('data', chunk);
      +            else
      +                this[BUFFERPUSH](chunk);
      +            if (this[BUFFERLENGTH] !== 0)
      +                this.emit('readable');
      +            if (cb)
      +                fn(cb);
      +            return this[FLOWING];
      +        }
      +        // at this point the chunk is a buffer or string
+        // if it's empty, don't buffer it up or send it to the decoder
      +        if (!chunk.length) {
      +            if (this[BUFFERLENGTH] !== 0)
      +                this.emit('readable');
      +            if (cb)
      +                fn(cb);
      +            return this[FLOWING];
      +        }
      +        // fast-path writing strings of same encoding to a stream with
      +        // an empty buffer, skipping the buffer/decoder dance
      +        if (typeof chunk === 'string' &&
      +            // unless it is a string already ready for us to use
      +            !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) {
      +            //@ts-ignore - sinful unsafe type change
      +            chunk = Buffer.from(chunk, encoding);
      +        }
      +        if (Buffer.isBuffer(chunk) && this[ENCODING]) {
      +            //@ts-ignore - sinful unsafe type change
      +            chunk = this[DECODER].write(chunk);
      +        }
      +        // Note: flushing CAN potentially switch us into not-flowing mode
      +        if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
      +            this[FLUSH](true);
      +        if (this[FLOWING])
      +            this.emit('data', chunk);
      +        else
      +            this[BUFFERPUSH](chunk);
      +        if (this[BUFFERLENGTH] !== 0)
      +            this.emit('readable');
      +        if (cb)
      +            fn(cb);
      +        return this[FLOWING];
      +    }
      +    /**
      +     * Low-level explicit read method.
      +     *
      +     * In objectMode, the argument is ignored, and one item is returned if
      +     * available.
      +     *
      +     * `n` is the number of bytes (or in the case of encoding streams,
      +     * characters) to consume. If `n` is not provided, then the entire buffer
      +     * is returned, or `null` is returned if no data is available.
      +     *
+     * If `n` is greater than the amount of data in the internal buffer,
      +     * then `null` is returned.
      +     */
      +    read(n) {
      +        if (this[DESTROYED])
      +            return null;
      +        this[DISCARDED] = false;
      +        if (this[BUFFERLENGTH] === 0 ||
      +            n === 0 ||
      +            (n && n > this[BUFFERLENGTH])) {
      +            this[MAYBE_EMIT_END]();
      +            return null;
      +        }
      +        if (this[OBJECTMODE])
      +            n = null;
      +        if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
      +            // not object mode, so if we have an encoding, then RType is string
      +            // otherwise, must be Buffer
      +            this[BUFFER] = [
      +                (this[ENCODING]
      +                    ? this[BUFFER].join('')
      +                    : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])),
      +            ];
      +        }
      +        const ret = this[READ](n || null, this[BUFFER][0]);
      +        this[MAYBE_EMIT_END]();
      +        return ret;
      +    }
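+    // Illustrative sketch: asking read(n) for more than is buffered yields
+    // null rather than a short read.
+    //
+    //   const mp = new Minipass({ encoding: 'utf8' })
+    //   mp.write('foobar')
+    //   mp.read(3)   // 'foo'
+    //   mp.read(10)  // null ('bar' is all that remains buffered)
+    //   mp.read()    // 'bar'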
      +    [READ](n, chunk) {
      +        if (this[OBJECTMODE])
      +            this[BUFFERSHIFT]();
      +        else {
      +            const c = chunk;
      +            if (n === c.length || n === null)
      +                this[BUFFERSHIFT]();
      +            else if (typeof c === 'string') {
      +                this[BUFFER][0] = c.slice(n);
      +                chunk = c.slice(0, n);
      +                this[BUFFERLENGTH] -= n;
      +            }
      +            else {
      +                this[BUFFER][0] = c.subarray(n);
      +                chunk = c.subarray(0, n);
      +                this[BUFFERLENGTH] -= n;
      +            }
      +        }
      +        this.emit('data', chunk);
      +        if (!this[BUFFER].length && !this[EOF])
      +            this.emit('drain');
      +        return chunk;
      +    }
      +    end(chunk, encoding, cb) {
      +        if (typeof chunk === 'function') {
      +            cb = chunk;
      +            chunk = undefined;
      +        }
      +        if (typeof encoding === 'function') {
      +            cb = encoding;
      +            encoding = 'utf8';
      +        }
      +        if (chunk !== undefined)
      +            this.write(chunk, encoding);
      +        if (cb)
      +            this.once('end', cb);
      +        this[EOF] = true;
      +        this.writable = false;
      +        // if we haven't written anything, then go ahead and emit,
      +        // even if we're not reading.
      +        // we'll re-emit if a new 'end' listener is added anyway.
      +        // This makes MP more suitable to write-only use cases.
      +        if (this[FLOWING] || !this[PAUSED])
      +            this[MAYBE_EMIT_END]();
      +        return this;
      +    }
      +    // don't let the internal resume be overwritten
      +    [RESUME]() {
      +        if (this[DESTROYED])
      +            return;
      +        if (!this[DATALISTENERS] && !this[PIPES].length) {
      +            this[DISCARDED] = true;
      +        }
      +        this[PAUSED] = false;
      +        this[FLOWING] = true;
      +        this.emit('resume');
      +        if (this[BUFFER].length)
      +            this[FLUSH]();
      +        else if (this[EOF])
      +            this[MAYBE_EMIT_END]();
      +        else
      +            this.emit('drain');
      +    }
      +    /**
      +     * Resume the stream if it is currently in a paused state
      +     *
      +     * If called when there are no pipe destinations or `data` event listeners,
      +     * this will place the stream in a "discarded" state, where all data will
      +     * be thrown away. The discarded state is removed if a pipe destination or
      +     * data handler is added, if pause() is called, or if any synchronous or
      +     * asynchronous iteration is started.
      +     */
      +    resume() {
      +        return this[RESUME]();
      +    }
      +    /**
      +     * Pause the stream
      +     */
      +    pause() {
      +        this[FLOWING] = false;
      +        this[PAUSED] = true;
      +        this[DISCARDED] = false;
      +    }
      +    /**
      +     * true if the stream has been forcibly destroyed
      +     */
      +    get destroyed() {
      +        return this[DESTROYED];
      +    }
      +    /**
      +     * true if the stream is currently in a flowing state, meaning that
      +     * any writes will be immediately emitted.
      +     */
      +    get flowing() {
      +        return this[FLOWING];
      +    }
      +    /**
      +     * true if the stream is currently in a paused state
      +     */
      +    get paused() {
      +        return this[PAUSED];
      +    }
      +    [BUFFERPUSH](chunk) {
      +        if (this[OBJECTMODE])
      +            this[BUFFERLENGTH] += 1;
      +        else
      +            this[BUFFERLENGTH] += chunk.length;
      +        this[BUFFER].push(chunk);
      +    }
      +    [BUFFERSHIFT]() {
      +        if (this[OBJECTMODE])
      +            this[BUFFERLENGTH] -= 1;
      +        else
      +            this[BUFFERLENGTH] -= this[BUFFER][0].length;
      +        return this[BUFFER].shift();
      +    }
      +    [FLUSH](noDrain = false) {
      +        do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) &&
      +            this[BUFFER].length);
      +        if (!noDrain && !this[BUFFER].length && !this[EOF])
      +            this.emit('drain');
      +    }
      +    [FLUSHCHUNK](chunk) {
      +        this.emit('data', chunk);
      +        return this[FLOWING];
      +    }
      +    /**
      +     * Pipe all data emitted by this stream into the destination provided.
      +     *
      +     * Triggers the flow of data.
      +     */
      +    pipe(dest, opts) {
      +        if (this[DESTROYED])
      +            return dest;
      +        this[DISCARDED] = false;
      +        const ended = this[EMITTED_END];
      +        opts = opts || {};
      +        if (dest === proc.stdout || dest === proc.stderr)
      +            opts.end = false;
      +        else
      +            opts.end = opts.end !== false;
      +        opts.proxyErrors = !!opts.proxyErrors;
      +        // piping an ended stream ends immediately
      +        if (ended) {
      +            if (opts.end)
      +                dest.end();
      +        }
      +        else {
      +            // "as" here just ignores the WType, which pipes don't care about,
      +            // since they're only consuming from us, and writing to the dest
      +            this[PIPES].push(!opts.proxyErrors
      +                ? new Pipe(this, dest, opts)
      +                : new PipeProxyErrors(this, dest, opts));
      +            if (this[ASYNC])
      +                defer(() => this[RESUME]());
      +            else
      +                this[RESUME]();
      +        }
      +        return dest;
      +    }
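+    // Illustrative sketch: piping starts the flow of data; stdio destinations
+    // are never ended automatically (handled above).
+    //
+    //   const src = new Minipass({ encoding: 'utf8' })
+    //   src.pipe(process.stdout)   // opts.end forced to false for stdout
+    //   src.end('hello\n')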
      +    /**
      +     * Fully unhook a piped destination stream.
      +     *
      +     * If the destination stream was the only consumer of this stream (ie,
      +     * there are no other piped destinations or `'data'` event listeners)
      +     * then the flow of data will stop until there is another consumer or
      +     * {@link Minipass#resume} is explicitly called.
      +     */
      +    unpipe(dest) {
      +        const p = this[PIPES].find(p => p.dest === dest);
      +        if (p) {
      +            if (this[PIPES].length === 1) {
      +                if (this[FLOWING] && this[DATALISTENERS] === 0) {
      +                    this[FLOWING] = false;
      +                }
      +                this[PIPES] = [];
      +            }
      +            else
      +                this[PIPES].splice(this[PIPES].indexOf(p), 1);
      +            p.unpipe();
      +        }
      +    }
      +    /**
      +     * Alias for {@link Minipass#on}
      +     */
      +    addListener(ev, handler) {
      +        return this.on(ev, handler);
      +    }
      +    /**
      +     * Mostly identical to `EventEmitter.on`, with the following
      +     * behavior differences to prevent data loss and unnecessary hangs:
      +     *
      +     * - Adding a 'data' event handler will trigger the flow of data
      +     *
      +     * - Adding a 'readable' event handler when there is data waiting to be read
      +     *   will cause 'readable' to be emitted immediately.
      +     *
      +     * - Adding an 'endish' event handler ('end', 'finish', etc.) which has
      +     *   already passed will cause the event to be emitted immediately and all
      +     *   handlers removed.
      +     *
      +     * - Adding an 'error' event handler after an error has been emitted will
      +     *   cause the event to be re-emitted immediately with the error previously
      +     *   raised.
      +     */
      +    on(ev, handler) {
      +        const ret = super.on(ev, handler);
      +        if (ev === 'data') {
      +            this[DISCARDED] = false;
      +            this[DATALISTENERS]++;
      +            if (!this[PIPES].length && !this[FLOWING]) {
      +                this[RESUME]();
      +            }
      +        }
      +        else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) {
      +            super.emit('readable');
      +        }
      +        else if (isEndish(ev) && this[EMITTED_END]) {
      +            super.emit(ev);
      +            this.removeAllListeners(ev);
      +        }
      +        else if (ev === 'error' && this[EMITTED_ERROR]) {
      +            const h = handler;
      +            if (this[ASYNC])
      +                defer(() => h.call(this, this[EMITTED_ERROR]));
      +            else
      +                h.call(this, this[EMITTED_ERROR]);
      +        }
      +        return ret;
      +    }
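+    // Illustrative sketch of the late-listener behavior above:
+    //
+    //   const mp = new Minipass({ encoding: 'utf8' })
+    //   mp.end('hello')                          // ends before anyone listens
+    //   mp.on('data', c => console.log(c))       // starts flow, logs 'hello'
+    //   mp.on('end', () => console.log('done'))  // 'end' already fired, re-emitted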
      +    /**
      +     * Alias for {@link Minipass#off}
      +     */
      +    removeListener(ev, handler) {
      +        return this.off(ev, handler);
      +    }
      +    /**
      +     * Mostly identical to `EventEmitter.off`
      +     *
      +     * If a 'data' event handler is removed, and it was the last consumer
      +     * (ie, there are no pipe destinations or other 'data' event listeners),
      +     * then the flow of data will stop until there is another consumer or
      +     * {@link Minipass#resume} is explicitly called.
      +     */
      +    off(ev, handler) {
      +        const ret = super.off(ev, handler);
      +        // if we previously had listeners, and now we don't, and we don't
      +        // have any pipes, then stop the flow, unless it's been explicitly
      +        // put in a discarded flowing state via stream.resume().
      +        if (ev === 'data') {
      +            this[DATALISTENERS] = this.listeners('data').length;
      +            if (this[DATALISTENERS] === 0 &&
      +                !this[DISCARDED] &&
      +                !this[PIPES].length) {
      +                this[FLOWING] = false;
      +            }
      +        }
      +        return ret;
      +    }
      +    /**
      +     * Mostly identical to `EventEmitter.removeAllListeners`
      +     *
      +     * If all 'data' event handlers are removed, and they were the last consumer
      +     * (ie, there are no pipe destinations), then the flow of data will stop
      +     * until there is another consumer or {@link Minipass#resume} is explicitly
      +     * called.
      +     */
      +    removeAllListeners(ev) {
      +        const ret = super.removeAllListeners(ev);
      +        if (ev === 'data' || ev === undefined) {
      +            this[DATALISTENERS] = 0;
      +            if (!this[DISCARDED] && !this[PIPES].length) {
      +                this[FLOWING] = false;
      +            }
      +        }
      +        return ret;
      +    }
      +    /**
      +     * true if the 'end' event has been emitted
      +     */
      +    get emittedEnd() {
      +        return this[EMITTED_END];
      +    }
      +    [MAYBE_EMIT_END]() {
      +        if (!this[EMITTING_END] &&
      +            !this[EMITTED_END] &&
      +            !this[DESTROYED] &&
      +            this[BUFFER].length === 0 &&
      +            this[EOF]) {
      +            this[EMITTING_END] = true;
      +            this.emit('end');
      +            this.emit('prefinish');
      +            this.emit('finish');
      +            if (this[CLOSED])
      +                this.emit('close');
      +            this[EMITTING_END] = false;
      +        }
      +    }
      +    /**
      +     * Mostly identical to `EventEmitter.emit`, with the following
      +     * behavior differences to prevent data loss and unnecessary hangs:
      +     *
      +     * If the stream has been destroyed, and the event is something other
      +     * than 'close' or 'error', then `false` is returned and no handlers
      +     * are called.
      +     *
      +     * If the event is 'end', and has already been emitted, then the event
      +     * is ignored. If the stream is in a paused or non-flowing state, then
      +     * the event will be deferred until data flow resumes. If the stream is
      +     * async, then handlers will be called on the next tick rather than
      +     * immediately.
      +     *
      +     * If the event is 'close', and 'end' has not yet been emitted, then
      +     * the event will be deferred until after 'end' is emitted.
      +     *
      +     * If the event is 'error', and an AbortSignal was provided for the stream,
      +     * and there are no listeners, then the event is ignored, matching the
+     * behavior of node core streams in the presence of an AbortSignal.
      +     *
      +     * If the event is 'finish' or 'prefinish', then all listeners will be
      +     * removed after emitting the event, to prevent double-firing.
      +     */
      +    emit(ev, ...args) {
      +        const data = args[0];
      +        // error and close are only events allowed after calling destroy()
      +        if (ev !== 'error' &&
      +            ev !== 'close' &&
      +            ev !== DESTROYED &&
      +            this[DESTROYED]) {
      +            return false;
      +        }
      +        else if (ev === 'data') {
      +            return !this[OBJECTMODE] && !data
      +                ? false
      +                : this[ASYNC]
      +                    ? (defer(() => this[EMITDATA](data)), true)
      +                    : this[EMITDATA](data);
      +        }
      +        else if (ev === 'end') {
      +            return this[EMITEND]();
      +        }
      +        else if (ev === 'close') {
      +            this[CLOSED] = true;
      +            // don't emit close before 'end' and 'finish'
      +            if (!this[EMITTED_END] && !this[DESTROYED])
      +                return false;
      +            const ret = super.emit('close');
      +            this.removeAllListeners('close');
      +            return ret;
      +        }
      +        else if (ev === 'error') {
      +            this[EMITTED_ERROR] = data;
      +            super.emit(ERROR, data);
      +            const ret = !this[SIGNAL] || this.listeners('error').length
      +                ? super.emit('error', data)
      +                : false;
      +            this[MAYBE_EMIT_END]();
      +            return ret;
      +        }
      +        else if (ev === 'resume') {
      +            const ret = super.emit('resume');
      +            this[MAYBE_EMIT_END]();
      +            return ret;
      +        }
      +        else if (ev === 'finish' || ev === 'prefinish') {
      +            const ret = super.emit(ev);
      +            this.removeAllListeners(ev);
      +            return ret;
      +        }
      +        // Some other unknown event
      +        const ret = super.emit(ev, ...args);
      +        this[MAYBE_EMIT_END]();
      +        return ret;
      +    }
      +    [EMITDATA](data) {
      +        for (const p of this[PIPES]) {
      +            if (p.dest.write(data) === false)
      +                this.pause();
      +        }
      +        const ret = this[DISCARDED] ? false : super.emit('data', data);
      +        this[MAYBE_EMIT_END]();
      +        return ret;
      +    }
      +    [EMITEND]() {
      +        if (this[EMITTED_END])
      +            return false;
      +        this[EMITTED_END] = true;
      +        this.readable = false;
      +        return this[ASYNC]
      +            ? (defer(() => this[EMITEND2]()), true)
      +            : this[EMITEND2]();
      +    }
      +    [EMITEND2]() {
      +        if (this[DECODER]) {
      +            const data = this[DECODER].end();
      +            if (data) {
      +                for (const p of this[PIPES]) {
      +                    p.dest.write(data);
      +                }
      +                if (!this[DISCARDED])
      +                    super.emit('data', data);
      +            }
      +        }
      +        for (const p of this[PIPES]) {
      +            p.end();
      +        }
      +        const ret = super.emit('end');
      +        this.removeAllListeners('end');
      +        return ret;
      +    }
      +    /**
      +     * Return a Promise that resolves to an array of all emitted data once
      +     * the stream ends.
      +     */
      +    async collect() {
      +        const buf = Object.assign([], {
      +            dataLength: 0,
      +        });
      +        if (!this[OBJECTMODE])
      +            buf.dataLength = 0;
      +        // set the promise first, in case an error is raised
      +        // by triggering the flow here.
      +        const p = this.promise();
      +        this.on('data', c => {
      +            buf.push(c);
      +            if (!this[OBJECTMODE])
      +                buf.dataLength += c.length;
      +        });
      +        await p;
      +        return buf;
      +    }
      +    /**
      +     * Return a Promise that resolves to the concatenation of all emitted data
      +     * once the stream ends.
      +     *
      +     * Not allowed on objectMode streams.
      +     */
      +    async concat() {
      +        if (this[OBJECTMODE]) {
      +            throw new Error('cannot concat in objectMode');
      +        }
      +        const buf = await this.collect();
      +        return (this[ENCODING]
      +            ? buf.join('')
      +            : Buffer.concat(buf, buf.dataLength));
      +    }
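+    // Illustrative sketch:
+    //
+    //   const mp = new Minipass({ encoding: 'utf8' })
+    //   mp.end('foo')
+    //   mp.concat().then(s => console.log(s))   // 'foo'
+    //   // collect() on an equivalent stream would resolve to ['foo']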
      +    /**
      +     * Return a void Promise that resolves once the stream ends.
      +     */
      +    async promise() {
      +        return new Promise((resolve, reject) => {
      +            this.on(DESTROYED, () => reject(new Error('stream destroyed')));
      +            this.on('error', er => reject(er));
      +            this.on('end', () => resolve());
      +        });
      +    }
      +    /**
      +     * Asynchronous `for await of` iteration.
      +     *
      +     * This will continue emitting all chunks until the stream terminates.
      +     */
      +    [Symbol.asyncIterator]() {
      +        // set this up front, in case the consumer doesn't call next()
      +        // right away.
      +        this[DISCARDED] = false;
      +        let stopped = false;
      +        const stop = async () => {
      +            this.pause();
      +            stopped = true;
      +            return { value: undefined, done: true };
      +        };
      +        const next = () => {
      +            if (stopped)
      +                return stop();
      +            const res = this.read();
      +            if (res !== null)
      +                return Promise.resolve({ done: false, value: res });
      +            if (this[EOF])
      +                return stop();
      +            let resolve;
      +            let reject;
      +            const onerr = (er) => {
      +                this.off('data', ondata);
      +                this.off('end', onend);
      +                this.off(DESTROYED, ondestroy);
      +                stop();
      +                reject(er);
      +            };
      +            const ondata = (value) => {
      +                this.off('error', onerr);
      +                this.off('end', onend);
      +                this.off(DESTROYED, ondestroy);
      +                this.pause();
      +                resolve({ value, done: !!this[EOF] });
      +            };
      +            const onend = () => {
      +                this.off('error', onerr);
      +                this.off('data', ondata);
      +                this.off(DESTROYED, ondestroy);
      +                stop();
      +                resolve({ done: true, value: undefined });
      +            };
      +            const ondestroy = () => onerr(new Error('stream destroyed'));
      +            return new Promise((res, rej) => {
      +                reject = rej;
      +                resolve = res;
      +                this.once(DESTROYED, ondestroy);
      +                this.once('error', onerr);
      +                this.once('end', onend);
      +                this.once('data', ondata);
      +            });
      +        };
      +        return {
      +            next,
      +            throw: stop,
      +            return: stop,
      +            [Symbol.asyncIterator]() {
      +                return this;
      +            },
      +        };
      +    }
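+    // Illustrative sketch (inside an async function):
+    //
+    //   const mp = new Minipass({ encoding: 'utf8' })
+    //   mp.end('abc')
+    //   for await (const chunk of mp) console.log(chunk)   // logs 'abc'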
      +    /**
      +     * Synchronous `for of` iteration.
      +     *
      +     * The iteration will terminate when the internal buffer runs out, even
      +     * if the stream has not yet terminated.
      +     */
      +    [Symbol.iterator]() {
      +        // set this up front, in case the consumer doesn't call next()
      +        // right away.
      +        this[DISCARDED] = false;
      +        let stopped = false;
      +        const stop = () => {
      +            this.pause();
      +            this.off(ERROR, stop);
      +            this.off(DESTROYED, stop);
      +            this.off('end', stop);
      +            stopped = true;
      +            return { done: true, value: undefined };
      +        };
      +        const next = () => {
      +            if (stopped)
      +                return stop();
      +            const value = this.read();
      +            return value === null ? stop() : { done: false, value };
      +        };
      +        this.once('end', stop);
      +        this.once(ERROR, stop);
      +        this.once(DESTROYED, stop);
      +        return {
      +            next,
      +            throw: stop,
      +            return: stop,
      +            [Symbol.iterator]() {
      +                return this;
      +            },
      +        };
      +    }
      +    /**
      +     * Destroy a stream, preventing it from being used for any further purpose.
      +     *
      +     * If the stream has a `close()` method, then it will be called on
      +     * destruction.
      +     *
      +     * After destruction, any attempt to write data, read data, or emit most
      +     * events will be ignored.
      +     *
      +     * If an error argument is provided, then it will be emitted in an
      +     * 'error' event.
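+     *
+     * A minimal sketch:
+     *
+     * @example
+     * const mp = new Minipass()
+     * mp.on('error', er => console.error(er.message))
+     * mp.destroy(new Error('no longer needed')) // emits 'error', drops buffered data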
      +     */
      +    destroy(er) {
      +        if (this[DESTROYED]) {
      +            if (er)
      +                this.emit('error', er);
      +            else
      +                this.emit(DESTROYED);
      +            return this;
      +        }
      +        this[DESTROYED] = true;
      +        this[DISCARDED] = true;
      +        // throw away all buffered data, it's never coming out
      +        this[BUFFER].length = 0;
      +        this[BUFFERLENGTH] = 0;
      +        const wc = this;
      +        if (typeof wc.close === 'function' && !this[CLOSED])
      +            wc.close();
      +        if (er)
      +            this.emit('error', er);
      +        // if no error to emit, still reject pending promises
      +        else
      +            this.emit(DESTROYED);
      +        return this;
      +    }
      +    /**
      +     * Alias for {@link isStream}
      +     *
      +     * Former export location, maintained for backwards compatibility.
      +     *
      +     * @deprecated
      +     */
      +    static get isStream() {
      +        return exports.isStream;
      +    }
      +}
      +exports.Minipass = Minipass;
      +//# sourceMappingURL=index.js.map
      \ No newline at end of file
      diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/cjs/package.json b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/cjs/package.json
      new file mode 100644
      index 00000000000000..5bbefffbabee39
      --- /dev/null
      +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/cjs/package.json
      @@ -0,0 +1,3 @@
      +{
      +  "type": "commonjs"
      +}
      diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/mjs/index.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/mjs/index.js
      new file mode 100644
      index 00000000000000..b65fafbae43a4e
      --- /dev/null
      +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/mjs/index.js
      @@ -0,0 +1,1018 @@
      +const proc = typeof process === 'object' && process
      +    ? process
      +    : {
      +        stdout: null,
      +        stderr: null,
      +    };
      +import { EventEmitter } from 'events';
      +import Stream from 'stream';
      +import { StringDecoder } from 'string_decoder';
      +/**
      + * Return true if the argument is a Minipass stream, Node stream, or something
      + * else that Minipass can interact with.
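+ *
+ * For example:
+ *
+ * @example
+ * isStream(new Minipass())   // true
+ * isStream(process.stdin)    // true (node core stream)
+ * isStream({ data: 'nope' }) // false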
      + */
      +export const isStream = (s) => !!s &&
      +    typeof s === 'object' &&
      +    (s instanceof Minipass ||
      +        s instanceof Stream ||
      +        isReadable(s) ||
      +        isWritable(s));
      +/**
      + * Return true if the argument is a valid {@link Minipass.Readable}
      + */
      +export const isReadable = (s) => !!s &&
      +    typeof s === 'object' &&
      +    s instanceof EventEmitter &&
      +    typeof s.pipe === 'function' &&
      +    // node core Writable streams have a pipe() method, but it throws
      +    s.pipe !== Stream.Writable.prototype.pipe;
      +/**
      + * Return true if the argument is a valid {@link Minipass.Writable}
      + */
      +export const isWritable = (s) => !!s &&
      +    typeof s === 'object' &&
      +    s instanceof EventEmitter &&
      +    typeof s.write === 'function' &&
      +    typeof s.end === 'function';
      +const EOF = Symbol('EOF');
      +const MAYBE_EMIT_END = Symbol('maybeEmitEnd');
      +const EMITTED_END = Symbol('emittedEnd');
      +const EMITTING_END = Symbol('emittingEnd');
      +const EMITTED_ERROR = Symbol('emittedError');
      +const CLOSED = Symbol('closed');
      +const READ = Symbol('read');
      +const FLUSH = Symbol('flush');
      +const FLUSHCHUNK = Symbol('flushChunk');
      +const ENCODING = Symbol('encoding');
      +const DECODER = Symbol('decoder');
      +const FLOWING = Symbol('flowing');
      +const PAUSED = Symbol('paused');
      +const RESUME = Symbol('resume');
      +const BUFFER = Symbol('buffer');
      +const PIPES = Symbol('pipes');
      +const BUFFERLENGTH = Symbol('bufferLength');
      +const BUFFERPUSH = Symbol('bufferPush');
      +const BUFFERSHIFT = Symbol('bufferShift');
      +const OBJECTMODE = Symbol('objectMode');
      +// internal event when stream is destroyed
      +const DESTROYED = Symbol('destroyed');
      +// internal event when stream has an error
      +const ERROR = Symbol('error');
      +const EMITDATA = Symbol('emitData');
      +const EMITEND = Symbol('emitEnd');
      +const EMITEND2 = Symbol('emitEnd2');
      +const ASYNC = Symbol('async');
      +const ABORT = Symbol('abort');
      +const ABORTED = Symbol('aborted');
      +const SIGNAL = Symbol('signal');
      +const DATALISTENERS = Symbol('dataListeners');
      +const DISCARDED = Symbol('discarded');
      +const defer = (fn) => Promise.resolve().then(fn);
      +const nodefer = (fn) => fn();
      +const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish';
      +const isArrayBufferLike = (b) => b instanceof ArrayBuffer ||
      +    (!!b &&
      +        typeof b === 'object' &&
      +        b.constructor &&
      +        b.constructor.name === 'ArrayBuffer' &&
      +        b.byteLength >= 0);
      +const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b);
      +/**
      + * Internal class representing a pipe to a destination stream.
      + *
      + * @internal
      + */
      +class Pipe {
      +    src;
      +    dest;
      +    opts;
      +    ondrain;
      +    constructor(src, dest, opts) {
      +        this.src = src;
      +        this.dest = dest;
      +        this.opts = opts;
      +        this.ondrain = () => src[RESUME]();
      +        this.dest.on('drain', this.ondrain);
      +    }
      +    unpipe() {
      +        this.dest.removeListener('drain', this.ondrain);
      +    }
      +    // only here for the prototype
      +    /* c8 ignore start */
      +    proxyErrors(_er) { }
      +    /* c8 ignore stop */
      +    end() {
      +        this.unpipe();
      +        if (this.opts.end)
      +            this.dest.end();
      +    }
      +}
      +/**
      + * Internal class representing a pipe to a destination stream where
      + * errors are proxied.
      + *
      + * @internal
      + */
      +class PipeProxyErrors extends Pipe {
      +    unpipe() {
      +        this.src.removeListener('error', this.proxyErrors);
      +        super.unpipe();
      +    }
      +    constructor(src, dest, opts) {
      +        super(src, dest, opts);
      +        this.proxyErrors = er => dest.emit('error', er);
      +        src.on('error', this.proxyErrors);
      +    }
      +}
      +const isObjectModeOptions = (o) => !!o.objectMode;
      +const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer';
      +/**
      + * Main export, the Minipass class
      + *
      + * `RType` is the type of data emitted, defaults to Buffer
      + *
      + * `WType` is the type of data to be written, if RType is buffer or string,
      + * then any {@link Minipass.ContiguousData} is allowed.
      + *
      + * `Events` is the set of event handler signatures that this object
      + * will emit, see {@link Minipass.Events}
      + */
      +export class Minipass extends EventEmitter {
      +    [FLOWING] = false;
      +    [PAUSED] = false;
      +    [PIPES] = [];
      +    [BUFFER] = [];
      +    [OBJECTMODE];
      +    [ENCODING];
      +    [ASYNC];
      +    [DECODER];
      +    [EOF] = false;
      +    [EMITTED_END] = false;
      +    [EMITTING_END] = false;
      +    [CLOSED] = false;
      +    [EMITTED_ERROR] = null;
      +    [BUFFERLENGTH] = 0;
      +    [DESTROYED] = false;
      +    [SIGNAL];
      +    [ABORTED] = false;
      +    [DATALISTENERS] = 0;
      +    [DISCARDED] = false;
      +    /**
      +     * true if the stream can be written
      +     */
      +    writable = true;
      +    /**
      +     * true if the stream can be read
      +     */
      +    readable = true;
      +    /**
      +     * If `RType` is Buffer, then options do not need to be provided.
      +     * Otherwise, an options object must be provided to specify either
      +     * {@link Minipass.SharedOptions.objectMode} or
      +     * {@link Minipass.SharedOptions.encoding}, as appropriate.
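+     *
+     * For example:
+     *
+     * @example
+     * const buffers = new Minipass()                      // emits Buffers
+     * const strings = new Minipass({ encoding: 'utf8' })  // emits strings
+     * const objects = new Minipass({ objectMode: true })  // emits arbitrary values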
      +     */
      +    constructor(...args) {
      +        const options = (args[0] ||
      +            {});
      +        super();
      +        if (options.objectMode && typeof options.encoding === 'string') {
      +            throw new TypeError('Encoding and objectMode may not be used together');
      +        }
      +        if (isObjectModeOptions(options)) {
      +            this[OBJECTMODE] = true;
      +            this[ENCODING] = null;
      +        }
      +        else if (isEncodingOptions(options)) {
      +            this[ENCODING] = options.encoding;
      +            this[OBJECTMODE] = false;
      +        }
      +        else {
      +            this[OBJECTMODE] = false;
      +            this[ENCODING] = null;
      +        }
      +        this[ASYNC] = !!options.async;
      +        this[DECODER] = this[ENCODING]
      +            ? new StringDecoder(this[ENCODING])
      +            : null;
      +        //@ts-ignore - private option for debugging and testing
      +        if (options && options.debugExposeBuffer === true) {
      +            Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] });
      +        }
      +        //@ts-ignore - private option for debugging and testing
      +        if (options && options.debugExposePipes === true) {
      +            Object.defineProperty(this, 'pipes', { get: () => this[PIPES] });
      +        }
      +        const { signal } = options;
      +        if (signal) {
      +            this[SIGNAL] = signal;
      +            if (signal.aborted) {
      +                this[ABORT]();
      +            }
      +            else {
      +                signal.addEventListener('abort', () => this[ABORT]());
      +            }
      +        }
      +    }
      +    /**
      +     * The amount of data stored in the buffer waiting to be read.
      +     *
+     * For Buffer streams, this will be the total byte length.
      +     * For string encoding streams, this will be the string character length,
      +     * according to JavaScript's `string.length` logic.
      +     * For objectMode streams, this is a count of the items waiting to be
      +     * emitted.
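+     *
+     * For example, with an objectMode stream that has no consumer yet:
+     *
+     * @example
+     * const mp = new Minipass({ objectMode: true })
+     * mp.write({ a: 1 })
+     * mp.write({ b: 2 })
+     * mp.bufferLength // 2 items waiting to be read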
      +     */
      +    get bufferLength() {
      +        return this[BUFFERLENGTH];
      +    }
      +    /**
      +     * The `BufferEncoding` currently in use, or `null`
      +     */
      +    get encoding() {
      +        return this[ENCODING];
      +    }
      +    /**
+     * @deprecated - This is a read-only property
      +     */
      +    set encoding(_enc) {
      +        throw new Error('Encoding must be set at instantiation time');
      +    }
      +    /**
      +     * @deprecated - Encoding may only be set at instantiation time
      +     */
      +    setEncoding(_enc) {
      +        throw new Error('Encoding must be set at instantiation time');
      +    }
      +    /**
      +     * True if this is an objectMode stream
      +     */
      +    get objectMode() {
      +        return this[OBJECTMODE];
      +    }
      +    /**
      +     * @deprecated - This is a read-only property
      +     */
      +    set objectMode(_om) {
      +        throw new Error('objectMode must be set at instantiation time');
      +    }
      +    /**
      +     * true if this is an async stream
      +     */
      +    get ['async']() {
      +        return this[ASYNC];
      +    }
      +    /**
      +     * Set to true to make this stream async.
      +     *
      +     * Once set, it cannot be unset, as this would potentially cause incorrect
      +     * behavior.  Ie, a sync stream can be made async, but an async stream
      +     * cannot be safely made sync.
      +     */
      +    set ['async'](a) {
      +        this[ASYNC] = this[ASYNC] || !!a;
      +    }
      +    // drop everything and get out of the flow completely
      +    [ABORT]() {
      +        this[ABORTED] = true;
      +        this.emit('abort', this[SIGNAL]?.reason);
      +        this.destroy(this[SIGNAL]?.reason);
      +    }
      +    /**
      +     * True if the stream has been aborted.
      +     */
      +    get aborted() {
      +        return this[ABORTED];
      +    }
      +    /**
      +     * No-op setter. Stream aborted status is set via the AbortSignal provided
      +     * in the constructor options.
      +     */
      +    set aborted(_) { }
      +    write(chunk, encoding, cb) {
      +        if (this[ABORTED])
      +            return false;
      +        if (this[EOF])
      +            throw new Error('write after end');
      +        if (this[DESTROYED]) {
      +            this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' }));
      +            return true;
      +        }
      +        if (typeof encoding === 'function') {
      +            cb = encoding;
      +            encoding = 'utf8';
      +        }
      +        if (!encoding)
      +            encoding = 'utf8';
      +        const fn = this[ASYNC] ? defer : nodefer;
      +        // convert array buffers and typed array views into buffers
      +        // at some point in the future, we may want to do the opposite!
      +        // leave strings and buffers as-is
+        // anything else is only allowed in object mode, so throw
      +        if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
      +            if (isArrayBufferView(chunk)) {
      +                //@ts-ignore - sinful unsafe type changing
      +                chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength);
      +            }
      +            else if (isArrayBufferLike(chunk)) {
      +                //@ts-ignore - sinful unsafe type changing
      +                chunk = Buffer.from(chunk);
      +            }
      +            else if (typeof chunk !== 'string') {
      +                throw new Error('Non-contiguous data written to non-objectMode stream');
      +            }
      +        }
      +        // handle object mode up front, since it's simpler
      +        // this yields better performance, fewer checks later.
      +        if (this[OBJECTMODE]) {
      +            // maybe impossible?
      +            /* c8 ignore start */
      +            if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
      +                this[FLUSH](true);
      +            /* c8 ignore stop */
      +            if (this[FLOWING])
      +                this.emit('data', chunk);
      +            else
      +                this[BUFFERPUSH](chunk);
      +            if (this[BUFFERLENGTH] !== 0)
      +                this.emit('readable');
      +            if (cb)
      +                fn(cb);
      +            return this[FLOWING];
      +        }
      +        // at this point the chunk is a buffer or string
      +        // don't buffer it up or send it to the decoder
      +        if (!chunk.length) {
      +            if (this[BUFFERLENGTH] !== 0)
      +                this.emit('readable');
      +            if (cb)
      +                fn(cb);
      +            return this[FLOWING];
      +        }
      +        // fast-path writing strings of same encoding to a stream with
      +        // an empty buffer, skipping the buffer/decoder dance
      +        if (typeof chunk === 'string' &&
      +            // unless it is a string already ready for us to use
      +            !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) {
      +            //@ts-ignore - sinful unsafe type change
      +            chunk = Buffer.from(chunk, encoding);
      +        }
      +        if (Buffer.isBuffer(chunk) && this[ENCODING]) {
      +            //@ts-ignore - sinful unsafe type change
      +            chunk = this[DECODER].write(chunk);
      +        }
      +        // Note: flushing CAN potentially switch us into not-flowing mode
      +        if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
      +            this[FLUSH](true);
      +        if (this[FLOWING])
      +            this.emit('data', chunk);
      +        else
      +            this[BUFFERPUSH](chunk);
      +        if (this[BUFFERLENGTH] !== 0)
      +            this.emit('readable');
      +        if (cb)
      +            fn(cb);
      +        return this[FLOWING];
      +    }
      +    /**
      +     * Low-level explicit read method.
      +     *
      +     * In objectMode, the argument is ignored, and one item is returned if
      +     * available.
      +     *
      +     * `n` is the number of bytes (or in the case of encoding streams,
      +     * characters) to consume. If `n` is not provided, then the entire buffer
      +     * is returned, or `null` is returned if no data is available.
      +     *
+     * If `n` is greater than the amount of data in the internal buffer,
      +     * then `null` is returned.
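+     *
+     * For example, with a utf8-encoded stream:
+     *
+     * @example
+     * const mp = new Minipass({ encoding: 'utf8' })
+     * mp.write('foobar')
+     * mp.read(3) // 'foo'
+     * mp.read()  // 'bar'
+     * mp.read()  // null (buffer is empty)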
      +     */
      +    read(n) {
      +        if (this[DESTROYED])
      +            return null;
      +        this[DISCARDED] = false;
      +        if (this[BUFFERLENGTH] === 0 ||
      +            n === 0 ||
      +            (n && n > this[BUFFERLENGTH])) {
      +            this[MAYBE_EMIT_END]();
      +            return null;
      +        }
      +        if (this[OBJECTMODE])
      +            n = null;
      +        if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
      +            // not object mode, so if we have an encoding, then RType is string
      +            // otherwise, must be Buffer
      +            this[BUFFER] = [
      +                (this[ENCODING]
      +                    ? this[BUFFER].join('')
      +                    : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])),
      +            ];
      +        }
      +        const ret = this[READ](n || null, this[BUFFER][0]);
      +        this[MAYBE_EMIT_END]();
      +        return ret;
      +    }
      +    [READ](n, chunk) {
      +        if (this[OBJECTMODE])
      +            this[BUFFERSHIFT]();
      +        else {
      +            const c = chunk;
      +            if (n === c.length || n === null)
      +                this[BUFFERSHIFT]();
      +            else if (typeof c === 'string') {
      +                this[BUFFER][0] = c.slice(n);
      +                chunk = c.slice(0, n);
      +                this[BUFFERLENGTH] -= n;
      +            }
      +            else {
      +                this[BUFFER][0] = c.subarray(n);
      +                chunk = c.subarray(0, n);
      +                this[BUFFERLENGTH] -= n;
      +            }
      +        }
      +        this.emit('data', chunk);
      +        if (!this[BUFFER].length && !this[EOF])
      +            this.emit('drain');
      +        return chunk;
      +    }
      +    end(chunk, encoding, cb) {
      +        if (typeof chunk === 'function') {
      +            cb = chunk;
      +            chunk = undefined;
      +        }
      +        if (typeof encoding === 'function') {
      +            cb = encoding;
      +            encoding = 'utf8';
      +        }
      +        if (chunk !== undefined)
      +            this.write(chunk, encoding);
      +        if (cb)
      +            this.once('end', cb);
      +        this[EOF] = true;
      +        this.writable = false;
      +        // if we haven't written anything, then go ahead and emit,
      +        // even if we're not reading.
      +        // we'll re-emit if a new 'end' listener is added anyway.
      +        // This makes MP more suitable to write-only use cases.
      +        if (this[FLOWING] || !this[PAUSED])
      +            this[MAYBE_EMIT_END]();
      +        return this;
      +    }
      +    // don't let the internal resume be overwritten
      +    [RESUME]() {
      +        if (this[DESTROYED])
      +            return;
      +        if (!this[DATALISTENERS] && !this[PIPES].length) {
      +            this[DISCARDED] = true;
      +        }
      +        this[PAUSED] = false;
      +        this[FLOWING] = true;
      +        this.emit('resume');
      +        if (this[BUFFER].length)
      +            this[FLUSH]();
      +        else if (this[EOF])
      +            this[MAYBE_EMIT_END]();
      +        else
      +            this.emit('drain');
      +    }
      +    /**
      +     * Resume the stream if it is currently in a paused state
      +     *
      +     * If called when there are no pipe destinations or `data` event listeners,
      +     * this will place the stream in a "discarded" state, where all data will
      +     * be thrown away. The discarded state is removed if a pipe destination or
      +     * data handler is added, if pause() is called, or if any synchronous or
      +     * asynchronous iteration is started.
      +     */
      +    resume() {
      +        return this[RESUME]();
      +    }
      +    /**
      +     * Pause the stream
      +     */
      +    pause() {
      +        this[FLOWING] = false;
      +        this[PAUSED] = true;
      +        this[DISCARDED] = false;
      +    }
      +    /**
      +     * true if the stream has been forcibly destroyed
      +     */
      +    get destroyed() {
      +        return this[DESTROYED];
      +    }
      +    /**
      +     * true if the stream is currently in a flowing state, meaning that
      +     * any writes will be immediately emitted.
      +     */
      +    get flowing() {
      +        return this[FLOWING];
      +    }
      +    /**
      +     * true if the stream is currently in a paused state
      +     */
      +    get paused() {
      +        return this[PAUSED];
      +    }
      +    [BUFFERPUSH](chunk) {
      +        if (this[OBJECTMODE])
      +            this[BUFFERLENGTH] += 1;
      +        else
      +            this[BUFFERLENGTH] += chunk.length;
      +        this[BUFFER].push(chunk);
      +    }
      +    [BUFFERSHIFT]() {
      +        if (this[OBJECTMODE])
      +            this[BUFFERLENGTH] -= 1;
      +        else
      +            this[BUFFERLENGTH] -= this[BUFFER][0].length;
      +        return this[BUFFER].shift();
      +    }
      +    [FLUSH](noDrain = false) {
      +        do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) &&
      +            this[BUFFER].length);
      +        if (!noDrain && !this[BUFFER].length && !this[EOF])
      +            this.emit('drain');
      +    }
      +    [FLUSHCHUNK](chunk) {
      +        this.emit('data', chunk);
      +        return this[FLOWING];
      +    }
      +    /**
      +     * Pipe all data emitted by this stream into the destination provided.
      +     *
      +     * Triggers the flow of data.
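+     *
+     * A minimal sketch:
+     *
+     * @example
+     * const mp = new Minipass({ encoding: 'utf8' })
+     * mp.pipe(process.stdout)
+     * mp.end('hello\n') // written through to stdout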
      +     */
      +    pipe(dest, opts) {
      +        if (this[DESTROYED])
      +            return dest;
      +        this[DISCARDED] = false;
      +        const ended = this[EMITTED_END];
      +        opts = opts || {};
      +        if (dest === proc.stdout || dest === proc.stderr)
      +            opts.end = false;
      +        else
      +            opts.end = opts.end !== false;
      +        opts.proxyErrors = !!opts.proxyErrors;
      +        // piping an ended stream ends immediately
      +        if (ended) {
      +            if (opts.end)
      +                dest.end();
      +        }
      +        else {
      +            // "as" here just ignores the WType, which pipes don't care about,
      +            // since they're only consuming from us, and writing to the dest
      +            this[PIPES].push(!opts.proxyErrors
      +                ? new Pipe(this, dest, opts)
      +                : new PipeProxyErrors(this, dest, opts));
      +            if (this[ASYNC])
      +                defer(() => this[RESUME]());
      +            else
      +                this[RESUME]();
      +        }
      +        return dest;
      +    }
      +    /**
      +     * Fully unhook a piped destination stream.
      +     *
      +     * If the destination stream was the only consumer of this stream (ie,
      +     * there are no other piped destinations or `'data'` event listeners)
      +     * then the flow of data will stop until there is another consumer or
      +     * {@link Minipass#resume} is explicitly called.
      +     */
      +    unpipe(dest) {
      +        const p = this[PIPES].find(p => p.dest === dest);
      +        if (p) {
      +            if (this[PIPES].length === 1) {
      +                if (this[FLOWING] && this[DATALISTENERS] === 0) {
      +                    this[FLOWING] = false;
      +                }
      +                this[PIPES] = [];
      +            }
      +            else
      +                this[PIPES].splice(this[PIPES].indexOf(p), 1);
      +            p.unpipe();
      +        }
      +    }
      +    /**
      +     * Alias for {@link Minipass#on}
      +     */
      +    addListener(ev, handler) {
      +        return this.on(ev, handler);
      +    }
      +    /**
      +     * Mostly identical to `EventEmitter.on`, with the following
      +     * behavior differences to prevent data loss and unnecessary hangs:
      +     *
      +     * - Adding a 'data' event handler will trigger the flow of data
      +     *
      +     * - Adding a 'readable' event handler when there is data waiting to be read
      +     *   will cause 'readable' to be emitted immediately.
      +     *
+     * - Adding a handler for an 'endish' event ('end', 'finish', etc.) that
+     *   has already been emitted will cause the event to be emitted
+     *   immediately and all handlers removed.
      +     *
      +     * - Adding an 'error' event handler after an error has been emitted will
      +     *   cause the event to be re-emitted immediately with the error previously
      +     *   raised.
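+     *
+     * A sketch of the first case, where attaching a 'data' handler starts
+     * the flow and delivers already-buffered chunks:
+     *
+     * @example
+     * const mp = new Minipass({ encoding: 'utf8' })
+     * mp.write('buffered early')
+     * mp.on('data', chunk => console.log(chunk)) // logs 'buffered early'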
      +     */
      +    on(ev, handler) {
      +        const ret = super.on(ev, handler);
      +        if (ev === 'data') {
      +            this[DISCARDED] = false;
      +            this[DATALISTENERS]++;
      +            if (!this[PIPES].length && !this[FLOWING]) {
      +                this[RESUME]();
      +            }
      +        }
      +        else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) {
      +            super.emit('readable');
      +        }
      +        else if (isEndish(ev) && this[EMITTED_END]) {
      +            super.emit(ev);
      +            this.removeAllListeners(ev);
      +        }
      +        else if (ev === 'error' && this[EMITTED_ERROR]) {
      +            const h = handler;
      +            if (this[ASYNC])
      +                defer(() => h.call(this, this[EMITTED_ERROR]));
      +            else
      +                h.call(this, this[EMITTED_ERROR]);
      +        }
      +        return ret;
      +    }
      +    /**
      +     * Alias for {@link Minipass#off}
      +     */
      +    removeListener(ev, handler) {
      +        return this.off(ev, handler);
      +    }
      +    /**
      +     * Mostly identical to `EventEmitter.off`
      +     *
      +     * If a 'data' event handler is removed, and it was the last consumer
      +     * (ie, there are no pipe destinations or other 'data' event listeners),
      +     * then the flow of data will stop until there is another consumer or
      +     * {@link Minipass#resume} is explicitly called.
      +     */
      +    off(ev, handler) {
      +        const ret = super.off(ev, handler);
      +        // if we previously had listeners, and now we don't, and we don't
      +        // have any pipes, then stop the flow, unless it's been explicitly
      +        // put in a discarded flowing state via stream.resume().
      +        if (ev === 'data') {
      +            this[DATALISTENERS] = this.listeners('data').length;
      +            if (this[DATALISTENERS] === 0 &&
      +                !this[DISCARDED] &&
      +                !this[PIPES].length) {
      +                this[FLOWING] = false;
      +            }
      +        }
      +        return ret;
      +    }
      +    /**
      +     * Mostly identical to `EventEmitter.removeAllListeners`
      +     *
      +     * If all 'data' event handlers are removed, and they were the last consumer
      +     * (ie, there are no pipe destinations), then the flow of data will stop
      +     * until there is another consumer or {@link Minipass#resume} is explicitly
      +     * called.
      +     */
      +    removeAllListeners(ev) {
      +        const ret = super.removeAllListeners(ev);
      +        if (ev === 'data' || ev === undefined) {
      +            this[DATALISTENERS] = 0;
      +            if (!this[DISCARDED] && !this[PIPES].length) {
      +                this[FLOWING] = false;
      +            }
      +        }
      +        return ret;
      +    }
      +    /**
      +     * true if the 'end' event has been emitted
      +     */
      +    get emittedEnd() {
      +        return this[EMITTED_END];
      +    }
      +    [MAYBE_EMIT_END]() {
      +        if (!this[EMITTING_END] &&
      +            !this[EMITTED_END] &&
      +            !this[DESTROYED] &&
      +            this[BUFFER].length === 0 &&
      +            this[EOF]) {
      +            this[EMITTING_END] = true;
      +            this.emit('end');
      +            this.emit('prefinish');
      +            this.emit('finish');
      +            if (this[CLOSED])
      +                this.emit('close');
      +            this[EMITTING_END] = false;
      +        }
      +    }
      +    /**
      +     * Mostly identical to `EventEmitter.emit`, with the following
      +     * behavior differences to prevent data loss and unnecessary hangs:
      +     *
      +     * If the stream has been destroyed, and the event is something other
      +     * than 'close' or 'error', then `false` is returned and no handlers
      +     * are called.
      +     *
      +     * If the event is 'end', and has already been emitted, then the event
      +     * is ignored. If the stream is in a paused or non-flowing state, then
      +     * the event will be deferred until data flow resumes. If the stream is
      +     * async, then handlers will be called on the next tick rather than
      +     * immediately.
      +     *
      +     * If the event is 'close', and 'end' has not yet been emitted, then
      +     * the event will be deferred until after 'end' is emitted.
      +     *
      +     * If the event is 'error', and an AbortSignal was provided for the stream,
      +     * and there are no listeners, then the event is ignored, matching the
+     * behavior of node core streams in the presence of an AbortSignal.
      +     *
      +     * If the event is 'finish' or 'prefinish', then all listeners will be
      +     * removed after emitting the event, to prevent double-firing.
      +     */
      +    emit(ev, ...args) {
      +        const data = args[0];
      +        // error and close are only events allowed after calling destroy()
      +        if (ev !== 'error' &&
      +            ev !== 'close' &&
      +            ev !== DESTROYED &&
      +            this[DESTROYED]) {
      +            return false;
      +        }
      +        else if (ev === 'data') {
      +            return !this[OBJECTMODE] && !data
      +                ? false
      +                : this[ASYNC]
      +                    ? (defer(() => this[EMITDATA](data)), true)
      +                    : this[EMITDATA](data);
      +        }
      +        else if (ev === 'end') {
      +            return this[EMITEND]();
      +        }
      +        else if (ev === 'close') {
      +            this[CLOSED] = true;
      +            // don't emit close before 'end' and 'finish'
      +            if (!this[EMITTED_END] && !this[DESTROYED])
      +                return false;
      +            const ret = super.emit('close');
      +            this.removeAllListeners('close');
      +            return ret;
      +        }
      +        else if (ev === 'error') {
      +            this[EMITTED_ERROR] = data;
      +            super.emit(ERROR, data);
      +            const ret = !this[SIGNAL] || this.listeners('error').length
      +                ? super.emit('error', data)
      +                : false;
      +            this[MAYBE_EMIT_END]();
      +            return ret;
      +        }
      +        else if (ev === 'resume') {
      +            const ret = super.emit('resume');
      +            this[MAYBE_EMIT_END]();
      +            return ret;
      +        }
      +        else if (ev === 'finish' || ev === 'prefinish') {
      +            const ret = super.emit(ev);
      +            this.removeAllListeners(ev);
      +            return ret;
      +        }
      +        // Some other unknown event
      +        const ret = super.emit(ev, ...args);
      +        this[MAYBE_EMIT_END]();
      +        return ret;
      +    }
      +    [EMITDATA](data) {
      +        for (const p of this[PIPES]) {
      +            if (p.dest.write(data) === false)
      +                this.pause();
      +        }
      +        const ret = this[DISCARDED] ? false : super.emit('data', data);
      +        this[MAYBE_EMIT_END]();
      +        return ret;
      +    }
      +    [EMITEND]() {
      +        if (this[EMITTED_END])
      +            return false;
      +        this[EMITTED_END] = true;
      +        this.readable = false;
      +        return this[ASYNC]
      +            ? (defer(() => this[EMITEND2]()), true)
      +            : this[EMITEND2]();
      +    }
      +    [EMITEND2]() {
      +        if (this[DECODER]) {
      +            const data = this[DECODER].end();
      +            if (data) {
      +                for (const p of this[PIPES]) {
      +                    p.dest.write(data);
      +                }
      +                if (!this[DISCARDED])
      +                    super.emit('data', data);
      +            }
      +        }
      +        for (const p of this[PIPES]) {
      +            p.end();
      +        }
      +        const ret = super.emit('end');
      +        this.removeAllListeners('end');
      +        return ret;
      +    }
      +    /**
      +     * Return a Promise that resolves to an array of all emitted data once
      +     * the stream ends.
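+     *
+     * A minimal sketch with an objectMode stream:
+     *
+     * @example
+     * const mp = new Minipass({ objectMode: true })
+     * mp.write(1)
+     * mp.write(2)
+     * mp.end(3)
+     * const items = await mp.collect() // [1, 2, 3]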
      +     */
      +    async collect() {
      +        const buf = Object.assign([], {
      +            dataLength: 0,
      +        });
      +        if (!this[OBJECTMODE])
      +            buf.dataLength = 0;
      +        // set the promise first, in case an error is raised
      +        // by triggering the flow here.
      +        const p = this.promise();
      +        this.on('data', c => {
      +            buf.push(c);
      +            if (!this[OBJECTMODE])
      +                buf.dataLength += c.length;
      +        });
      +        await p;
      +        return buf;
      +    }
      +    /**
      +     * Return a Promise that resolves to the concatenation of all emitted data
      +     * once the stream ends.
      +     *
      +     * Not allowed on objectMode streams.
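+     *
+     * For example:
+     *
+     * @example
+     * const mp = new Minipass({ encoding: 'utf8' })
+     * mp.write('foo')
+     * mp.end('bar')
+     * await mp.concat() // 'foobar'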
      +     */
      +    async concat() {
      +        if (this[OBJECTMODE]) {
      +            throw new Error('cannot concat in objectMode');
      +        }
      +        const buf = await this.collect();
      +        return (this[ENCODING]
      +            ? buf.join('')
      +            : Buffer.concat(buf, buf.dataLength));
      +    }
      +    /**
      +     * Return a void Promise that resolves once the stream ends.
      +     */
      +    async promise() {
      +        return new Promise((resolve, reject) => {
      +            this.on(DESTROYED, () => reject(new Error('stream destroyed')));
      +            this.on('error', er => reject(er));
      +            this.on('end', () => resolve());
      +        });
      +    }
      +    /**
      +     * Asynchronous `for await of` iteration.
      +     *
      +     * This will continue emitting all chunks until the stream terminates.
      +     */
      +    [Symbol.asyncIterator]() {
      +        // set this up front, in case the consumer doesn't call next()
      +        // right away.
      +        this[DISCARDED] = false;
      +        let stopped = false;
      +        const stop = async () => {
      +            this.pause();
      +            stopped = true;
      +            return { value: undefined, done: true };
      +        };
      +        const next = () => {
      +            if (stopped)
      +                return stop();
      +            const res = this.read();
      +            if (res !== null)
      +                return Promise.resolve({ done: false, value: res });
      +            if (this[EOF])
      +                return stop();
      +            let resolve;
      +            let reject;
      +            const onerr = (er) => {
      +                this.off('data', ondata);
      +                this.off('end', onend);
      +                this.off(DESTROYED, ondestroy);
      +                stop();
      +                reject(er);
      +            };
      +            const ondata = (value) => {
      +                this.off('error', onerr);
      +                this.off('end', onend);
      +                this.off(DESTROYED, ondestroy);
      +                this.pause();
      +                resolve({ value, done: !!this[EOF] });
      +            };
      +            const onend = () => {
      +                this.off('error', onerr);
      +                this.off('data', ondata);
      +                this.off(DESTROYED, ondestroy);
      +                stop();
      +                resolve({ done: true, value: undefined });
      +            };
      +            const ondestroy = () => onerr(new Error('stream destroyed'));
      +            return new Promise((res, rej) => {
      +                reject = rej;
      +                resolve = res;
      +                this.once(DESTROYED, ondestroy);
      +                this.once('error', onerr);
      +                this.once('end', onend);
      +                this.once('data', ondata);
      +            });
      +        };
      +        return {
      +            next,
      +            throw: stop,
      +            return: stop,
      +            [Symbol.asyncIterator]() {
      +                return this;
      +            },
      +        };
      +    }
      +    /**
      +     * Synchronous `for of` iteration.
      +     *
      +     * The iteration will terminate when the internal buffer runs out, even
      +     * if the stream has not yet terminated.
      +     */
      +    [Symbol.iterator]() {
      +        // set this up front, in case the consumer doesn't call next()
      +        // right away.
      +        this[DISCARDED] = false;
      +        let stopped = false;
      +        const stop = () => {
      +            this.pause();
      +            this.off(ERROR, stop);
      +            this.off(DESTROYED, stop);
      +            this.off('end', stop);
      +            stopped = true;
      +            return { done: true, value: undefined };
      +        };
      +        const next = () => {
      +            if (stopped)
      +                return stop();
      +            const value = this.read();
      +            return value === null ? stop() : { done: false, value };
      +        };
      +        this.once('end', stop);
      +        this.once(ERROR, stop);
      +        this.once(DESTROYED, stop);
      +        return {
      +            next,
      +            throw: stop,
      +            return: stop,
      +            [Symbol.iterator]() {
      +                return this;
      +            },
      +        };
      +    }
      +    /**
      +     * Destroy a stream, preventing it from being used for any further purpose.
      +     *
      +     * If the stream has a `close()` method, then it will be called on
      +     * destruction.
      +     *
      +     * After destruction, any attempt to write data, read data, or emit most
      +     * events will be ignored.
      +     *
      +     * If an error argument is provided, then it will be emitted in an
      +     * 'error' event.
      +     */
      +    destroy(er) {
      +        if (this[DESTROYED]) {
      +            if (er)
      +                this.emit('error', er);
      +            else
      +                this.emit(DESTROYED);
      +            return this;
      +        }
      +        this[DESTROYED] = true;
      +        this[DISCARDED] = true;
      +        // throw away all buffered data, it's never coming out
      +        this[BUFFER].length = 0;
      +        this[BUFFERLENGTH] = 0;
      +        const wc = this;
      +        if (typeof wc.close === 'function' && !this[CLOSED])
      +            wc.close();
      +        if (er)
      +            this.emit('error', er);
      +        // if no error to emit, still reject pending promises
      +        else
      +            this.emit(DESTROYED);
      +        return this;
      +    }
      +    /**
      +     * Alias for {@link isStream}
      +     *
      +     * Former export location, maintained for backwards compatibility.
      +     *
      +     * @deprecated
      +     */
      +    static get isStream() {
      +        return isStream;
      +    }
      +}
      +//# sourceMappingURL=index.js.map
      \ No newline at end of file
      diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/mjs/package.json b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/mjs/package.json
      new file mode 100644
      index 00000000000000..3dbc1ca591c055
      --- /dev/null
      +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/mjs/package.json
      @@ -0,0 +1,3 @@
      +{
      +  "type": "module"
      +}
      diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/package.json b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/package.json
      new file mode 100644
      index 00000000000000..6faaa247a5bc66
      --- /dev/null
      +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/package.json
      @@ -0,0 +1,82 @@
      +{
      +  "name": "minipass",
      +  "version": "7.0.3",
      +  "description": "minimal implementation of a PassThrough stream",
      +  "main": "./dist/cjs/index.js",
      +  "module": "./dist/mjs/index.js",
      +  "types": "./dist/cjs/index.js",
      +  "exports": {
      +    ".": {
      +      "import": {
      +        "types": "./dist/mjs/index.d.ts",
      +        "default": "./dist/mjs/index.js"
      +      },
      +      "require": {
      +        "types": "./dist/cjs/index.d.ts",
      +        "default": "./dist/cjs/index.js"
      +      }
      +    },
      +    "./package.json": "./package.json"
      +  },
      +  "files": [
      +    "dist"
      +  ],
      +  "scripts": {
      +    "preversion": "npm test",
      +    "postversion": "npm publish",
      +    "prepublishOnly": "git push origin --follow-tags",
      +    "preprepare": "rm -rf dist",
      +    "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json && bash ./scripts/fixup.sh",
      +    "pretest": "npm run prepare",
      +    "presnap": "npm run prepare",
      +    "test": "c8 tap",
      +    "snap": "c8 tap",
      +    "format": "prettier --write . --loglevel warn",
      +    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts"
      +  },
      +  "tap": {
      +    "coverage": false,
      +    "node-arg": [
      +      "--enable-source-maps",
      +      "--no-warnings",
      +      "--loader",
      +      "ts-node/esm"
      +    ],
      +    "ts": false
      +  },
      +  "prettier": {
      +    "semi": false,
      +    "printWidth": 75,
      +    "tabWidth": 2,
      +    "useTabs": false,
      +    "singleQuote": true,
      +    "jsxSingleQuote": false,
      +    "bracketSameLine": true,
      +    "arrowParens": "avoid",
      +    "endOfLine": "lf"
      +  },
      +  "devDependencies": {
      +    "@types/node": "^20.1.2",
      +    "@types/tap": "^15.0.8",
      +    "c8": "^7.13.0",
      +    "prettier": "^2.6.2",
      +    "tap": "^16.3.0",
      +    "ts-node": "^10.9.1",
      +    "typedoc": "^0.24.8",
      +    "typescript": "^5.1.3",
      +    "end-of-stream": "^1.4.0",
      +    "node-abort-controller": "^3.1.1",
      +    "sync-content": "^1.0.2",
      +    "through2": "^2.0.3"
      +  },
      +  "repository": "https://github.com/isaacs/minipass",
      +  "keywords": [
      +    "passthrough",
      +    "stream"
      +  ],
      +  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
      +  "license": "ISC",
      +  "engines": {
      +    "node": ">=16 || 14 >=14.17"
      +  }
      +}
      diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/package.json b/deps/npm/node_modules/node-gyp/node_modules/cacache/package.json
      new file mode 100644
      index 00000000000000..ab58cb8b7c50f4
      --- /dev/null
      +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/package.json
      @@ -0,0 +1,82 @@
      +{
      +  "name": "cacache",
      +  "version": "17.1.4",
      +  "cache-version": {
      +    "content": "2",
      +    "index": "5"
      +  },
      +  "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.",
      +  "main": "lib/index.js",
      +  "files": [
      +    "bin/",
      +    "lib/"
      +  ],
      +  "scripts": {
      +    "test": "tap",
      +    "snap": "tap",
      +    "coverage": "tap",
      +    "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test",
      +    "lint": "eslint \"**/*.js\"",
      +    "npmclilint": "npmcli-lint",
      +    "lintfix": "npm run lint -- --fix",
      +    "postsnap": "npm run lintfix --",
      +    "postlint": "template-oss-check",
      +    "posttest": "npm run lint",
      +    "template-oss-apply": "template-oss-apply --force"
      +  },
      +  "repository": {
      +    "type": "git",
      +    "url": "https://github.com/npm/cacache.git"
      +  },
      +  "keywords": [
      +    "cache",
      +    "caching",
      +    "content-addressable",
      +    "sri",
      +    "sri hash",
      +    "subresource integrity",
      +    "cache",
      +    "storage",
      +    "store",
      +    "file store",
      +    "filesystem",
      +    "disk cache",
      +    "disk storage"
      +  ],
      +  "license": "ISC",
      +  "dependencies": {
      +    "@npmcli/fs": "^3.1.0",
      +    "fs-minipass": "^3.0.0",
      +    "glob": "^10.2.2",
      +    "lru-cache": "^7.7.1",
      +    "minipass": "^7.0.3",
      +    "minipass-collect": "^1.0.2",
      +    "minipass-flush": "^1.0.5",
      +    "minipass-pipeline": "^1.2.4",
      +    "p-map": "^4.0.0",
      +    "ssri": "^10.0.0",
      +    "tar": "^6.1.11",
      +    "unique-filename": "^3.0.0"
      +  },
      +  "devDependencies": {
      +    "@npmcli/eslint-config": "^4.0.0",
      +    "@npmcli/template-oss": "4.18.0",
      +    "tap": "^16.0.0"
      +  },
      +  "engines": {
      +    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
      +  },
      +  "templateOSS": {
      +    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
      +    "windowsCI": false,
      +    "version": "4.18.0",
      +    "publish": "true"
      +  },
      +  "author": "GitHub Inc.",
      +  "tap": {
      +    "nyc-arg": [
      +      "--exclude",
      +      "tap-snapshots/**"
      +    ]
      +  }
      +}
      diff --git a/deps/npm/node_modules/path-scurry/node_modules/lru-cache/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/lru-cache/LICENSE
      similarity index 100%
      rename from deps/npm/node_modules/path-scurry/node_modules/lru-cache/LICENSE
      rename to deps/npm/node_modules/node-gyp/node_modules/lru-cache/LICENSE
      diff --git a/deps/npm/node_modules/lru-cache/index.js b/deps/npm/node_modules/node-gyp/node_modules/lru-cache/index.js
      similarity index 100%
      rename from deps/npm/node_modules/lru-cache/index.js
      rename to deps/npm/node_modules/node-gyp/node_modules/lru-cache/index.js
      diff --git a/deps/npm/node_modules/lru-cache/index.mjs b/deps/npm/node_modules/node-gyp/node_modules/lru-cache/index.mjs
      similarity index 100%
      rename from deps/npm/node_modules/lru-cache/index.mjs
      rename to deps/npm/node_modules/node-gyp/node_modules/lru-cache/index.mjs
      diff --git a/deps/npm/node_modules/path-scurry/node_modules/lru-cache/package.json b/deps/npm/node_modules/node-gyp/node_modules/lru-cache/package.json
      similarity index 55%
      rename from deps/npm/node_modules/path-scurry/node_modules/lru-cache/package.json
      rename to deps/npm/node_modules/node-gyp/node_modules/lru-cache/package.json
      index 69a20582ff9b6f..9684991727e7a2 100644
      --- a/deps/npm/node_modules/path-scurry/node_modules/lru-cache/package.json
      +++ b/deps/npm/node_modules/node-gyp/node_modules/lru-cache/package.json
      @@ -1,7 +1,7 @@
       {
         "name": "lru-cache",
         "description": "A cache object that deletes the least-recently-used items.",
      -  "version": "9.1.1",
      +  "version": "7.18.3",
         "author": "Isaac Z. Schlueter ",
         "keywords": [
           "mru",
      @@ -11,47 +11,34 @@
         "sideEffects": false,
         "scripts": {
           "build": "npm run prepare",
      -    "preprepare": "rm -rf dist",
      -    "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json",
      -    "postprepare": "bash fixup.sh",
           "pretest": "npm run prepare",
           "presnap": "npm run prepare",
      -    "test": "c8 tap",
      -    "snap": "c8 tap",
      +    "prepare": "node ./scripts/transpile-to-esm.js",
      +    "size": "size-limit",
      +    "test": "tap",
      +    "snap": "tap",
           "preversion": "npm test",
           "postversion": "npm publish",
           "prepublishOnly": "git push origin --follow-tags",
           "format": "prettier --write .",
      -    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts",
      -    "benchmark-results-typedoc": "bash scripts/benchmark-results-typedoc.sh",
      -    "prebenchmark": "npm run prepare",
      -    "benchmark": "make -C benchmark",
      -    "preprofile": "npm run prepare",
      -    "profile": "make -C benchmark profile"
      +    "typedoc": "typedoc ./index.d.ts"
         },
      -  "main": "./dist/cjs/index.js",
      -  "module": "./dist/mjs/index.js",
      +  "type": "commonjs",
      +  "main": "./index.js",
      +  "module": "./index.mjs",
      +  "types": "./index.d.ts",
         "exports": {
      -    "./min": {
      -      "import": {
      -        "types": "./dist/mjs/index.d.ts",
      -        "default": "./dist/mjs/index.min.js"
      -      },
      -      "require": {
      -        "types": "./dist/cjs/index.d.ts",
      -        "default": "./dist/cjs/index.min.js"
      -      }
      -    },
           ".": {
             "import": {
      -        "types": "./dist/mjs/index.d.ts",
      -        "default": "./dist/mjs/index.js"
      +        "types": "./index.d.ts",
      +        "default": "./index.mjs"
             },
             "require": {
      -        "types": "./dist/cjs/index.d.ts",
      -        "default": "./dist/cjs/index.js"
      +        "types": "./index.d.ts",
      +        "default": "./index.js"
             }
      -    }
      +    },
      +    "./package.json": "./package.json"
         },
         "repository": "git://github.com/isaacs/node-lru-cache.git",
         "devDependencies": {
      @@ -61,10 +48,7 @@
           "benchmark": "^2.1.4",
           "c8": "^7.11.2",
           "clock-mock": "^1.0.6",
      -    "esbuild": "^0.17.11",
           "eslint-config-prettier": "^8.5.0",
      -    "marked": "^4.2.12",
      -    "mkdirp": "^2.1.5",
           "prettier": "^2.6.2",
           "size-limit": "^7.0.8",
           "tap": "^16.3.4",
      @@ -75,10 +59,12 @@
         },
         "license": "ISC",
         "files": [
      -    "dist"
      +    "index.js",
      +    "index.mjs",
      +    "index.d.ts"
         ],
         "engines": {
      -    "node": "14 || >=16.14"
      +    "node": ">=12"
         },
         "prettier": {
           "semi": false,
      @@ -92,18 +78,19 @@
           "endOfLine": "lf"
         },
         "tap": {
      -    "coverage": false,
      +    "nyc-arg": [
      +      "--include=index.js"
      +    ],
           "node-arg": [
             "--expose-gc",
      -      "--no-warnings",
      -      "--loader",
      -      "ts-node/esm"
      +      "--require",
      +      "ts-node/register"
           ],
           "ts": false
         },
         "size-limit": [
           {
      -      "path": "./dist/mjs/index.js"
      +      "path": "./index.js"
           }
         ]
       }
      diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE
      new file mode 100644
      index 00000000000000..1808eb2844231c
      --- /dev/null
      +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE
      @@ -0,0 +1,16 @@
      +ISC License
      +
      +Copyright 2017-2022 (c) npm, Inc.
      +
      +Permission to use, copy, modify, and/or distribute this software for
      +any purpose with or without fee is hereby granted, provided that the
      +above copyright notice and this permission notice appear in all copies.
      +
      +THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
      +ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
      +WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
      +COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
      +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
      +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
      +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
      +USE OR PERFORMANCE OF THIS SOFTWARE.
      diff --git a/deps/npm/node_modules/make-fetch-happen/lib/agent.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/agent.js
      similarity index 100%
      rename from deps/npm/node_modules/make-fetch-happen/lib/agent.js
      rename to deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/agent.js
      diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/entry.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/entry.js
      new file mode 100644
      index 00000000000000..45141095074ecb
      --- /dev/null
      +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/entry.js
      @@ -0,0 +1,469 @@
      +const { Request, Response } = require('minipass-fetch')
      +const { Minipass } = require('minipass')
      +const MinipassFlush = require('minipass-flush')
      +const cacache = require('cacache')
      +const url = require('url')
      +
      +const CachingMinipassPipeline = require('../pipeline.js')
      +const CachePolicy = require('./policy.js')
      +const cacheKey = require('./key.js')
      +const remote = require('../remote.js')
      +
      +const hasOwnProperty = (obj, prop) => Object.prototype.hasOwnProperty.call(obj, prop)
      +
      +// allow list for request headers that will be written to the cache index
      +// note: we will also store any request headers
      +// that are named in a response's vary header
      +const KEEP_REQUEST_HEADERS = [
      +  'accept-charset',
      +  'accept-encoding',
      +  'accept-language',
      +  'accept',
      +  'cache-control',
      +]
      +
      +// allow list for response headers that will be written to the cache index
      +// note: we must not store the real response's age header, or when we load
      +// a cache policy based on the metadata it will think the cached response
      +// is always stale
      +const KEEP_RESPONSE_HEADERS = [
      +  'cache-control',
      +  'content-encoding',
      +  'content-language',
      +  'content-type',
      +  'date',
      +  'etag',
      +  'expires',
      +  'last-modified',
      +  'link',
      +  'location',
      +  'pragma',
      +  'vary',
      +]
      +
      +// return an object containing all metadata to be written to the index
      +const getMetadata = (request, response, options) => {
      +  const metadata = {
      +    time: Date.now(),
      +    url: request.url,
      +    reqHeaders: {},
      +    resHeaders: {},
      +
      +    // options on which we must match the request and vary the response
      +    options: {
      +      compress: options.compress != null ? options.compress : request.compress,
      +    },
      +  }
      +
      +  // only save the status if it's not a 200 or 304
      +  if (response.status !== 200 && response.status !== 304) {
      +    metadata.status = response.status
      +  }
      +
      +  for (const name of KEEP_REQUEST_HEADERS) {
      +    if (request.headers.has(name)) {
      +      metadata.reqHeaders[name] = request.headers.get(name)
      +    }
      +  }
      +
      +  // if the request's host header differs from the host in the url
      +  // we need to keep it, otherwise it's just noise and we ignore it
      +  const host = request.headers.get('host')
      +  const parsedUrl = new url.URL(request.url)
      +  if (host && parsedUrl.host !== host) {
      +    metadata.reqHeaders.host = host
      +  }
      +
      +  // if the response has a vary header, make sure
      +  // we store the relevant request headers too
      +  if (response.headers.has('vary')) {
      +    const vary = response.headers.get('vary')
      +    // a vary of "*" means every header causes a different response.
      +    // in that scenario, we do not include any additional headers
      +    // as the freshness check will always fail anyway and we don't
      +    // want to bloat the cache indexes
      +    if (vary !== '*') {
      +      // copy any other request headers that will vary the response
      +      const varyHeaders = vary.trim().toLowerCase().split(/\s*,\s*/)
      +      for (const name of varyHeaders) {
      +        if (request.headers.has(name)) {
      +          metadata.reqHeaders[name] = request.headers.get(name)
      +        }
      +      }
      +    }
      +  }
      +
      +  for (const name of KEEP_RESPONSE_HEADERS) {
      +    if (response.headers.has(name)) {
      +      metadata.resHeaders[name] = response.headers.get(name)
      +    }
      +  }
      +
      +  for (const name of options.cacheAdditionalHeaders) {
      +    if (response.headers.has(name)) {
      +      metadata.resHeaders[name] = response.headers.get(name)
      +    }
      +  }
      +
      +  return metadata
      +}
      +
      +// symbols used to hide objects that may be lazily evaluated in a getter
      +const _request = Symbol('request')
      +const _response = Symbol('response')
      +const _policy = Symbol('policy')
      +
      +class CacheEntry {
      +  constructor ({ entry, request, response, options }) {
      +    if (entry) {
      +      this.key = entry.key
      +      this.entry = entry
      +      // previous versions of this module didn't write an explicit timestamp in
      +      // the metadata, so fall back to the entry's timestamp. we can't use the
      +      // entry timestamp to determine staleness because cacache will update it
      +      // when it verifies its data
      +      this.entry.metadata.time = this.entry.metadata.time || this.entry.time
      +    } else {
      +      this.key = cacheKey(request)
      +    }
      +
      +    this.options = options
      +
      +    // these properties are behind getters that lazily evaluate
      +    this[_request] = request
      +    this[_response] = response
      +    this[_policy] = null
      +  }
      +
      +  // returns a CacheEntry instance that satisfies the given request
      +  // or undefined if no existing entry satisfies
      +  static async find (request, options) {
      +    try {
      +      // compacts the index and returns an array of unique entries
      +      var matches = await cacache.index.compact(options.cachePath, cacheKey(request), (A, B) => {
      +        const entryA = new CacheEntry({ entry: A, options })
      +        const entryB = new CacheEntry({ entry: B, options })
      +        return entryA.policy.satisfies(entryB.request)
      +      }, {
      +        validateEntry: (entry) => {
      +          // clean out entries with a buggy content-encoding value
      +          if (entry.metadata &&
      +              entry.metadata.resHeaders &&
      +              entry.metadata.resHeaders['content-encoding'] === null) {
      +            return false
      +          }
      +
      +          // if an integrity is null, it needs to have a status specified
      +          if (entry.integrity === null) {
      +            return !!(entry.metadata && entry.metadata.status)
      +          }
      +
      +          return true
      +        },
      +      })
      +    } catch (err) {
      +      // if the compact request fails, ignore the error and return
      +      return
      +    }
      +
      +    // a cache mode of 'reload' means to behave as though we have no cache
      +    // on the way to the network. return undefined to allow cacheFetch to
      +    // create a brand new request no matter what.
      +    if (options.cache === 'reload') {
      +      return
      +    }
      +
      +    // find the specific entry that satisfies the request
      +    let match
      +    for (const entry of matches) {
      +      const _entry = new CacheEntry({
      +        entry,
      +        options,
      +      })
      +
      +      if (_entry.policy.satisfies(request)) {
      +        match = _entry
      +        break
      +      }
      +    }
      +
      +    return match
      +  }
      +
      +  // if the user made a PUT/POST/PATCH then we invalidate our
      +  // cache for the same url by deleting the index entirely
      +  static async invalidate (request, options) {
      +    const key = cacheKey(request)
      +    try {
      +      await cacache.rm.entry(options.cachePath, key, { removeFully: true })
      +    } catch (err) {
      +      // ignore errors
      +    }
      +  }
      +
      +  get request () {
      +    if (!this[_request]) {
      +      this[_request] = new Request(this.entry.metadata.url, {
      +        method: 'GET',
      +        headers: this.entry.metadata.reqHeaders,
      +        ...this.entry.metadata.options,
      +      })
      +    }
      +
      +    return this[_request]
      +  }
      +
      +  get response () {
      +    if (!this[_response]) {
      +      this[_response] = new Response(null, {
      +        url: this.entry.metadata.url,
      +        counter: this.options.counter,
      +        status: this.entry.metadata.status || 200,
      +        headers: {
      +          ...this.entry.metadata.resHeaders,
      +          'content-length': this.entry.size,
      +        },
      +      })
      +    }
      +
      +    return this[_response]
      +  }
      +
      +  get policy () {
      +    if (!this[_policy]) {
      +      this[_policy] = new CachePolicy({
      +        entry: this.entry,
      +        request: this.request,
      +        response: this.response,
      +        options: this.options,
      +      })
      +    }
      +
      +    return this[_policy]
      +  }
      +
      +  // wraps the response in a pipeline that stores the data
      +  // in the cache while the user consumes it
      +  async store (status) {
      +    // if the request was not a GET, we got a status other than
      +    // 200, 301, or 308, or the CachePolicy forbids storage, append
      +    // the cache status header and return the response untouched
      +    if (
      +      this.request.method !== 'GET' ||
      +      ![200, 301, 308].includes(this.response.status) ||
      +      !this.policy.storable()
      +    ) {
      +      this.response.headers.set('x-local-cache-status', 'skip')
      +      return this.response
      +    }
      +
      +    const size = this.response.headers.get('content-length')
      +    const cacheOpts = {
      +      algorithms: this.options.algorithms,
      +      metadata: getMetadata(this.request, this.response, this.options),
      +      size,
      +      integrity: this.options.integrity,
      +      integrityEmitter: this.response.body.hasIntegrityEmitter && this.response.body,
      +    }
      +
      +    let body = null
      +    // we only set a body if the status is a 200; redirects are
      +    // stored as metadata only
      +    if (this.response.status === 200) {
      +      let cacheWriteResolve, cacheWriteReject
      +      const cacheWritePromise = new Promise((resolve, reject) => {
      +        cacheWriteResolve = resolve
      +        cacheWriteReject = reject
      +      })
      +
      +      body = new CachingMinipassPipeline({ events: ['integrity', 'size'] }, new MinipassFlush({
      +        flush () {
      +          return cacheWritePromise
      +        },
      +      }))
      +      // this is always true since if we aren't reusing the one from the remote fetch, we
      +      // are using the one from cacache
      +      body.hasIntegrityEmitter = true
      +
      +      const onResume = () => {
      +        const tee = new Minipass()
      +        const cacheStream = cacache.put.stream(this.options.cachePath, this.key, cacheOpts)
      +        // re-emit the integrity and size events on our new response body so they can be reused
      +        cacheStream.on('integrity', i => body.emit('integrity', i))
      +        cacheStream.on('size', s => body.emit('size', s))
      +        // stick a flag on here so downstream users will know if they can expect integrity events
      +        tee.pipe(cacheStream)
      +        // TODO if the cache write fails, log a warning but return the response anyway
      +        // eslint-disable-next-line promise/catch-or-return
      +        cacheStream.promise().then(cacheWriteResolve, cacheWriteReject)
      +        body.unshift(tee)
      +        body.unshift(this.response.body)
      +      }
      +
      +      body.once('resume', onResume)
      +      body.once('end', () => body.removeListener('resume', onResume))
      +    } else {
      +      await cacache.index.insert(this.options.cachePath, this.key, null, cacheOpts)
      +    }
      +
      +    // note: we do not set the x-local-cache-hash header because we do not know
      +    // the hash value until after the write to the cache completes, which doesn't
      +    // happen until after the response has been sent and it's too late to write
      +    // the header anyway
      +    this.response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath))
      +    this.response.headers.set('x-local-cache-key', encodeURIComponent(this.key))
      +    this.response.headers.set('x-local-cache-mode', 'stream')
      +    this.response.headers.set('x-local-cache-status', status)
      +    this.response.headers.set('x-local-cache-time', new Date().toISOString())
      +    const newResponse = new Response(body, {
      +      url: this.response.url,
      +      status: this.response.status,
      +      headers: this.response.headers,
      +      counter: this.options.counter,
      +    })
      +    return newResponse
      +  }
      +
      +  // use the cached data to create a response and return it
      +  async respond (method, options, status) {
      +    let response
      +    if (method === 'HEAD' || [301, 308].includes(this.response.status)) {
      +      // if the request is a HEAD, or the response is a redirect,
      +      // then the metadata in the entry already includes everything
      +      // we need to build a response
      +      response = this.response
      +    } else {
      +      // we're responding with a full cached response, so create a body
      +      // that reads from cacache and attach it to a new Response
      +      const body = new Minipass()
      +      const headers = { ...this.policy.responseHeaders() }
      +
      +      const onResume = () => {
      +        const cacheStream = cacache.get.stream.byDigest(
      +          this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }
      +        )
      +        cacheStream.on('error', async (err) => {
      +          cacheStream.pause()
      +          if (err.code === 'EINTEGRITY') {
      +            await cacache.rm.content(
      +              this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }
      +            )
      +          }
      +          if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') {
      +            await CacheEntry.invalidate(this.request, this.options)
      +          }
      +          body.emit('error', err)
      +          cacheStream.resume()
      +        })
      +        // emit the integrity and size events based on our metadata so we're consistent
      +        body.emit('integrity', this.entry.integrity)
      +        body.emit('size', Number(headers['content-length']))
      +        cacheStream.pipe(body)
      +      }
      +
      +      body.once('resume', onResume)
      +      body.once('end', () => body.removeListener('resume', onResume))
      +      response = new Response(body, {
      +        url: this.entry.metadata.url,
      +        counter: options.counter,
      +        status: 200,
      +        headers,
      +      })
      +    }
      +
      +    response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath))
      +    response.headers.set('x-local-cache-hash', encodeURIComponent(this.entry.integrity))
      +    response.headers.set('x-local-cache-key', encodeURIComponent(this.key))
      +    response.headers.set('x-local-cache-mode', 'stream')
      +    response.headers.set('x-local-cache-status', status)
      +    response.headers.set('x-local-cache-time', new Date(this.entry.metadata.time).toUTCString())
      +    return response
      +  }
      +
      +  // use the provided request along with this cache entry to
      +  // revalidate the stored response. returns a response, either
      +  // from the cache or from the update
      +  async revalidate (request, options) {
      +    const revalidateRequest = new Request(request, {
      +      headers: this.policy.revalidationHeaders(request),
      +    })
      +
      +    try {
      +      // NOTE: be sure to remove the headers property from the
      +      // user supplied options, since we have already defined
      +      // them on the new request object. if they're still in the
      +      // options then those will overwrite the ones from the policy
      +      var response = await remote(revalidateRequest, {
      +        ...options,
      +        headers: undefined,
      +      })
      +    } catch (err) {
      +      // if the network fetch fails, return the stale
      +      // cached response unless it has a cache-control
      +      // of 'must-revalidate'
      +      if (!this.policy.mustRevalidate) {
      +        return this.respond(request.method, options, 'stale')
      +      }
      +
      +      throw err
      +    }
      +
      +    if (this.policy.revalidated(revalidateRequest, response)) {
      +      // we got a 304, write a new index to the cache and respond from cache
      +      const metadata = getMetadata(request, response, options)
      +      // 304 responses do not include headers that are specific to the response data
      +      // since they do not include a body, so we copy values for headers that were
      +      // in the old cache entry to the new one, if the new metadata does not already
      +      // include that header
      +      for (const name of KEEP_RESPONSE_HEADERS) {
      +        if (
      +          !hasOwnProperty(metadata.resHeaders, name) &&
      +          hasOwnProperty(this.entry.metadata.resHeaders, name)
      +        ) {
      +          metadata.resHeaders[name] = this.entry.metadata.resHeaders[name]
      +        }
      +      }
      +
      +      for (const name of options.cacheAdditionalHeaders) {
      +        const inMeta = hasOwnProperty(metadata.resHeaders, name)
      +        const inEntry = hasOwnProperty(this.entry.metadata.resHeaders, name)
      +        const inPolicy = hasOwnProperty(this.policy.response.headers, name)
      +
      +        // if the header is in the existing entry, but it is not in the metadata
      +        // then we need to write it to the metadata as this will refresh the on-disk cache
      +        if (!inMeta && inEntry) {
      +          metadata.resHeaders[name] = this.entry.metadata.resHeaders[name]
      +        }
      +        // if the header is in the metadata, but not in the policy, then we need to set
      +        // it in the policy so that it's included in the immediate response. future
      +        // responses will load a new cache entry, so we don't need to change that
      +        if (!inPolicy && inMeta) {
      +          this.policy.response.headers[name] = metadata.resHeaders[name]
      +        }
      +      }
      +
      +      try {
      +        await cacache.index.insert(options.cachePath, this.key, this.entry.integrity, {
      +          size: this.entry.size,
      +          metadata,
      +        })
      +      } catch (err) {
      +        // if updating the cache index fails, we ignore it and
      +        // respond anyway
      +      }
      +      return this.respond(request.method, options, 'revalidated')
      +    }
      +
      +    // if we got a modified response, create a new entry based on it
      +    const newEntry = new CacheEntry({
      +      request,
      +      response,
      +      options,
      +    })
      +
      +    // respond with the new entry while writing it to the cache
      +    return newEntry.store('updated')
      +  }
      +}
      +
      +module.exports = CacheEntry
      diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/errors.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/errors.js
      new file mode 100644
      index 00000000000000..67a66573bebe66
      --- /dev/null
      +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/errors.js
      @@ -0,0 +1,11 @@
      +class NotCachedError extends Error {
      +  constructor (url) {
      +    /* eslint-disable-next-line max-len */
      +    super(`request to ${url} failed: cache mode is 'only-if-cached' but no cached response is available.`)
      +    this.code = 'ENOTCACHED'
      +  }
      +}
      +
      +module.exports = {
      +  NotCachedError,
      +}
      diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/index.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/index.js
      new file mode 100644
      index 00000000000000..0de49d23fb9336
      --- /dev/null
      +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/index.js
      @@ -0,0 +1,49 @@
      +const { NotCachedError } = require('./errors.js')
      +const CacheEntry = require('./entry.js')
      +const remote = require('../remote.js')
      +
      +// do whatever is necessary to get a Response and return it
      +const cacheFetch = async (request, options) => {
      +  // try to find a cached entry that satisfies this request
      +  const entry = await CacheEntry.find(request, options)
      +  if (!entry) {
      +    // no cached result, if the cache mode is 'only-if-cached' that's a failure
      +    if (options.cache === 'only-if-cached') {
      +      throw new NotCachedError(request.url)
      +    }
      +
      +    // otherwise, we make a request, store it and return it
      +    const response = await remote(request, options)
      +    const newEntry = new CacheEntry({ request, response, options })
      +    return newEntry.store('miss')
      +  }
      +
      +  // we have a cached response that satisfies this request, however if the cache
      +  // mode is 'no-cache' then we send the revalidation request no matter what
      +  if (options.cache === 'no-cache') {
      +    return entry.revalidate(request, options)
      +  }
      +
      +  // if the cached entry is not stale, or if the cache mode is 'force-cache' or
      +  // 'only-if-cached' we can respond with the cached entry. set the status
      +  // based on the result of needsRevalidation and respond
      +  const _needsRevalidation = entry.policy.needsRevalidation(request)
      +  if (options.cache === 'force-cache' ||
      +      options.cache === 'only-if-cached' ||
      +      !_needsRevalidation) {
      +    return entry.respond(request.method, options, _needsRevalidation ? 'stale' : 'hit')
      +  }
      +
      +  // if we got here, the cache entry is stale so revalidate it
      +  return entry.revalidate(request, options)
      +}
      +
      +cacheFetch.invalidate = async (request, options) => {
      +  if (!options.cachePath) {
      +    return
      +  }
      +
      +  return CacheEntry.invalidate(request, options)
      +}
      +
      +module.exports = cacheFetch
      diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/key.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/key.js
      new file mode 100644
      index 00000000000000..f7684d562b7fae
      --- /dev/null
      +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/key.js
      @@ -0,0 +1,17 @@
      +const { URL, format } = require('url')
      +
      +// options passed to url.format() when generating a key
      +const formatOptions = {
      +  auth: false,
      +  fragment: false,
      +  search: true,
      +  unicode: false,
      +}
      +
      +// returns a string to be used as the cache key for the Request
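      +// e.g. a GET for https://registry.npmjs.org/abbrev?foo=1 would produce
      +// 'make-fetch-happen:request-cache:https://registry.npmjs.org/abbrev?foo=1'
      +// (credentials and fragments are dropped per formatOptions above)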
      +const cacheKey = (request) => {
      +  const parsed = new URL(request.url)
      +  return `make-fetch-happen:request-cache:${format(parsed, formatOptions)}`
      +}
      +
      +module.exports = cacheKey
      diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/policy.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/policy.js
      new file mode 100644
      index 00000000000000..ada3c8600dae92
      --- /dev/null
      +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/policy.js
      @@ -0,0 +1,161 @@
      +const CacheSemantics = require('http-cache-semantics')
      +const Negotiator = require('negotiator')
      +const ssri = require('ssri')
      +
      +// options passed to http-cache-semantics constructor
      +const policyOptions = {
      +  shared: false,
      +  ignoreCargoCult: true,
      +}
      +
      +// a fake empty response, used when only testing the
      +// request for storability
      +const emptyResponse = { status: 200, headers: {} }
      +
      +// returns a plain object representation of the Request
      +const requestObject = (request) => {
      +  const _obj = {
      +    method: request.method,
      +    url: request.url,
      +    headers: {},
      +    compress: request.compress,
      +  }
      +
      +  request.headers.forEach((value, key) => {
      +    _obj.headers[key] = value
      +  })
      +
      +  return _obj
      +}
      +
      +// returns a plain object representation of the Response
      +const responseObject = (response) => {
      +  const _obj = {
      +    status: response.status,
      +    headers: {},
      +  }
      +
      +  response.headers.forEach((value, key) => {
      +    _obj.headers[key] = value
      +  })
      +
      +  return _obj
      +}
      +
      +class CachePolicy {
      +  constructor ({ entry, request, response, options }) {
      +    this.entry = entry
      +    this.request = requestObject(request)
      +    this.response = responseObject(response)
      +    this.options = options
      +    this.policy = new CacheSemantics(this.request, this.response, policyOptions)
      +
      +    if (this.entry) {
      +      // if we have an entry, copy the timestamp to the _responseTime
      +      // this is necessary because the CacheSemantics constructor forces
      +      // the value to Date.now() which means a policy created from a
      +      // cache entry is likely to always identify itself as stale
      +      this.policy._responseTime = this.entry.metadata.time
      +    }
      +  }
      +
      +  // static method to quickly determine if a request alone is storable
      +  static storable (request, options) {
      +    // no cachePath means no caching
      +    if (!options.cachePath) {
      +      return false
      +    }
      +
      +    // user explicitly asked not to cache
      +    if (options.cache === 'no-store') {
      +      return false
      +    }
      +
      +    // we only cache GET and HEAD requests
      +    if (!['GET', 'HEAD'].includes(request.method)) {
      +      return false
      +    }
      +
      +    // otherwise, let http-cache-semantics make the decision
      +    // based on the request's headers
      +    const policy = new CacheSemantics(requestObject(request), emptyResponse, policyOptions)
      +    return policy.storable()
      +  }
      +
      +  // returns true if the policy satisfies the request
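      +  // (the host header, compress flag, and negotiated media types, languages,
      +  // and encodings must all match; if an integrity was requested it must
      +  // match the stored entry's integrity)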
      +  satisfies (request) {
      +    const _req = requestObject(request)
      +    if (this.request.headers.host !== _req.headers.host) {
      +      return false
      +    }
      +
      +    if (this.request.compress !== _req.compress) {
      +      return false
      +    }
      +
      +    const negotiatorA = new Negotiator(this.request)
      +    const negotiatorB = new Negotiator(_req)
      +
      +    if (JSON.stringify(negotiatorA.mediaTypes()) !== JSON.stringify(negotiatorB.mediaTypes())) {
      +      return false
      +    }
      +
      +    if (JSON.stringify(negotiatorA.languages()) !== JSON.stringify(negotiatorB.languages())) {
      +      return false
      +    }
      +
      +    if (JSON.stringify(negotiatorA.encodings()) !== JSON.stringify(negotiatorB.encodings())) {
      +      return false
      +    }
      +
      +    if (this.options.integrity) {
      +      return ssri.parse(this.options.integrity).match(this.entry.integrity)
      +    }
      +
      +    return true
      +  }
      +
      +  // returns true if the request and response allow caching
      +  storable () {
      +    return this.policy.storable()
      +  }
      +
      +  // NOTE: this is a hack to avoid parsing the cache-control
      +  // header ourselves, it returns true if the response's
      +  // cache-control contains must-revalidate
      +  get mustRevalidate () {
      +    return !!this.policy._rescc['must-revalidate']
      +  }
      +
      +  // returns true if the cached response requires revalidation
      +  // for the given request
      +  needsRevalidation (request) {
      +    const _req = requestObject(request)
      +    // force method to GET because we only cache GETs
      +    // but can serve a HEAD from a cached GET
      +    _req.method = 'GET'
      +    return !this.policy.satisfiesWithoutRevalidation(_req)
      +  }
      +
      +  responseHeaders () {
      +    return this.policy.responseHeaders()
      +  }
      +
      +  // returns a new object containing the appropriate headers
      +  // to send a revalidation request
      +  revalidationHeaders (request) {
      +    const _req = requestObject(request)
      +    return this.policy.revalidationHeaders(_req)
      +  }
      +
      +  // returns true if the request/response was revalidated
      +  // successfully. returns false if a new response was received
      +  revalidated (request, response) {
      +    const _req = requestObject(request)
      +    const _res = responseObject(response)
      +    const policy = this.policy.revalidatedPolicy(_req, _res)
      +    return !policy.modified
      +  }
      +}
      +
      +module.exports = CachePolicy
      diff --git a/deps/npm/node_modules/make-fetch-happen/lib/dns.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/dns.js
      similarity index 100%
      rename from deps/npm/node_modules/make-fetch-happen/lib/dns.js
      rename to deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/dns.js
      diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/fetch.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/fetch.js
      new file mode 100644
      index 00000000000000..233ba67e165502
      --- /dev/null
      +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/fetch.js
      @@ -0,0 +1,118 @@
      +'use strict'
      +
      +const { FetchError, Request, isRedirect } = require('minipass-fetch')
      +const url = require('url')
      +
      +const CachePolicy = require('./cache/policy.js')
      +const cache = require('./cache/index.js')
      +const remote = require('./remote.js')
      +
      +// given a Request, a Response and user options
      +// return true if the response is a redirect that
      +// can be followed. we throw errors that will result
      +// in the fetch being rejected if the redirect is
      +// possible but invalid for some reason
      +const canFollowRedirect = (request, response, options) => {
      +  if (!isRedirect(response.status)) {
      +    return false
      +  }
      +
      +  if (options.redirect === 'manual') {
      +    return false
      +  }
      +
      +  if (options.redirect === 'error') {
      +    throw new FetchError(`redirect mode is set to error: ${request.url}`,
      +      'no-redirect', { code: 'ENOREDIRECT' })
      +  }
      +
      +  if (!response.headers.has('location')) {
      +    throw new FetchError(`redirect location header missing for: ${request.url}`,
      +      'no-location', { code: 'EINVALIDREDIRECT' })
      +  }
      +
      +  if (request.counter >= request.follow) {
      +    throw new FetchError(`maximum redirect reached at: ${request.url}`,
      +      'max-redirect', { code: 'EMAXREDIRECT' })
      +  }
      +
      +  return true
      +}
      +
      +// given a Request, a Response, and the user's options return an object
      +// with a new Request and a new options object that will be used for
      +// following the redirect
      +const getRedirect = (request, response, options) => {
      +  const _opts = { ...options }
      +  const location = response.headers.get('location')
      +  const redirectUrl = new url.URL(location, /^https?:/.test(location) ? undefined : request.url)
      +  // Comment below is used under the following license:
      +  /**
      +   * @license
      +   * Copyright (c) 2010-2012 Mikeal Rogers
      +   * Licensed under the Apache License, Version 2.0 (the "License");
      +   * you may not use this file except in compliance with the License.
      +   * You may obtain a copy of the License at
      +   * http://www.apache.org/licenses/LICENSE-2.0
      +   * Unless required by applicable law or agreed to in writing,
      +   * software distributed under the License is distributed on an "AS
      +   * IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
      +   * express or implied. See the License for the specific language
      +   * governing permissions and limitations under the License.
      +   */
      +
      +  // Remove authorization if changing hostnames (but not if just
      +  // changing ports or protocols).  This matches the behavior of request:
      +  // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138
      +  if (new url.URL(request.url).hostname !== redirectUrl.hostname) {
      +    request.headers.delete('authorization')
      +    request.headers.delete('cookie')
      +  }
      +
      +  // for POST request with 301/302 response, or any request with 303 response,
      +  // use GET when following redirect
      +  if (
      +    response.status === 303 ||
      +    (request.method === 'POST' && [301, 302].includes(response.status))
      +  ) {
      +    _opts.method = 'GET'
      +    _opts.body = null
      +    request.headers.delete('content-length')
      +  }
      +
      +  _opts.headers = {}
      +  request.headers.forEach((value, key) => {
      +    _opts.headers[key] = value
      +  })
      +
      +  _opts.counter = ++request.counter
      +  const redirectReq = new Request(url.format(redirectUrl), _opts)
      +  return {
      +    request: redirectReq,
      +    options: _opts,
      +  }
      +}
      +
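      +// top-level fetch: go through the cache when the request is storable,
      +// hit the network directly otherwise, invalidate cached entries after
      +// successful non-GET/HEAD requests, then follow redirects recursively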
      +const fetch = async (request, options) => {
      +  const response = CachePolicy.storable(request, options)
      +    ? await cache(request, options)
      +    : await remote(request, options)
      +
      +  // if the request wasn't a GET or HEAD, and the response
      +  // status is between 200 and 399 inclusive, invalidate the
      +  // request url
      +  if (!['GET', 'HEAD'].includes(request.method) &&
      +      response.status >= 200 &&
      +      response.status <= 399) {
      +    await cache.invalidate(request, options)
      +  }
      +
      +  if (!canFollowRedirect(request, response, options)) {
      +    return response
      +  }
      +
      +  const redirect = getRedirect(request, response, options)
      +  return fetch(redirect.request, redirect.options)
      +}
      +
      +module.exports = fetch
      diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/index.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/index.js
      new file mode 100644
      index 00000000000000..2f12e8e1b61131
      --- /dev/null
      +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/index.js
      @@ -0,0 +1,41 @@
      +const { FetchError, Headers, Request, Response } = require('minipass-fetch')
      +
      +const configureOptions = require('./options.js')
      +const fetch = require('./fetch.js')
      +
      +const makeFetchHappen = (url, opts) => {
      +  const options = configureOptions(opts)
      +
      +  const request = new Request(url, options)
      +  return fetch(request, options)
      +}
      +
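      +// create a fetch with baked-in defaults that are merged (headers included)
      +// into the options of every call, e.g.
      +//   const cachedFetch = makeFetchHappen.defaults({ cachePath: '/some/cache/dir' })
      +//   cachedFetch('https://registry.npmjs.org/abbrev')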
      +makeFetchHappen.defaults = (defaultUrl, defaultOptions = {}, wrappedFetch = makeFetchHappen) => {
      +  if (typeof defaultUrl === 'object') {
      +    defaultOptions = defaultUrl
      +    defaultUrl = null
      +  }
      +
      +  const defaultedFetch = (url, options = {}) => {
      +    const finalUrl = url || defaultUrl
      +    const finalOptions = {
      +      ...defaultOptions,
      +      ...options,
      +      headers: {
      +        ...defaultOptions.headers,
      +        ...options.headers,
      +      },
      +    }
      +    return wrappedFetch(finalUrl, finalOptions)
      +  }
      +
      +  defaultedFetch.defaults = (defaultUrl1, defaultOptions1 = {}) =>
      +    makeFetchHappen.defaults(defaultUrl1, defaultOptions1, defaultedFetch)
      +  return defaultedFetch
      +}
      +
      +module.exports = makeFetchHappen
      +module.exports.FetchError = FetchError
      +module.exports.Headers = Headers
      +module.exports.Request = Request
      +module.exports.Response = Response
      diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/options.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/options.js
      new file mode 100644
      index 00000000000000..f77511279f831d
      --- /dev/null
      +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/options.js
      @@ -0,0 +1,54 @@
      +const dns = require('dns')
      +
      +const conditionalHeaders = [
      +  'if-modified-since',
      +  'if-none-match',
      +  'if-unmodified-since',
      +  'if-match',
      +  'if-range',
      +]
      +
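      +// normalize user-supplied options, e.g. retry: 3 and retry: '3' both become
      +// retry: { retries: 3 }, strictSSL: false becomes rejectUnauthorized: false,
      +// and a conditional request header under cache: 'default' switches to 'no-store'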
      +const configureOptions = (opts) => {
      +  const { strictSSL, ...options } = { ...opts }
      +  options.method = options.method ? options.method.toUpperCase() : 'GET'
      +  options.rejectUnauthorized = strictSSL !== false
      +
      +  if (!options.retry) {
      +    options.retry = { retries: 0 }
      +  } else if (typeof options.retry === 'string') {
      +    const retries = parseInt(options.retry, 10)
      +    if (isFinite(retries)) {
      +      options.retry = { retries }
      +    } else {
      +      options.retry = { retries: 0 }
      +    }
      +  } else if (typeof options.retry === 'number') {
      +    options.retry = { retries: options.retry }
      +  } else {
      +    options.retry = { retries: 0, ...options.retry }
      +  }
      +
      +  options.dns = { ttl: 5 * 60 * 1000, lookup: dns.lookup, ...options.dns }
      +
      +  options.cache = options.cache || 'default'
      +  if (options.cache === 'default') {
      +    const hasConditionalHeader = Object.keys(options.headers || {}).some((name) => {
      +      return conditionalHeaders.includes(name.toLowerCase())
      +    })
      +    if (hasConditionalHeader) {
      +      options.cache = 'no-store'
      +    }
      +  }
      +
      +  options.cacheAdditionalHeaders = options.cacheAdditionalHeaders || []
      +
      +  // cacheManager is deprecated, but if it's set and
      +  // cachePath is not we should copy it to the new field
      +  if (options.cacheManager && !options.cachePath) {
      +    options.cachePath = options.cacheManager
      +  }
      +
      +  return options
      +}
      +
      +module.exports = configureOptions
      diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/pipeline.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/pipeline.js
      new file mode 100644
      index 00000000000000..b1d221b2d0ce31
      --- /dev/null
      +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/pipeline.js
      @@ -0,0 +1,41 @@
      +'use strict'
      +
      +const MinipassPipeline = require('minipass-pipeline')
      +
      +class CachingMinipassPipeline extends MinipassPipeline {
      +  #events = []
      +  #data = new Map()
      +
      +  constructor (opts, ...streams) {
      +    // CRITICAL: do NOT pass the streams to the call to super(), this will start
      +    // the flow of data and potentially cause the events we need to catch to emit
      +    // before we've finished our own setup. instead we call super() with no args,
      +    // finish our setup, and then push the streams into ourselves to start the
      +    // data flow
      +    super()
      +    this.#events = opts.events
      +
      +    /* istanbul ignore next - coverage disabled because this is pointless to test here */
      +    if (streams.length) {
      +      this.push(...streams)
      +    }
      +  }
      +
      +  on (event, handler) {
      +    if (this.#events.includes(event) && this.#data.has(event)) {
      +      return handler(...this.#data.get(event))
      +    }
      +
      +    return super.on(event, handler)
      +  }
      +
      +  emit (event, ...data) {
      +    if (this.#events.includes(event)) {
      +      this.#data.set(event, data)
      +    }
      +
      +    return super.emit(event, ...data)
      +  }
      +}
      +
      +module.exports = CachingMinipassPipeline
      diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/remote.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/remote.js
      new file mode 100644
      index 00000000000000..bdbcc79cad908d
      --- /dev/null
      +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/remote.js
      @@ -0,0 +1,121 @@
      +const { Minipass } = require('minipass')
      +const fetch = require('minipass-fetch')
      +const promiseRetry = require('promise-retry')
      +const ssri = require('ssri')
      +
      +const CachingMinipassPipeline = require('./pipeline.js')
      +const getAgent = require('./agent.js')
      +const pkg = require('../package.json')
      +
      +const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})`
      +
      +const RETRY_ERRORS = [
      +  'ECONNRESET', // remote socket closed on us
      +  'ECONNREFUSED', // remote host refused to open connection
      +  'EADDRINUSE', // failed to bind to a local port (proxy?)
      +  'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW
      +  'ERR_SOCKET_TIMEOUT', // same as above, but this one comes from agentkeepalive
      +  // Known codes we do NOT retry on:
      +  // ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline)
      +]
      +
      +const RETRY_TYPES = [
      +  'request-timeout',
      +]
      +
      +// make a request directly to the remote source,
      +// retrying certain classes of errors as well as
      +// following redirects (through the cache if necessary)
      +// and verifying response integrity
      +const remoteFetch = (request, options) => {
      +  const agent = getAgent(request.url, options)
      +  if (!request.headers.has('connection')) {
      +    request.headers.set('connection', agent ? 'keep-alive' : 'close')
      +  }
      +
      +  if (!request.headers.has('user-agent')) {
      +    request.headers.set('user-agent', USER_AGENT)
      +  }
      +
      +  // keep our own options since we're overriding the agent
      +  // and the redirect mode
      +  const _opts = {
      +    ...options,
      +    agent,
      +    redirect: 'manual',
      +  }
      +
      +  return promiseRetry(async (retryHandler, attemptNum) => {
      +    const req = new fetch.Request(request, _opts)
      +    try {
      +      let res = await fetch(req, _opts)
      +      if (_opts.integrity && res.status === 200) {
      +        // we got a 200 response and the user has specified an expected
      +        // integrity value, so wrap the response in an ssri stream to verify it
      +        const integrityStream = ssri.integrityStream({
      +          algorithms: _opts.algorithms,
      +          integrity: _opts.integrity,
      +          size: _opts.size,
      +        })
      +        const pipeline = new CachingMinipassPipeline({
      +          events: ['integrity', 'size'],
      +        }, res.body, integrityStream)
      +        // we also propagate the integrity and size events out to the pipeline so we can use
      +        // this new response body as an integrityEmitter for cacache
      +        integrityStream.on('integrity', i => pipeline.emit('integrity', i))
      +        integrityStream.on('size', s => pipeline.emit('size', s))
      +        res = new fetch.Response(pipeline, res)
      +        // set an explicit flag so we know if our response body will emit integrity and size
      +        res.body.hasIntegrityEmitter = true
      +      }
      +
      +      res.headers.set('x-fetch-attempts', attemptNum)
      +
      +      // do not retry POST requests, or requests with a streaming body
      +      // do retry requests with a 408, 420, 429 or 500+ status in the response
      +      const isStream = Minipass.isStream(req.body)
      +      const isRetriable = req.method !== 'POST' &&
      +          !isStream &&
      +          ([408, 420, 429].includes(res.status) || res.status >= 500)
      +
      +      if (isRetriable) {
      +        if (typeof options.onRetry === 'function') {
      +          options.onRetry(res)
      +        }
      +
      +        return retryHandler(res)
      +      }
      +
      +      return res
      +    } catch (err) {
      +      const code = (err.code === 'EPROMISERETRY')
      +        ? err.retried.code
      +        : err.code
      +
      +      // err.retried will be the thing that was thrown from above
      +      // if it's a response, we just got a bad status code and we
      +      // can re-throw to allow the retry
      +      const isRetryError = err.retried instanceof fetch.Response ||
      +        (RETRY_ERRORS.includes(code) && RETRY_TYPES.includes(err.type))
      +
      +      if (req.method === 'POST' || isRetryError) {
      +        throw err
      +      }
      +
      +      if (typeof options.onRetry === 'function') {
      +        options.onRetry(err)
      +      }
      +
      +      return retryHandler(err)
      +    }
      +  }, options.retry).catch((err) => {
      +    // don't reject for http errors, just return them
      +    if (err.status >= 400 && err.type !== 'system') {
      +      return err
      +    }
      +
      +    throw err
      +  })
      +}
      +
      +module.exports = remoteFetch
      diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/package.json b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/package.json
      new file mode 100644
      index 00000000000000..fd415dc9966faa
      --- /dev/null
      +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/package.json
      @@ -0,0 +1,78 @@
      +{
      +  "name": "make-fetch-happen",
      +  "version": "11.1.1",
      +  "description": "Opinionated, caching, retrying fetch client",
      +  "main": "lib/index.js",
      +  "files": [
      +    "bin/",
      +    "lib/"
      +  ],
      +  "scripts": {
      +    "test": "tap",
      +    "posttest": "npm run lint",
      +    "eslint": "eslint",
      +    "lint": "eslint \"**/*.js\"",
      +    "lintfix": "npm run lint -- --fix",
      +    "postlint": "template-oss-check",
      +    "snap": "tap",
      +    "template-oss-apply": "template-oss-apply --force"
      +  },
      +  "repository": {
      +    "type": "git",
      +    "url": "https://github.com/npm/make-fetch-happen.git"
      +  },
      +  "keywords": [
      +    "http",
      +    "request",
      +    "fetch",
      +    "mean girls",
      +    "caching",
      +    "cache",
      +    "subresource integrity"
      +  ],
      +  "author": "GitHub Inc.",
      +  "license": "ISC",
      +  "dependencies": {
      +    "agentkeepalive": "^4.2.1",
      +    "cacache": "^17.0.0",
      +    "http-cache-semantics": "^4.1.1",
      +    "http-proxy-agent": "^5.0.0",
      +    "https-proxy-agent": "^5.0.0",
      +    "is-lambda": "^1.0.1",
      +    "lru-cache": "^7.7.1",
      +    "minipass": "^5.0.0",
      +    "minipass-fetch": "^3.0.0",
      +    "minipass-flush": "^1.0.5",
      +    "minipass-pipeline": "^1.2.4",
      +    "negotiator": "^0.6.3",
      +    "promise-retry": "^2.0.1",
      +    "socks-proxy-agent": "^7.0.0",
      +    "ssri": "^10.0.0"
      +  },
      +  "devDependencies": {
      +    "@npmcli/eslint-config": "^4.0.0",
      +    "@npmcli/template-oss": "4.14.1",
      +    "nock": "^13.2.4",
      +    "safe-buffer": "^5.2.1",
      +    "standard-version": "^9.3.2",
      +    "tap": "^16.0.0"
      +  },
      +  "engines": {
      +    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
      +  },
      +  "tap": {
      +    "color": 1,
      +    "files": "test/*.js",
      +    "check-coverage": true,
      +    "timeout": 60,
      +    "nyc-arg": [
      +      "--exclude",
      +      "tap-snapshots/**"
      +    ]
      +  },
      +  "templateOSS": {
      +    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
      +    "version": "4.14.1",
      +    "publish": "true"
      +  }
      +}
      diff --git a/deps/npm/node_modules/node-gyp/node_modules/minipass/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/minipass/LICENSE
      new file mode 100644
      index 00000000000000..97f8e32ed82e4c
      --- /dev/null
      +++ b/deps/npm/node_modules/node-gyp/node_modules/minipass/LICENSE
      @@ -0,0 +1,15 @@
      +The ISC License
      +
      +Copyright (c) 2017-2023 npm, Inc., Isaac Z. Schlueter, and Contributors
      +
      +Permission to use, copy, modify, and/or distribute this software for any
      +purpose with or without fee is hereby granted, provided that the above
      +copyright notice and this permission notice appear in all copies.
      +
      +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
      +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
      +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
      +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
      +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
      +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
      +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
      diff --git a/deps/npm/node_modules/minipass/index.js b/deps/npm/node_modules/node-gyp/node_modules/minipass/index.js
      similarity index 100%
      rename from deps/npm/node_modules/minipass/index.js
      rename to deps/npm/node_modules/node-gyp/node_modules/minipass/index.js
      diff --git a/deps/npm/node_modules/minipass/index.mjs b/deps/npm/node_modules/node-gyp/node_modules/minipass/index.mjs
      similarity index 99%
      rename from deps/npm/node_modules/minipass/index.mjs
      rename to deps/npm/node_modules/node-gyp/node_modules/minipass/index.mjs
      index d1be109c9fc063..89b3fbf1a4d445 100644
      --- a/deps/npm/node_modules/minipass/index.mjs
      +++ b/deps/npm/node_modules/node-gyp/node_modules/minipass/index.mjs
      @@ -698,4 +698,3 @@ export class Minipass extends Stream {
           )
         }
       }
      -
      diff --git a/deps/npm/node_modules/node-gyp/node_modules/minipass/package.json b/deps/npm/node_modules/node-gyp/node_modules/minipass/package.json
      new file mode 100644
      index 00000000000000..0e20e988047f23
      --- /dev/null
      +++ b/deps/npm/node_modules/node-gyp/node_modules/minipass/package.json
      @@ -0,0 +1,76 @@
      +{
      +  "name": "minipass",
      +  "version": "5.0.0",
      +  "description": "minimal implementation of a PassThrough stream",
      +  "main": "./index.js",
      +  "module": "./index.mjs",
      +  "types": "./index.d.ts",
      +  "exports": {
      +    ".": {
      +      "import": {
      +        "types": "./index.d.ts",
      +        "default": "./index.mjs"
      +      },
      +      "require": {
      +        "types": "./index.d.ts",
      +        "default": "./index.js"
      +      }
      +    },
      +    "./package.json": "./package.json"
      +  },
      +  "devDependencies": {
      +    "@types/node": "^17.0.41",
      +    "end-of-stream": "^1.4.0",
      +    "node-abort-controller": "^3.1.1",
      +    "prettier": "^2.6.2",
      +    "tap": "^16.2.0",
      +    "through2": "^2.0.3",
      +    "ts-node": "^10.8.1",
      +    "typedoc": "^0.23.24",
      +    "typescript": "^4.7.3"
      +  },
      +  "scripts": {
      +    "pretest": "npm run prepare",
      +    "presnap": "npm run prepare",
      +    "prepare": "node ./scripts/transpile-to-esm.js",
      +    "snap": "tap",
      +    "test": "tap",
      +    "preversion": "npm test",
      +    "postversion": "npm publish",
      +    "postpublish": "git push origin --follow-tags",
      +    "typedoc": "typedoc ./index.d.ts",
      +    "format": "prettier --write . --loglevel warn"
      +  },
      +  "repository": {
      +    "type": "git",
      +    "url": "git+https://github.com/isaacs/minipass.git"
      +  },
      +  "keywords": [
      +    "passthrough",
      +    "stream"
      +  ],
      +  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
      +  "license": "ISC",
      +  "files": [
      +    "index.d.ts",
      +    "index.js",
      +    "index.mjs"
      +  ],
      +  "tap": {
      +    "check-coverage": true
      +  },
      +  "engines": {
      +    "node": ">=8"
      +  },
      +  "prettier": {
      +    "semi": false,
      +    "printWidth": 80,
      +    "tabWidth": 2,
      +    "useTabs": false,
      +    "singleQuote": true,
      +    "jsxSingleQuote": false,
      +    "bracketSameLine": true,
      +    "arrowParens": "avoid",
      +    "endOfLine": "lf"
      +  }
      +}
      diff --git a/deps/npm/node_modules/normalize-package-data/package.json b/deps/npm/node_modules/normalize-package-data/package.json
      index ec2773bfbe6bf8..48d2371d4a66b5 100644
      --- a/deps/npm/node_modules/normalize-package-data/package.json
      +++ b/deps/npm/node_modules/normalize-package-data/package.json
      @@ -1,6 +1,6 @@
       {
         "name": "normalize-package-data",
      -  "version": "5.0.0",
      +  "version": "6.0.0",
         "author": "GitHub Inc.",
         "description": "Normalizes data that can be found in package.json files.",
         "license": "BSD-2-Clause",
      @@ -21,14 +21,14 @@
           "template-oss-apply": "template-oss-apply --force"
         },
         "dependencies": {
      -    "hosted-git-info": "^6.0.0",
      +    "hosted-git-info": "^7.0.0",
           "is-core-module": "^2.8.1",
           "semver": "^7.3.5",
           "validate-npm-package-license": "^3.0.4"
         },
         "devDependencies": {
      -    "@npmcli/eslint-config": "^3.0.1",
      -    "@npmcli/template-oss": "4.5.1",
      +    "@npmcli/eslint-config": "^4.0.0",
      +    "@npmcli/template-oss": "4.18.0",
           "tap": "^16.0.1"
         },
         "files": [
      @@ -36,11 +36,18 @@
           "lib/"
         ],
         "engines": {
      -    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
      +    "node": "^16.14.0 || >=18.0.0"
         },
         "templateOSS": {
           "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
      -    "version": "4.5.1"
      +    "version": "4.18.0",
      +    "publish": "true",
      +    "ciVersions": [
      +      "16.14.0",
      +      "16.x",
      +      "18.0.0",
      +      "18.x"
      +    ]
         },
         "tap": {
           "branches": 86,
      diff --git a/deps/npm/node_modules/npm-install-checks/lib/index.js b/deps/npm/node_modules/npm-install-checks/lib/index.js
      index fa5f593aaac647..f0ba2c07ad0812 100644
      --- a/deps/npm/node_modules/npm-install-checks/lib/index.js
      +++ b/deps/npm/node_modules/npm-install-checks/lib/index.js
      @@ -22,13 +22,13 @@ const checkEngine = (target, npmVer, nodeVer, force = false) => {
       
       const isMusl = (file) => file.includes('libc.musl-') || file.includes('ld-musl-')
       
      -const checkPlatform = (target, force = false) => {
      +const checkPlatform = (target, force = false, environment = {}) => {
         if (force) {
           return
         }
       
      -  const platform = process.platform
      -  const arch = process.arch
      +  const platform = environment.os || process.platform
      +  const arch = environment.cpu || process.arch
         const osOk = target.os ? checkList(platform, target.os) : true
         const cpuOk = target.cpu ? checkList(arch, target.cpu) : true
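
A minimal sketch of the new third argument, assuming the module's exported checkPlatform function and its EBADPLATFORM error code (neither appears in this hunk; the manifest object is hypothetical): callers can now validate a manifest against an os/cpu other than the running process.

    const { checkPlatform } = require('npm-install-checks')

    const manifest = { _id: 'example@1.0.0', os: ['linux'], cpu: ['x64'] }

    // passes: the supplied environment satisfies the manifest's os/cpu fields
    checkPlatform(manifest, false, { os: 'linux', cpu: 'x64' })

    // rejects: darwin/arm64 is not allowed by the manifest, even if the
    // current process happens to be linux/x64
    try {
      checkPlatform(manifest, false, { os: 'darwin', cpu: 'arm64' })
    } catch (err) {
      console.error(err.code) // expected to be 'EBADPLATFORM'
    }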
       
      diff --git a/deps/npm/node_modules/npm-install-checks/package.json b/deps/npm/node_modules/npm-install-checks/package.json
      index 192cf68837146f..50378808d75d08 100644
      --- a/deps/npm/node_modules/npm-install-checks/package.json
      +++ b/deps/npm/node_modules/npm-install-checks/package.json
      @@ -1,6 +1,6 @@
       {
         "name": "npm-install-checks",
      -  "version": "6.1.1",
      +  "version": "6.2.0",
         "description": "Check the engines and platform fields in package.json",
         "main": "lib/index.js",
         "dependencies": {
      @@ -8,7 +8,7 @@
         },
         "devDependencies": {
           "@npmcli/eslint-config": "^4.0.0",
      -    "@npmcli/template-oss": "4.13.0",
      +    "@npmcli/template-oss": "4.18.0",
           "tap": "^16.0.1"
         },
         "scripts": {
      @@ -39,7 +39,7 @@
         "author": "GitHub Inc.",
         "templateOSS": {
           "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
      -    "version": "4.13.0",
      +    "version": "4.18.0",
           "publish": "true"
         },
         "tap": {
      diff --git a/deps/npm/node_modules/npm-package-arg/lib/npa.js b/deps/npm/node_modules/npm-package-arg/lib/npa.js
      index 36bd18cd9f9a6e..f5ede2326e7b47 100644
      --- a/deps/npm/node_modules/npm-package-arg/lib/npa.js
      +++ b/deps/npm/node_modules/npm-package-arg/lib/npa.js
      @@ -257,40 +257,23 @@ function fromFile (res, where) {
           })
         }
       
      -  // environment switch for testing
      -  if (process.env.NPM_PACKAGE_ARG_8909_STRICT !== '1') {
      -    // XXX backwards compatibility lack of compliance with 8909
      -    // Remove when we want a breaking change to come into RFC compliance.
      -    if (resolvedUrl.host && resolvedUrl.host !== 'localhost') {
      -      const rawSpec = res.rawSpec.replace(/^file:\/\//, 'file:///')
      -      resolvedUrl = new url.URL(rawSpec, `file://${path.resolve(where)}/`)
      -      specUrl = new url.URL(rawSpec)
      -      rawNoPrefix = rawSpec.replace(/^file:/, '')
      -    }
      -    // turn file:/../foo into file:../foo
      -    // for 1, 2 or 3 leading slashes since we attempted
      -    // in the previous step to make it a file protocol url with a leading slash
      -    if (/^\/{1,3}\.\.?(\/|$)/.test(rawNoPrefix)) {
      -      const rawSpec = res.rawSpec.replace(/^file:\/{1,3}/, 'file:')
      -      resolvedUrl = new url.URL(rawSpec, `file://${path.resolve(where)}/`)
      -      specUrl = new url.URL(rawSpec)
      -      rawNoPrefix = rawSpec.replace(/^file:/, '')
      -    }
      -    // XXX end 8909 violation backwards compatibility section
      -  }
      -
      -  // file:foo - relative url to ./foo
      -  // file:/foo - absolute path /foo
      -  // file:///foo - absolute path to /foo, no authority host
      -  // file://localhost/foo - absolute path to /foo, on localhost
      -  // file://foo - absolute path to / on foo host (error!)
      +  // XXX backwards compatibility lack of compliance with RFC 8909
         if (resolvedUrl.host && resolvedUrl.host !== 'localhost') {
      -    const msg = `Invalid file: URL, must be absolute if // present`
      -    throw Object.assign(new Error(msg), {
      -      raw: res.rawSpec,
      -      parsed: resolvedUrl,
      -    })
      +    const rawSpec = res.rawSpec.replace(/^file:\/\//, 'file:///')
      +    resolvedUrl = new url.URL(rawSpec, `file://${path.resolve(where)}/`)
      +    specUrl = new url.URL(rawSpec)
      +    rawNoPrefix = rawSpec.replace(/^file:/, '')
      +  }
      +  // turn file:/../foo into file:../foo
      +  // for 1, 2 or 3 leading slashes since we attempted
      +  // in the previous step to make it a file protocol url with a leading slash
      +  if (/^\/{1,3}\.\.?(\/|$)/.test(rawNoPrefix)) {
      +    const rawSpec = res.rawSpec.replace(/^file:\/{1,3}/, 'file:')
      +    resolvedUrl = new url.URL(rawSpec, `file://${path.resolve(where)}/`)
      +    specUrl = new url.URL(rawSpec)
      +    rawNoPrefix = rawSpec.replace(/^file:/, '')
         }
      +  // XXX end RFC 8909 violation backwards compatibility section
       
         // turn /C:/blah into just C:/blah on windows
         let specPath = decodeURIComponent(specUrl.pathname)
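
Since the hunk above removes the NPM_PACKAGE_ARG_8909_STRICT escape hatch, the compatibility rewrites now apply unconditionally. A small sketch of the behaviour they preserve; the spec and `where` values are hypothetical, and type/fetchSpec are npa's usual result fields:

    const npa = require('npm-package-arg')

    // a relative file: spec resolves against `where`
    const a = npa('file:../sibling-pkg', '/home/me/project')
    // a.type === 'directory'; a.fetchSpec resolves to /home/me/sibling-pkg

    // "file:/../sibling-pkg" (one to three leading slashes before "..") is
    // rewritten to the relative form by the second block above rather than
    // being treated as an absolute /.. path
    const b = npa('file:/../sibling-pkg', '/home/me/project')
    // b.fetchSpec also resolves to /home/me/sibling-pkg
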
      diff --git a/deps/npm/node_modules/npm-package-arg/package.json b/deps/npm/node_modules/npm-package-arg/package.json
      index bb9e71b258a939..9ba1d135f3ebf0 100644
      --- a/deps/npm/node_modules/npm-package-arg/package.json
      +++ b/deps/npm/node_modules/npm-package-arg/package.json
      @@ -1,6 +1,6 @@
       {
         "name": "npm-package-arg",
      -  "version": "10.1.0",
      +  "version": "11.0.0",
         "description": "Parse the things that can be arguments to `npm install`",
         "main": "./lib/npa.js",
         "directories": {
      @@ -11,14 +11,14 @@
           "lib/"
         ],
         "dependencies": {
      -    "hosted-git-info": "^6.0.0",
      +    "hosted-git-info": "^7.0.0",
           "proc-log": "^3.0.0",
           "semver": "^7.3.5",
           "validate-npm-package-name": "^5.0.0"
         },
         "devDependencies": {
           "@npmcli/eslint-config": "^4.0.0",
      -    "@npmcli/template-oss": "4.10.0",
      +    "@npmcli/template-oss": "4.18.0",
           "tap": "^16.0.1"
         },
         "scripts": {
      @@ -43,7 +43,7 @@
         },
         "homepage": "https://github.com/npm/npm-package-arg",
         "engines": {
      -    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
      +    "node": "^16.14.0 || >=18.0.0"
         },
         "tap": {
           "branches": 97,
      @@ -54,6 +54,13 @@
         },
         "templateOSS": {
           "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
      -    "version": "4.10.0"
      +    "version": "4.18.0",
      +    "publish": true,
      +    "ciVersions": [
      +      "16.14.0",
      +      "16.x",
      +      "18.0.0",
      +      "18.x"
      +    ]
         }
       }
      diff --git a/deps/npm/node_modules/npm-packlist/lib/index.js b/deps/npm/node_modules/npm-packlist/lib/index.js
      index 887018bd7d424c..7577cba0b865d4 100644
      --- a/deps/npm/node_modules/npm-packlist/lib/index.js
      +++ b/deps/npm/node_modules/npm-packlist/lib/index.js
      @@ -38,13 +38,22 @@ const defaults = [
       ]
       
       const strictDefaults = [
      -  // these are forcibly included at all levels
      +  // these are forcibly excluded
      +  '/.git',
      +]
      +
      +const allLevels = [
      +  // these are included by default but can be excluded by package.json files array
         '!/readme{,.*[^~$]}',
         '!/copying{,.*[^~$]}',
         '!/license{,.*[^~$]}',
         '!/licence{,.*[^~$]}',
      -  // these are forcibly excluded
      -  '/.git',
      +]
      +
      +const rootOnly = [
      +  /^!.*readme/i,
      +  /^!.*copying/i,
      +  /^!.*licen[sc]e/i,
       ]
       
       const normalizePath = (path) => path.split('\\').join('/')
      @@ -132,6 +141,7 @@ class PackWalker extends IgnoreWalker {
             // known required files for this directory
             this.injectRules(strictRules, [
               ...strictDefaults,
      +        ...allLevels,
               ...this.requiredFiles.map((file) => `!${file}`),
             ])
           }
      @@ -284,6 +294,7 @@ class PackWalker extends IgnoreWalker {
           const ignores = []
           const strict = [
             ...strictDefaults,
      +      ...allLevels,
             '!/package.json',
             '/.git',
             '/node_modules',
      @@ -304,6 +315,9 @@ class PackWalker extends IgnoreWalker {
                 file = file.slice(0, -2)
               }
               const inverse = `!${file}`
      +
      +        this.excludeNonRoot(file)
      +
               try {
                 // if an entry in the files array is a specific file, then we need to include it as a
                 // strict requirement for this package. if it's a directory or a pattern, it's a default
      @@ -352,6 +366,20 @@ class PackWalker extends IgnoreWalker {
           this.injectRules(strictRules, strict, callback)
         }
       
      +  // excludes non root files by checking if elements from the files array in
      +  // package.json contain an ! and readme/license/licence/copying, and then
      +  // removing readme/license/licence/copying accordingly from strict defaults
      +  excludeNonRoot (file) {
      +    // Find the pattern
      +    const matchingPattern = rootOnly.find(regex => regex.test(file))
      +
      +    if (matchingPattern) {
      +      // Find which index matches the pattern and remove it from allLevels
      +      const indexToRemove = allLevels.findIndex(element => matchingPattern.test(element))
      +      allLevels.splice(indexToRemove, 1)
      +    }
      +  }
      +
         // custom method: after we've finished gathering the files for the root package, we call this
         // before emitting the 'done' event in order to gather all of the files for bundled deps
         async gatherBundles () {
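
A usage sketch for the rootOnly/allLevels split above, assuming the Arborist-tree calling convention npm-packlist documents; the package path and files array are hypothetical. A negated files entry that matches one of the rootOnly patterns removes the corresponding allLevels default, so non-root readme/license copies stop being force-included, per the excludeNonRoot comment:

    // package.json of the package being packed (illustrative):
    //   { "files": ["lib", "!**/readme.md"] }
    const Arborist = require('@npmcli/arborist')
    const packlist = require('npm-packlist')

    const arborist = new Arborist({ path: '/path/to/pkg' })
    arborist.loadActual()
      .then(tree => packlist(tree))
      .then(files => {
        // readme copies nested under lib/ are no longer forced into the list
        console.log(files)
      })
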
      diff --git a/deps/npm/node_modules/npm-packlist/package.json b/deps/npm/node_modules/npm-packlist/package.json
      index 6023ad34df3b42..460ca7e30ad23f 100644
      --- a/deps/npm/node_modules/npm-packlist/package.json
      +++ b/deps/npm/node_modules/npm-packlist/package.json
      @@ -1,6 +1,6 @@
       {
         "name": "npm-packlist",
      -  "version": "7.0.4",
      +  "version": "8.0.0",
         "description": "Get a list of the files to add from a folder into an npm package",
         "directories": {
           "test": "test"
      @@ -18,7 +18,7 @@
         "devDependencies": {
           "@npmcli/arborist": "^6.0.0 || ^6.0.0-pre.0",
           "@npmcli/eslint-config": "^4.0.0",
      -    "@npmcli/template-oss": "4.10.0",
      +    "@npmcli/template-oss": "4.18.0",
           "mutate-fs": "^2.1.1",
           "tap": "^16.0.1"
         },
      @@ -55,6 +55,7 @@
         },
         "templateOSS": {
           "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
      -    "version": "4.10.0"
      +    "version": "4.18.0",
      +    "publish": true
         }
       }
      diff --git a/deps/npm/node_modules/npm-pick-manifest/lib/index.js b/deps/npm/node_modules/npm-pick-manifest/lib/index.js
      index f2934e9ca1822a..8dbd2721c89963 100644
      --- a/deps/npm/node_modules/npm-pick-manifest/lib/index.js
      +++ b/deps/npm/node_modules/npm-pick-manifest/lib/index.js
      @@ -210,7 +210,7 @@ module.exports = (packument, wanted, opts = {}) => {
           code,
           type: npa.resolve(packument.name, wanted).type,
           wanted,
      -    versions: Object.keys(packument.versions),
      +    versions: Object.keys(packument.versions ?? {}),
           name,
           distTags: packument['dist-tags'],
           defaultTag,
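
The `?? {}` guard above matters when a packument has no versions object at all (for example, when every version has been unpublished): building the "no matching version" error no longer throws a TypeError from Object.keys(undefined). A minimal sketch with a hypothetical packument:

    const pickManifest = require('npm-pick-manifest')

    const packument = { name: 'example', 'dist-tags': {} } // no "versions" field
    try {
      pickManifest(packument, '^1.0.0')
    } catch (err) {
      // the usual "no matching version" error, now reporting an empty list
      console.log(err.versions) // []
    }
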
      diff --git a/deps/npm/node_modules/npm-pick-manifest/package.json b/deps/npm/node_modules/npm-pick-manifest/package.json
      index 89ff8966f1a39b..e30c2cfe341fc6 100644
      --- a/deps/npm/node_modules/npm-pick-manifest/package.json
      +++ b/deps/npm/node_modules/npm-pick-manifest/package.json
      @@ -1,6 +1,6 @@
       {
         "name": "npm-pick-manifest",
      -  "version": "8.0.1",
      +  "version": "9.0.0",
         "description": "Resolves a matching manifest from a package metadata document according to standard npm semver resolution rules.",
         "main": "./lib",
         "files": [
      @@ -31,12 +31,12 @@
         "dependencies": {
           "npm-install-checks": "^6.0.0",
           "npm-normalize-package-bin": "^3.0.0",
      -    "npm-package-arg": "^10.0.0",
      +    "npm-package-arg": "^11.0.0",
           "semver": "^7.3.5"
         },
         "devDependencies": {
           "@npmcli/eslint-config": "^4.0.0",
      -    "@npmcli/template-oss": "4.6.1",
      +    "@npmcli/template-oss": "4.18.0",
           "tap": "^16.0.1"
         },
         "tap": {
      @@ -47,10 +47,17 @@
           ]
         },
         "engines": {
      -    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
      +    "node": "^16.14.0 || >=18.0.0"
         },
         "templateOSS": {
           "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
      -    "version": "4.6.1"
      +    "version": "4.18.0",
      +    "publish": true,
      +    "ciVersions": [
      +      "16.14.0",
      +      "16.x",
      +      "18.0.0",
      +      "18.x"
      +    ]
         }
       }
      diff --git a/deps/npm/node_modules/npm-profile/package.json b/deps/npm/node_modules/npm-profile/package.json
      index 9c0b77b8a6dd5d..af57e9e73509c3 100644
      --- a/deps/npm/node_modules/npm-profile/package.json
      +++ b/deps/npm/node_modules/npm-profile/package.json
      @@ -1,12 +1,12 @@
       {
         "name": "npm-profile",
      -  "version": "7.0.1",
      +  "version": "9.0.0",
         "description": "Library for updating an npmjs.com profile",
         "keywords": [],
         "author": "GitHub Inc.",
         "license": "ISC",
         "dependencies": {
      -    "npm-registry-fetch": "^14.0.0",
      +    "npm-registry-fetch": "^16.0.0",
           "proc-log": "^3.0.0"
         },
         "main": "./lib/index.js",
      @@ -20,7 +20,7 @@
         ],
         "devDependencies": {
           "@npmcli/eslint-config": "^4.0.0",
      -    "@npmcli/template-oss": "4.5.1",
      +    "@npmcli/template-oss": "4.18.0",
           "nock": "^13.2.4",
           "tap": "^16.0.1"
         },
      @@ -41,10 +41,17 @@
           ]
         },
         "engines": {
      -    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
      +    "node": "^16.14.0 || >=18.0.0"
         },
         "templateOSS": {
           "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
      -    "version": "4.5.1"
      +    "version": "4.18.0",
      +    "ciVersions": [
      +      "16.14.0",
      +      "16.x",
      +      "18.0.0",
      +      "18.x"
      +    ],
      +    "publish": true
         }
       }
      diff --git a/deps/npm/node_modules/npm-registry-fetch/package.json b/deps/npm/node_modules/npm-registry-fetch/package.json
      index 63a44725886ccf..2afadf939743b8 100644
      --- a/deps/npm/node_modules/npm-registry-fetch/package.json
      +++ b/deps/npm/node_modules/npm-registry-fetch/package.json
      @@ -1,6 +1,6 @@
       {
         "name": "npm-registry-fetch",
      -  "version": "14.0.5",
      +  "version": "16.0.0",
         "description": "Fetch-based http client for use with npm registry APIs",
         "main": "lib",
         "files": [
      @@ -31,18 +31,18 @@
         "author": "GitHub Inc.",
         "license": "ISC",
         "dependencies": {
      -    "make-fetch-happen": "^11.0.0",
      -    "minipass": "^5.0.0",
      +    "make-fetch-happen": "^13.0.0",
      +    "minipass": "^7.0.2",
           "minipass-fetch": "^3.0.0",
           "minipass-json-stream": "^1.0.1",
           "minizlib": "^2.1.2",
      -    "npm-package-arg": "^10.0.0",
      +    "npm-package-arg": "^11.0.0",
           "proc-log": "^3.0.0"
         },
         "devDependencies": {
           "@npmcli/eslint-config": "^4.0.0",
      -    "@npmcli/template-oss": "4.14.1",
      -    "cacache": "^17.0.0",
      +    "@npmcli/template-oss": "4.18.0",
      +    "cacache": "^18.0.0",
           "nock": "^13.2.4",
           "require-inject": "^1.4.4",
           "ssri": "^10.0.0",
      @@ -57,11 +57,17 @@
           ]
         },
         "engines": {
      -    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
      +    "node": "^16.14.0 || >=18.0.0"
         },
         "templateOSS": {
           "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
      -    "version": "4.14.1",
      -    "publish": "true"
      +    "version": "4.18.0",
      +    "publish": "true",
      +    "ciVersions": [
      +      "16.14.0",
      +      "16.x",
      +      "18.0.0",
      +      "18.x"
      +    ]
         }
       }
      diff --git a/deps/npm/node_modules/pacote/lib/registry.js b/deps/npm/node_modules/pacote/lib/registry.js
      index 34d9b2b87f3f3d..993fd3f08a6d91 100644
      --- a/deps/npm/node_modules/pacote/lib/registry.js
      +++ b/deps/npm/node_modules/pacote/lib/registry.js
      @@ -8,7 +8,7 @@ const pickManifest = require('npm-pick-manifest')
       const ssri = require('ssri')
       const crypto = require('crypto')
       const npa = require('npm-package-arg')
      -const { sigstore } = require('sigstore')
      +const sigstore = require('sigstore')
       
       // Corgis are cute. 🐕🐶
       const corgiDoc = 'application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*'
      @@ -299,7 +299,7 @@ class RegistryFetcher extends Fetcher {
                       tufCachePath: this.tufCache,
                       keySelector: publicKey ? () => publicKey.pemkey : undefined,
                     }
      -              await sigstore.verify(bundle, null, options)
      +              await sigstore.verify(bundle, options)
                   } catch (e) {
                     throw Object.assign(new Error(
                       `${mani._id} failed to verify attestation: ${e.message}`
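
The two edits above track the sigstore 2.x surface this pacote release depends on (see the dependency bump below): the package's top-level export is used directly, and verify() is called without the explicit null artifact argument. A minimal sketch mirroring that call shape; the wrapper function name is hypothetical, and the options fields are the ones registry.js builds:

    const sigstore = require('sigstore')

    async function verifyAttestation (bundle, tufCachePath, publicKey) {
      await sigstore.verify(bundle, {
        tufCachePath,
        keySelector: publicKey ? () => publicKey.pemkey : undefined,
      })
    }
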
      diff --git a/deps/npm/node_modules/pacote/package.json b/deps/npm/node_modules/pacote/package.json
      index bc8d984704af5b..4654b03d988c32 100644
      --- a/deps/npm/node_modules/pacote/package.json
      +++ b/deps/npm/node_modules/pacote/package.json
      @@ -1,6 +1,6 @@
       {
         "name": "pacote",
      -  "version": "15.2.0",
      +  "version": "17.0.4",
         "description": "JavaScript package downloader",
         "author": "GitHub Inc.",
         "bin": {
      @@ -27,8 +27,8 @@
         "devDependencies": {
           "@npmcli/arborist": "^6.0.0 || ^6.0.0-pre.0",
           "@npmcli/eslint-config": "^4.0.0",
      -    "@npmcli/template-oss": "4.14.1",
      -    "hosted-git-info": "^6.0.0",
      +    "@npmcli/template-oss": "4.18.0",
      +    "hosted-git-info": "^7.0.0",
           "mutate-fs": "^2.1.1",
           "nock": "^13.2.4",
           "npm-registry-mock": "^1.3.2",
      @@ -44,27 +44,27 @@
           "git"
         ],
         "dependencies": {
      -    "@npmcli/git": "^4.0.0",
      +    "@npmcli/git": "^5.0.0",
           "@npmcli/installed-package-contents": "^2.0.1",
      -    "@npmcli/promise-spawn": "^6.0.1",
      -    "@npmcli/run-script": "^6.0.0",
      -    "cacache": "^17.0.0",
      +    "@npmcli/promise-spawn": "^7.0.0",
      +    "@npmcli/run-script": "^7.0.0",
      +    "cacache": "^18.0.0",
           "fs-minipass": "^3.0.0",
      -    "minipass": "^5.0.0",
      -    "npm-package-arg": "^10.0.0",
      -    "npm-packlist": "^7.0.0",
      -    "npm-pick-manifest": "^8.0.0",
      -    "npm-registry-fetch": "^14.0.0",
      +    "minipass": "^7.0.2",
      +    "npm-package-arg": "^11.0.0",
      +    "npm-packlist": "^8.0.0",
      +    "npm-pick-manifest": "^9.0.0",
      +    "npm-registry-fetch": "^16.0.0",
           "proc-log": "^3.0.0",
           "promise-retry": "^2.0.1",
      -    "read-package-json": "^6.0.0",
      +    "read-package-json": "^7.0.0",
           "read-package-json-fast": "^3.0.0",
      -    "sigstore": "^1.3.0",
      +    "sigstore": "^2.0.0",
           "ssri": "^10.0.0",
           "tar": "^6.1.11"
         },
         "engines": {
      -    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
      +    "node": "^16.14.0 || >=18.0.0"
         },
         "repository": {
           "type": "git",
      @@ -72,7 +72,13 @@
         },
         "templateOSS": {
           "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
      -    "version": "4.14.1",
      +    "ciVersions": [
      +      "16.14.0",
      +      "16.x",
      +      "18.0.0",
      +      "18.x"
      +    ],
      +    "version": "4.18.0",
           "windowsCI": false,
           "publish": "true"
         }
      diff --git a/deps/npm/node_modules/path-scurry/dist/cjs/index.js b/deps/npm/node_modules/path-scurry/dist/cjs/index.js
      index 8044c7e581d2e4..23eb5b0853ff28 100644
      --- a/deps/npm/node_modules/path-scurry/dist/cjs/index.js
      +++ b/deps/npm/node_modules/path-scurry/dist/cjs/index.js
      @@ -521,6 +521,29 @@ class PathBase {
           isUnknown() {
               return (this.#type & IFMT) === UNKNOWN;
           }
      +    isType(type) {
      +        return this[`is${type}`]();
      +    }
      +    getType() {
      +        return this.isUnknown()
      +            ? 'Unknown'
      +            : this.isDirectory()
      +                ? 'Directory'
      +                : this.isFile()
      +                    ? 'File'
      +                    : this.isSymbolicLink()
      +                        ? 'SymbolicLink'
      +                        : this.isFIFO()
      +                            ? 'FIFO'
      +                            : this.isCharacterDevice()
      +                                ? 'CharacterDevice'
      +                                : this.isBlockDevice()
      +                                    ? 'BlockDevice'
      +                                    : /* c8 ignore start */ this.isSocket()
      +                                        ? 'Socket'
      +                                        : 'Unknown';
      +        /* c8 ignore stop */
      +    }
           /**
            * Is the Path a regular file?
            */
      diff --git a/deps/npm/node_modules/path-scurry/dist/mjs/index.js b/deps/npm/node_modules/path-scurry/dist/mjs/index.js
      index 957f087c865147..079253a6aee967 100644
      --- a/deps/npm/node_modules/path-scurry/dist/mjs/index.js
      +++ b/deps/npm/node_modules/path-scurry/dist/mjs/index.js
      @@ -493,6 +493,29 @@ export class PathBase {
           isUnknown() {
               return (this.#type & IFMT) === UNKNOWN;
           }
      +    isType(type) {
      +        return this[`is${type}`]();
      +    }
      +    getType() {
      +        return this.isUnknown()
      +            ? 'Unknown'
      +            : this.isDirectory()
      +                ? 'Directory'
      +                : this.isFile()
      +                    ? 'File'
      +                    : this.isSymbolicLink()
      +                        ? 'SymbolicLink'
      +                        : this.isFIFO()
      +                            ? 'FIFO'
      +                            : this.isCharacterDevice()
      +                                ? 'CharacterDevice'
      +                                : this.isBlockDevice()
      +                                    ? 'BlockDevice'
      +                                    : /* c8 ignore start */ this.isSocket()
      +                                        ? 'Socket'
      +                                        : 'Unknown';
      +        /* c8 ignore stop */
      +    }
           /**
            * Is the Path a regular file?
            */
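
The same two helpers are added to both the CJS and ESM builds above. An illustrative walk, assuming PathScurry's readdirSync API (withFileTypes defaults to true, so entries are Path objects); the directory path is hypothetical:

    const { PathScurry } = require('path-scurry')

    const pw = new PathScurry('/some/dir')
    for (const entry of pw.readdirSync()) {
      // getType() maps the cached dirent flags to a name such as
      // 'File', 'Directory', 'SymbolicLink', or 'Unknown'
      console.log(entry.name, entry.getType())

      // isType('Directory') dispatches to entry.isDirectory()
      if (entry.isType('Directory')) {
        // descend, etc.
      }
    }
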
      diff --git a/deps/npm/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/index.min.js b/deps/npm/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/index.min.js
      deleted file mode 100644
      index d854bf570d346c..00000000000000
      --- a/deps/npm/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/index.min.js
      +++ /dev/null
      @@ -1,2 +0,0 @@
      -"use strict";var x=(o,t,e)=>{if(!t.has(o))throw TypeError("Cannot "+e)};var j=(o,t,e)=>(x(o,t,"read from private field"),e?e.call(o):t.get(o)),I=(o,t,e)=>{if(t.has(o))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(o):t.set(o,e)},D=(o,t,e,i)=>(x(o,t,"write to private field"),i?i.call(o,e):t.set(o,e),e);Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var v=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,N=new Set,L=typeof process=="object"&&process?process:{},P=(o,t,e,i)=>{typeof L.emitWarning=="function"?L.emitWarning(o,t,e,i):console.error(`[${e}] ${t}: ${o}`)},W=globalThis.AbortController,M=globalThis.AbortSignal;if(typeof W>"u"){M=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new M;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let o=L.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{o&&(o=!1,P("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=o=>!N.has(o),Y=Symbol("type"),m=o=>o&&o===Math.floor(o)&&o>0&&isFinite(o),k=o=>m(o)?o<=Math.pow(2,8)?Uint8Array:o<=Math.pow(2,16)?Uint16Array:o<=Math.pow(2,32)?Uint32Array:o<=Number.MAX_SAFE_INTEGER?z:null:null,z=class extends Array{constructor(t){super(t),this.fill(0)}},E,T=class{heap;length;static create(t){let e=k(t);if(!e)return[];D(T,E,!0);let i=new T(t,e);return D(T,E,!1),i}constructor(t,e){if(!j(T,E))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},R=T;E=new WeakMap,I(R,E,!1);var C=class{#d;#f;#_;#g;#C;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#s;#p;#n;#i;#t;#l;#c;#o;#h;#w;#r;#m;#F;#S;#b;#T;#a;static unsafeExposeInternals(t){return{starts:t.#F,ttls:t.#S,sizes:t.#m,keyMap:t.#n,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#w,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#D(e,i,s,n),moveToTail:e=>t.#v(e),indexes:e=>t.#y(e),rindexes:e=>t.#A(e),isStale:e=>t.#u(e)}}get max(){return this.#d}get maxSize(){return this.#f}get calculatedSize(){return this.#p}get size(){return this.#s}get fetchMethod(){return this.#C}get dispose(){return this.#_}get disposeAfter(){return this.#g}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:a,allowStale:r,dispose:u,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:d,maxSize:p=0,maxEntrySize:F=0,sizeCalculation:c,fetchMethod:w,noDeleteOnFetchRejection:l,noDeleteOnStaleGet:S,allowStaleOnFetchRejection:y,allowStaleOnFetchAbort:g,ignoreFetchAbort:_}=t;if(e!==0&&!m(e))throw new TypeError("max option must be a nonnegative integer");let O=e?k(e):Array;if(!O)throw new Error("invalid max value: 
"+e);if(this.#d=e,this.#f=p,this.maxEntrySize=F||this.#f,this.sizeCalculation=c,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(w!==void 0&&typeof w!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#C=w,this.#T=!!w,this.#n=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new O(e),this.#c=new O(e),this.#o=0,this.#h=0,this.#w=R.create(e),this.#s=0,this.#p=0,typeof u=="function"&&(this.#_=u),typeof b=="function"?(this.#g=b,this.#r=[]):(this.#g=void 0,this.#r=void 0),this.#b=!!this.#_,this.#a=!!this.#g,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!d,this.noDeleteOnFetchRejection=!!l,this.allowStaleOnFetchRejection=!!y,this.allowStaleOnFetchAbort=!!g,this.ignoreFetchAbort=!!_,this.maxEntrySize!==0){if(this.#f!==0&&!m(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!m(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#I()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!S,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!a,this.ttlResolution=m(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!m(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#L()}if(this.#d===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#d&&!this.#f){let A="LRU_CACHE_UNBOUNDED";V(A)&&(N.add(A),P("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",A,C))}}getRemainingTTL(t){return this.#n.has(t)?1/0:0}#L(){let t=new z(this.#d),e=new z(this.#d);this.#S=t,this.#F=e,this.#U=(n,h,a=v.now())=>{if(e[n]=h!==0?a:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#u(n)&&this.delete(this.#i[n])},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?v.now():0},this.#O=(n,h)=>{if(t[h]){let a=t[h],r=e[h];n.ttl=a,n.start=r,n.now=i||s();let u=n.now-r;n.remainingTTL=a-u}};let i=0,s=()=>{let n=v.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#n.get(n);if(h===void 0)return 0;let a=t[h],r=e[h];if(a===0||r===0)return 1/0;let u=(i||s())-r;return a-u},this.#u=n=>t[n]!==0&&e[n]!==0&&(i||s())-e[n]>t[n]}#z=()=>{};#O=()=>{};#U=()=>{};#u=()=>!1;#I(){let t=new z(this.#d);this.#p=0,this.#m=t,this.#E=e=>{this.#p-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!m(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!m(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#R=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#p>n;)this.#W(!0)}this.#p+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#p)}}#E=t=>{};#R=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#y({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#h;!(!this.#x(e)||((t||!this.#u(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#A({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#o;!(!this.#x(e)||((t||!this.#u(e))&&(yield e),e===this.#h));)e=this.#l[e]}#x(t){return t!==void 0&&this.#n.get(this.#i[t])===t}*entries(){for(let t of this.#y())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#y()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#y())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}find(t,e={}){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#A({allowStale:!0}))this.#u(e)&&(this.delete(this.#i[e]),t=!0);return t}dump(){let t=[];for(let e of this.#y({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#S&&this.#F){h.ttl=this.#S[e];let a=v.now()-this.#F[e];h.start=Math.floor(Date.now()-a)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=v.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:a=this.sizeCalculation,status:r}=i,{noUpdateTTL:u=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,a);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.delete(t),this;let f=this.#s===0?void 0:this.#n.get(t);if(f===void 0)f=this.#s===0?this.#h:this.#w.length!==0?this.#w.pop():this.#s===this.#d?this.#W(!1):this.#s,this.#i[f]=t,this.#t[f]=e,this.#n.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#s++,this.#R(f,b,r),r&&(r.set="add"),u=!1;else{this.#v(f);let d=this.#t[f];if(e!==d){if(this.#T&&this.#e(d)?d.__abortController.abort(new Error("replaced")):h||(this.#b&&this.#_?.(d,t,"set"),this.#a&&this.#r?.push([d,t,"set"])),this.#E(f),this.#R(f,b,r),this.#t[f]=e,r){r.set="replace";let p=d&&this.#e(d)?d.__staleWhileFetching:d;p!==void 0&&(r.oldValue=p)}}else r&&(r.set="update")}if(s!==0&&!this.#S&&this.#L(),this.#S&&(u||this.#U(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let d=this.#r,p;for(;p=d?.shift();)this.#g?.(...p)}return this}pop(){try{for(;this.#s;){let 
t=this.#t[this.#o];if(this.#W(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#g?.(...e)}}}#W(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#T&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#b||this.#a)&&(this.#b&&this.#_?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#E(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#w.push(e)),this.#s===1?(this.#o=this.#h=0,this.#w.length=0):this.#o=this.#l[e],this.#n.delete(i),this.#s--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#n.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#u(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#n.get(t);if(s!==void 0&&(i||!this.#u(s))){let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}}#D(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:a}=i;a?.addEventListener("abort",()=>h.abort(a.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},u=(c,w=!1)=>{let{aborted:l}=h.signal,S=i.ignoreFetchAbort&&c!==void 0;if(i.status&&(l&&!w?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,S&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!S&&!w)return f(h.signal.reason);let y=p;return this.#t[e]===p&&(c===void 0?y.__staleWhileFetching?this.#t[e]=y.__staleWhileFetching:this.delete(t):(i.status&&(i.status.fetchUpdated=!0),this.set(t,c,r.options))),c},b=c=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=c),f(c)),f=c=>{let{aborted:w}=h.signal,l=w&&i.allowStaleOnFetchAbort,S=l||i.allowStaleOnFetchRejection,y=S||i.noDeleteOnFetchRejection,g=p;if(this.#t[e]===p&&(!y||g.__staleWhileFetching===void 0?this.delete(t):l||(this.#t[e]=g.__staleWhileFetching)),S)return i.status&&g.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),g.__staleWhileFetching;if(g.__returned===g)throw c},d=(c,w)=>{let l=this.#C?.(t,n,r);l&&l instanceof Promise&&l.then(S=>c(S),w),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(c(),i.allowStaleOnFetchAbort&&(c=S=>u(S,!0)))})};i.status&&(i.status.fetchDispatched=!0);let p=new Promise(d).then(u,b),F=Object.assign(p,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#n.get(t)):this.#t[e]=F,F}#e(t){if(!this.#T)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:a=this.noDisposeOnSet,size:r=0,sizeCalculation:u=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:d=this.allowStaleOnFetchRejection,ignoreFetchAbort:p=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:c,forceRefresh:w=!1,status:l,signal:S}=e;if(!this.#T)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let 
y={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:a,size:r,sizeCalculation:u,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:d,allowStaleOnFetchAbort:F,ignoreFetchAbort:p,status:l,signal:S},g=this.#n.get(t);if(g===void 0){l&&(l.fetch="miss");let _=this.#D(t,g,y,c);return _.__returned=_}else{let _=this.#t[g];if(this.#e(_)){let G=i&&_.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",G&&(l.returnedStale=!0)),G?_.__staleWhileFetching:_.__returned=_}let O=this.#u(g);if(!w&&!O)return l&&(l.fetch="hit"),this.#v(g),s&&this.#z(g),l&&this.#O(l,g),_;let A=this.#D(t,g,y,c),U=A.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=O?"stale":"refresh",U&&O&&(l.returnedStale=!0)),U?A.__staleWhileFetching:A.__returned=A}}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,a=this.#n.get(t);if(a!==void 0){let r=this.#t[a],u=this.#e(r);return h&&this.#O(h,a),this.#u(a)?(h&&(h.get="stale"),u?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.delete(t),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),u?r.__staleWhileFetching:(this.#v(a),s&&this.#z(a),r))}else h&&(h.get="miss")}#j(t,e){this.#c[e]=t,this.#l[t]=e}#v(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#j(this.#c[t],this.#l[t]),this.#j(this.#h,t),this.#h=t)}delete(t){let e=!1;if(this.#s!==0){let i=this.#n.get(t);if(i!==void 0)if(e=!0,this.#s===1)this.clear();else{this.#E(i);let s=this.#t[i];this.#e(s)?s.__abortController.abort(new Error("deleted")):(this.#b||this.#a)&&(this.#b&&this.#_?.(s,t,"delete"),this.#a&&this.#r?.push([s,t,"delete"])),this.#n.delete(t),this.#i[i]=void 0,this.#t[i]=void 0,i===this.#h?this.#h=this.#c[i]:i===this.#o?this.#o=this.#l[i]:(this.#l[this.#c[i]]=this.#l[i],this.#c[this.#l[i]]=this.#c[i]),this.#s--,this.#w.push(i)}}if(this.#a&&this.#r?.length){let i=this.#r,s;for(;s=i?.shift();)this.#g?.(...s)}return e}clear(){for(let t of this.#A({allowStale:!0})){let e=this.#t[t];if(this.#e(e))e.__abortController.abort(new Error("deleted"));else{let i=this.#i[t];this.#b&&this.#_?.(e,i,"delete"),this.#a&&this.#r?.push([e,i,"delete"])}}if(this.#n.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#S&&this.#F&&(this.#S.fill(0),this.#F.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#w.length=0,this.#p=0,this.#s=0,this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#g?.(...e)}}};exports.LRUCache=C;
      -//# sourceMappingURL=index.min.js.map
      diff --git a/deps/npm/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/index.min.js b/deps/npm/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/index.min.js
      deleted file mode 100644
      index 44bd1c23b86e74..00000000000000
      --- a/deps/npm/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/index.min.js
      +++ /dev/null
      @@ -1,2 +0,0 @@
      -var U=(o,t,e)=>{if(!t.has(o))throw TypeError("Cannot "+e)};var I=(o,t,e)=>(U(o,t,"read from private field"),e?e.call(o):t.get(o)),j=(o,t,e)=>{if(t.has(o))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(o):t.set(o,e)},D=(o,t,e,i)=>(U(o,t,"write to private field"),i?i.call(o,e):t.set(o,e),e);var v=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,M=new Set,L=typeof process=="object"&&process?process:{},P=(o,t,e,i)=>{typeof L.emitWarning=="function"?L.emitWarning(o,t,e,i):console.error(`[${e}] ${t}: ${o}`)},R=globalThis.AbortController,N=globalThis.AbortSignal;if(typeof R>"u"){N=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},R=class{constructor(){t()}signal=new N;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let o=L.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{o&&(o=!1,P("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=o=>!M.has(o),Y=Symbol("type"),m=o=>o&&o===Math.floor(o)&&o>0&&isFinite(o),k=o=>m(o)?o<=Math.pow(2,8)?Uint8Array:o<=Math.pow(2,16)?Uint16Array:o<=Math.pow(2,32)?Uint32Array:o<=Number.MAX_SAFE_INTEGER?z:null:null,z=class extends Array{constructor(t){super(t),this.fill(0)}},E,T=class{heap;length;static create(t){let e=k(t);if(!e)return[];D(T,E,!0);let i=new T(t,e);return D(T,E,!1),i}constructor(t,e){if(!I(T,E))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},C=T;E=new WeakMap,j(C,E,!1);var W=class{#d;#f;#_;#g;#C;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#s;#p;#n;#i;#t;#l;#c;#o;#h;#w;#r;#m;#F;#S;#b;#T;#a;static unsafeExposeInternals(t){return{starts:t.#F,ttls:t.#S,sizes:t.#m,keyMap:t.#n,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#w,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#D(e,i,s,n),moveToTail:e=>t.#v(e),indexes:e=>t.#y(e),rindexes:e=>t.#A(e),isStale:e=>t.#u(e)}}get max(){return this.#d}get maxSize(){return this.#f}get calculatedSize(){return this.#p}get size(){return this.#s}get fetchMethod(){return this.#C}get dispose(){return this.#_}get disposeAfter(){return this.#g}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:a,allowStale:r,dispose:u,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:d,maxSize:p=0,maxEntrySize:F=0,sizeCalculation:c,fetchMethod:w,noDeleteOnFetchRejection:l,noDeleteOnStaleGet:S,allowStaleOnFetchRejection:y,allowStaleOnFetchAbort:g,ignoreFetchAbort:_}=t;if(e!==0&&!m(e))throw new TypeError("max option must be a nonnegative integer");let O=e?k(e):Array;if(!O)throw new Error("invalid max value: 
"+e);if(this.#d=e,this.#f=p,this.maxEntrySize=F||this.#f,this.sizeCalculation=c,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(w!==void 0&&typeof w!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#C=w,this.#T=!!w,this.#n=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new O(e),this.#c=new O(e),this.#o=0,this.#h=0,this.#w=C.create(e),this.#s=0,this.#p=0,typeof u=="function"&&(this.#_=u),typeof b=="function"?(this.#g=b,this.#r=[]):(this.#g=void 0,this.#r=void 0),this.#b=!!this.#_,this.#a=!!this.#g,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!d,this.noDeleteOnFetchRejection=!!l,this.allowStaleOnFetchRejection=!!y,this.allowStaleOnFetchAbort=!!g,this.ignoreFetchAbort=!!_,this.maxEntrySize!==0){if(this.#f!==0&&!m(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!m(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#j()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!S,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!a,this.ttlResolution=m(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!m(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#L()}if(this.#d===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#d&&!this.#f){let A="LRU_CACHE_UNBOUNDED";V(A)&&(M.add(A),P("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",A,W))}}getRemainingTTL(t){return this.#n.has(t)?1/0:0}#L(){let t=new z(this.#d),e=new z(this.#d);this.#S=t,this.#F=e,this.#x=(n,h,a=v.now())=>{if(e[n]=h!==0?a:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#u(n)&&this.delete(this.#i[n])},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?v.now():0},this.#O=(n,h)=>{if(t[h]){let a=t[h],r=e[h];n.ttl=a,n.start=r,n.now=i||s();let u=n.now-r;n.remainingTTL=a-u}};let i=0,s=()=>{let n=v.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#n.get(n);if(h===void 0)return 0;let a=t[h],r=e[h];if(a===0||r===0)return 1/0;let u=(i||s())-r;return a-u},this.#u=n=>t[n]!==0&&e[n]!==0&&(i||s())-e[n]>t[n]}#z=()=>{};#O=()=>{};#x=()=>{};#u=()=>!1;#j(){let t=new z(this.#d);this.#p=0,this.#m=t,this.#E=e=>{this.#p-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!m(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!m(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#R=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#p>n;)this.#W(!0)}this.#p+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#p)}}#E=t=>{};#R=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#y({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#h;!(!this.#U(e)||((t||!this.#u(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#A({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#o;!(!this.#U(e)||((t||!this.#u(e))&&(yield e),e===this.#h));)e=this.#l[e]}#U(t){return t!==void 0&&this.#n.get(this.#i[t])===t}*entries(){for(let t of this.#y())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#y()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#y())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}find(t,e={}){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#A({allowStale:!0}))this.#u(e)&&(this.delete(this.#i[e]),t=!0);return t}dump(){let t=[];for(let e of this.#y({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#S&&this.#F){h.ttl=this.#S[e];let a=v.now()-this.#F[e];h.start=Math.floor(Date.now()-a)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=v.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:a=this.sizeCalculation,status:r}=i,{noUpdateTTL:u=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,a);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.delete(t),this;let f=this.#s===0?void 0:this.#n.get(t);if(f===void 0)f=this.#s===0?this.#h:this.#w.length!==0?this.#w.pop():this.#s===this.#d?this.#W(!1):this.#s,this.#i[f]=t,this.#t[f]=e,this.#n.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#s++,this.#R(f,b,r),r&&(r.set="add"),u=!1;else{this.#v(f);let d=this.#t[f];if(e!==d){if(this.#T&&this.#e(d)?d.__abortController.abort(new Error("replaced")):h||(this.#b&&this.#_?.(d,t,"set"),this.#a&&this.#r?.push([d,t,"set"])),this.#E(f),this.#R(f,b,r),this.#t[f]=e,r){r.set="replace";let p=d&&this.#e(d)?d.__staleWhileFetching:d;p!==void 0&&(r.oldValue=p)}}else r&&(r.set="update")}if(s!==0&&!this.#S&&this.#L(),this.#S&&(u||this.#x(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let d=this.#r,p;for(;p=d?.shift();)this.#g?.(...p)}return this}pop(){try{for(;this.#s;){let 
t=this.#t[this.#o];if(this.#W(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#g?.(...e)}}}#W(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#T&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#b||this.#a)&&(this.#b&&this.#_?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#E(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#w.push(e)),this.#s===1?(this.#o=this.#h=0,this.#w.length=0):this.#o=this.#l[e],this.#n.delete(i),this.#s--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#n.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#u(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#n.get(t);if(s!==void 0&&(i||!this.#u(s))){let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}}#D(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new R,{signal:a}=i;a?.addEventListener("abort",()=>h.abort(a.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},u=(c,w=!1)=>{let{aborted:l}=h.signal,S=i.ignoreFetchAbort&&c!==void 0;if(i.status&&(l&&!w?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,S&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!S&&!w)return f(h.signal.reason);let y=p;return this.#t[e]===p&&(c===void 0?y.__staleWhileFetching?this.#t[e]=y.__staleWhileFetching:this.delete(t):(i.status&&(i.status.fetchUpdated=!0),this.set(t,c,r.options))),c},b=c=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=c),f(c)),f=c=>{let{aborted:w}=h.signal,l=w&&i.allowStaleOnFetchAbort,S=l||i.allowStaleOnFetchRejection,y=S||i.noDeleteOnFetchRejection,g=p;if(this.#t[e]===p&&(!y||g.__staleWhileFetching===void 0?this.delete(t):l||(this.#t[e]=g.__staleWhileFetching)),S)return i.status&&g.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),g.__staleWhileFetching;if(g.__returned===g)throw c},d=(c,w)=>{let l=this.#C?.(t,n,r);l&&l instanceof Promise&&l.then(S=>c(S),w),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(c(),i.allowStaleOnFetchAbort&&(c=S=>u(S,!0)))})};i.status&&(i.status.fetchDispatched=!0);let p=new Promise(d).then(u,b),F=Object.assign(p,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#n.get(t)):this.#t[e]=F,F}#e(t){if(!this.#T)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof R}async fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:a=this.noDisposeOnSet,size:r=0,sizeCalculation:u=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:d=this.allowStaleOnFetchRejection,ignoreFetchAbort:p=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:c,forceRefresh:w=!1,status:l,signal:S}=e;if(!this.#T)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let 
y={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:a,size:r,sizeCalculation:u,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:d,allowStaleOnFetchAbort:F,ignoreFetchAbort:p,status:l,signal:S},g=this.#n.get(t);if(g===void 0){l&&(l.fetch="miss");let _=this.#D(t,g,y,c);return _.__returned=_}else{let _=this.#t[g];if(this.#e(_)){let G=i&&_.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",G&&(l.returnedStale=!0)),G?_.__staleWhileFetching:_.__returned=_}let O=this.#u(g);if(!w&&!O)return l&&(l.fetch="hit"),this.#v(g),s&&this.#z(g),l&&this.#O(l,g),_;let A=this.#D(t,g,y,c),x=A.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=O?"stale":"refresh",x&&O&&(l.returnedStale=!0)),x?A.__staleWhileFetching:A.__returned=A}}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,a=this.#n.get(t);if(a!==void 0){let r=this.#t[a],u=this.#e(r);return h&&this.#O(h,a),this.#u(a)?(h&&(h.get="stale"),u?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.delete(t),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),u?r.__staleWhileFetching:(this.#v(a),s&&this.#z(a),r))}else h&&(h.get="miss")}#I(t,e){this.#c[e]=t,this.#l[t]=e}#v(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#I(this.#c[t],this.#l[t]),this.#I(this.#h,t),this.#h=t)}delete(t){let e=!1;if(this.#s!==0){let i=this.#n.get(t);if(i!==void 0)if(e=!0,this.#s===1)this.clear();else{this.#E(i);let s=this.#t[i];this.#e(s)?s.__abortController.abort(new Error("deleted")):(this.#b||this.#a)&&(this.#b&&this.#_?.(s,t,"delete"),this.#a&&this.#r?.push([s,t,"delete"])),this.#n.delete(t),this.#i[i]=void 0,this.#t[i]=void 0,i===this.#h?this.#h=this.#c[i]:i===this.#o?this.#o=this.#l[i]:(this.#l[this.#c[i]]=this.#l[i],this.#c[this.#l[i]]=this.#c[i]),this.#s--,this.#w.push(i)}}if(this.#a&&this.#r?.length){let i=this.#r,s;for(;s=i?.shift();)this.#g?.(...s)}return e}clear(){for(let t of this.#A({allowStale:!0})){let e=this.#t[t];if(this.#e(e))e.__abortController.abort(new Error("deleted"));else{let i=this.#i[t];this.#b&&this.#_?.(e,i,"delete"),this.#a&&this.#r?.push([e,i,"delete"])}}if(this.#n.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#S&&this.#F&&(this.#S.fill(0),this.#F.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#w.length=0,this.#p=0,this.#s=0,this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#g?.(...e)}}};export{W as LRUCache};
      -//# sourceMappingURL=index.min.js.map
      diff --git a/deps/npm/node_modules/path-scurry/package.json b/deps/npm/node_modules/path-scurry/package.json
      index 5b900825e44e00..af04f807fed2bc 100644
      --- a/deps/npm/node_modules/path-scurry/package.json
      +++ b/deps/npm/node_modules/path-scurry/package.json
      @@ -1,6 +1,6 @@
       {
         "name": "path-scurry",
      -  "version": "1.9.2",
      +  "version": "1.10.1",
         "description": "walk paths fast and efficiently",
         "author": "Isaac Z. Schlueter  (https://blog.izs.me)",
         "main": "./dist/cjs/index.js",
      @@ -64,7 +64,7 @@
           "eslint-config-prettier": "^8.6.0",
           "mkdirp": "^3.0.0",
           "prettier": "^2.8.3",
      -    "rimraf": "^4.1.2",
      +    "rimraf": "^5.0.1",
           "tap": "^16.3.4",
           "ts-node": "^10.9.1",
           "typedoc": "^0.23.24",
      @@ -78,10 +78,10 @@
         },
         "repository": {
           "type": "git",
      -    "url": "git+https://github.com/isaacs/path-walker"
      +    "url": "git+https://github.com/isaacs/path-scurry"
         },
         "dependencies": {
      -    "lru-cache": "^9.1.1",
      -    "minipass": "^5.0.0 || ^6.0.2"
      +    "lru-cache": "^9.1.1 || ^10.0.0",
      +    "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0"
         }
       }
      diff --git a/deps/npm/node_modules/read-package-json/package.json b/deps/npm/node_modules/read-package-json/package.json
      index 90ab321d51743e..01061f2bc27921 100644
      --- a/deps/npm/node_modules/read-package-json/package.json
      +++ b/deps/npm/node_modules/read-package-json/package.json
      @@ -1,6 +1,6 @@
       {
         "name": "read-package-json",
      -  "version": "6.0.4",
      +  "version": "7.0.0",
         "author": "GitHub Inc.",
         "description": "The thing npm uses to read package.json files with semantics and defaults and validation",
         "repository": {
      @@ -25,12 +25,12 @@
         "dependencies": {
           "glob": "^10.2.2",
           "json-parse-even-better-errors": "^3.0.0",
      -    "normalize-package-data": "^5.0.0",
      +    "normalize-package-data": "^6.0.0",
           "npm-normalize-package-bin": "^3.0.0"
         },
         "devDependencies": {
           "@npmcli/eslint-config": "^4.0.0",
      -    "@npmcli/template-oss": "4.15.1",
      +    "@npmcli/template-oss": "4.18.0",
           "tap": "^16.0.1"
         },
         "license": "ISC",
      @@ -39,7 +39,7 @@
           "lib/"
         ],
         "engines": {
      -    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
      +    "node": "^16.14.0 || >=18.0.0"
         },
         "tap": {
           "branches": 73,
      @@ -53,7 +53,13 @@
         },
         "templateOSS": {
           "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
      -    "version": "4.15.1",
      -    "publish": "true"
      +    "version": "4.18.0",
      +    "publish": "true",
      +    "ciVersions": [
      +      "16.14.0",
      +      "16.x",
      +      "18.0.0",
      +      "18.x"
      +    ]
         }
       }
      diff --git a/deps/npm/node_modules/sigstore/README.md b/deps/npm/node_modules/sigstore/README.md
      deleted file mode 100644
      index 2540fa808b9796..00000000000000
      --- a/deps/npm/node_modules/sigstore/README.md
      +++ /dev/null
      @@ -1,165 +0,0 @@
      -# sigstore · [![npm version](https://img.shields.io/npm/v/sigstore.svg?style=flat)](https://www.npmjs.com/package/sigstore) [![CI Status](https://github.com/sigstore/sigstore-js/workflows/CI/badge.svg)](https://github.com/sigstore/sigstore-js/actions/workflows/ci.yml) [![Smoke Test Status](https://github.com/sigstore/sigstore-js/workflows/smoke-test/badge.svg)](https://github.com/sigstore/sigstore-js/actions/workflows/smoke-test.yml)
      -
      -A JavaScript library for generating and verifying Sigstore signatures. One of
      -the intended uses is to sign and verify npm packages but it can be used to sign
      -and verify any file.
      -
      -## Features
      -
      -* Support for signing using an OpenID Connect identity
      -* Support for publishing signatures to a [Rekor][1] instance
      -* Support for verifying Sigstore bundles
      -
      -## Prerequisites
      -
      -- Node.js version >= 14.17.0
      -
      -## Installation
      -
      -```
      -npm install sigstore
      -```
      -
      -## Usage
      -
      -```javascript
      -const { sigstore } = require('sigstore')
      -```
      -
      -```javascript
      -import { sigstore } from 'sigstore'
      -```
      -
      -### sign(payload[, options])
      -
      -Generates a Sigstore signature for the supplied payload. Returns a
      -[Sigstore bundle][2] containing the signature and the verification material
      -necessary to verify the signature.
      -
-* `payload` `<Buffer>`: The bytes of the artifact to be signed.
-* `options` `<Object>`
-  * `fulcioURL` `<string>`: The base URL of the Fulcio instance to use for retrieving the signing certificate. Defaults to `'https://fulcio.sigstore.dev'`.
-  * `rekorURL` `<string>`: The base URL of the Rekor instance to use when adding the signature to the transparency log. Defaults to `'https://rekor.sigstore.dev'`.
-  * `tsaServerURL` `<string>`: The base URL of the Timestamp Authority instance to use when requesting a signed timestamp. If omitted, no timestamp will be requested.
-  * `tlogUpload` `<boolean>`: Flag indicating whether or not the signature should be recorded on the Rekor transparency log. Defaults to `true`.
-  * `identityToken` `<string>`: The OIDC token identifying the signer. If no explicit token is supplied, an attempt will be made to retrieve one from the environment. This config cannot be used with `identityProvider`.
-  * `identityProvider` `<Object>`: Object which implements `getToken: () => Promise<string>`. The supplied provider will be used to retrieve an OIDC token. If no provider is supplied, an attempt will be made to retrieve an OIDC token from the environment. This config cannot be used with `identityToken`.
      -
      -### attest(payload, payloadType[, options])
      -
      -Generates a Sigstore signature for the supplied in-toto statement. Returns a
      -[Sigstore bundle][2] containing the [DSSE][3]-wrapped statement and signature
      -as well as the verification material necessary to verify the signature.
      -
-* `payload` `<Buffer>`: The bytes of the statement to be signed.
-* `payloadType` `<string>`: MIME or content type describing the statement to be signed.
-* `options` `<Object>`
-  * `fulcioURL` `<string>`: The base URL of the Fulcio instance to use for retrieving the signing certificate. Defaults to `'https://fulcio.sigstore.dev'`.
-  * `rekorURL` `<string>`: The base URL of the Rekor instance to use when adding the signature to the transparency log. Defaults to `'https://rekor.sigstore.dev'`.
-  * `tsaServerURL` `<string>`: The base URL of the Timestamp Authority instance to use when requesting a signed timestamp. If omitted, no timestamp will be requested.
-  * `tlogUpload` `<boolean>`: Flag indicating whether or not the signed statement should be recorded on the Rekor transparency log. Defaults to `true`.
-  * `identityToken` `<string>`: The OIDC token identifying the signer. If no explicit token is supplied, an attempt will be made to retrieve one from the environment. This config cannot be used with `identityProvider`.
-  * `identityProvider` `<Object>`: Object which implements `getToken: () => Promise<string>`. The supplied provider will be used to retrieve an OIDC token. If no provider is supplied, an attempt will be made to retrieve an OIDC token from the environment. This config cannot be used with `identityToken`.
      -
      -
      -### verify(bundle[, payload][, options])
      -
      -Verifies the signature in the supplied bundle.
      -
-* `bundle` `<Bundle>`: The Sigstore bundle containing the signature to be verified and the verification material necessary to verify the signature.
-* `payload` `<Buffer>`: The bytes of the artifact over which the signature was created. Only necessary when the `sign` function was used to generate the signature since the Bundle does not contain any information about the artifact which was signed. Not required when the `attest` function was used to generate the Bundle.
-* `options` `<Object>`
-  * `ctLogThreshold` `<number>`: The number of certificate transparency logs on which the signing certificate must appear. Defaults to `1`.
-  * `tlogThreshold` `<number>`: The number of transparency logs on which the signature must appear. Defaults to `1`.
-  * `certificateIssuer` `<string>`: Value that must appear in the signing certificate's issuer extension (OID 1.3.6.1.4.1.57264.1.1). Not verified if no value is supplied.
-  * `certificateIdentityEmail` `<string>`: Email address which must appear in the signing certificate's Subject Alternative Name (SAN) extension. Must be specified in conjunction with the `certificateIssuer` option. Takes precedence over the `certificateIdentityURI` option. Not verified if no value is supplied.
-  * `certificateIdentityURI` `<string>`: URI which must appear in the signing certificate's Subject Alternative Name (SAN) extension. Must be specified in conjunction with the `certificateIssuer` option. Ignored if the `certificateIdentityEmail` option is set. Not verified if no value is supplied.
-  * `certificateOIDs` `<Record<string, string>>`: A collection of OID/value pairs which must be present in the certificate's extension list. Not verified if no value is supplied.
-  * `keySelector` `<Function>`: Callback invoked to retrieve the public key (as either `string` or `Buffer`) necessary to verify the bundle signature. Not used when the signature was generated from a Fulcio-issued signing certificate.
-    * `hint` `<string>`: The hint from the bundle used to identify the signing key.
      -
      -### tuf
      -
-The `tuf` object contains utility functions for working with the Sigstore TUF repository.
      -
      -#### client([options])
      -
      -Returns a TUF client which can be used to retrieve targets from the Sigstore TUF repository.
      -
-* `options` `<Object>`
-  * `tufMirrorURL` `<string>`: Base URL for the Sigstore TUF repository. Defaults to `'https://tuf-repo-cdn.sigstore.dev'`
-  * `tufRootPath` `<string>`: Path to the initial trusted root for the TUF repository. Defaults to the embedded root.
-  * `tufCachePath` `<string>`: Absolute path to the directory to be used for caching downloaded TUF metadata and targets. Defaults to a directory named "sigstore-js" within the platform-specific application data directory.
      -
      -The returned object exposes a `getTarget(path)` function which returns the
      -contents of the target at the specified path in the Sigstore TUF repository.
      -
      -#### getTarget(path[, options]) (deprecated)
      -
      -Returns the contents of the target at the specified path in the Sigstore TUF repository.
      -This method has been deprecated and will be removed in the next major version.
      -You should use the TUF `client` function to retrieve a stateful TUF client and
      -then call `getTarget` against that object. This will avoid re-initializing the
      -internal TUF state between requests.
      -
-* `path` `<string>`: The [path-relative-url string](https://url.spec.whatwg.org/#path-relative-url-string) that uniquely identifies the target within the Sigstore TUF repository.
-* `options` `<Object>`
-  * `tufMirrorURL` `<string>`: Base URL for the Sigstore TUF repository. Defaults to `'https://tuf-repo-cdn.sigstore.dev'`
-  * `tufRootPath` `<string>`: Path to the initial trusted root for the TUF repository. Defaults to the embedded root.
-  * `tufCachePath` `<string>`: Absolute path to the directory to be used for caching downloaded TUF metadata and targets. Defaults to a directory named "sigstore-js" within the platform-specific application data directory.
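The split between `client()` and the deprecated `getTarget()` is easier to see in code. A minimal sketch, assuming `client()` resolves asynchronously to the stateful TUF client and that `trusted_root.json` is an available target (neither assumption comes from this patch):

```javascript
// Hypothetical usage of the tuf helper documented above.
const { sigstore } = require('sigstore');

(async () => {
  // client() initializes the TUF state once; reuse it for later lookups.
  const tufClient = await sigstore.tuf.client();

  // getTarget() returns the raw contents of the named target.
  const trustedRoot = await tufClient.getTarget('trusted_root.json');
  console.log('fetched %d bytes of trusted root material', trustedRoot.length);
})();
```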
      -
      -
      -### utils
      -
      -The `utils` object contains a few internal utility functions. These are exposed
      -to support the needs of specific `sigstore-js` consumers but should **NOT** be
      -considered part of the stable public interface.
      -
      -## CLI
      -
      -The `sigstore-js` library comes packaged with a basic command line interface
      -for testing and demo purposes. However, the CLI should **NOT** be considered
      -part of the stable interface of the library. If you require a production-ready
      -Sigstore CLI, we recommend you use [`cosign`][4].
      -
      -```shell
      -$ npx sigstore help
      -sigstore  
      -
      -  Usage:
      -
      -  sigstore sign         sign an artifact
      -  sigstore attest       sign an artifact using dsse (Dead Simple Signing Envelope)
      -  sigstore verify       verify an artifact
      -  sigstore version      print version information
      -  sigstore help         print help information
      -```
      -
      -## Credential Sources
      -
      -### GitHub Actions
      -
      -If sigstore-js detects that it is being executed on GitHub Actions, it will use `ACTIONS_ID_TOKEN_REQUEST_URL`
      -and `ACTIONS_ID_TOKEN_REQUEST_TOKEN` environment variables to request an OIDC token with the correct scope.
      -
      -Note: the `id_token: write` permission must be granted to the GitHub Action Job.
      -
      -See https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect
      -for more details.
      -
      -### Environment Variables
      -
      -If the `SIGSTORE_ID_TOKEN` environment variable is set, it will use this to authenticate to Fulcio.
-It is the caller's responsibility to make sure that this token has the correct scopes.
      -
      -### Interactive Flow
      -
      -If sigstore-js cannot detect ambient credentials, then it will prompt the user to go through the
      -interactive flow.
      -
      -
      -
      -[1]: https://github.com/sigstore/rekor
      -[2]: https://github.com/sigstore/protobuf-specs/blob/9b722b68a717778ba4f11543afa4ef93205ab502/protos/sigstore_bundle.proto#L63-L84
      -[3]: https://github.com/secure-systems-lab/dsse
      -[4]: https://github.com/sigstore/cosign
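The `sign()`/`verify()` flow documented in this README translates directly into a short script. A minimal sketch of that flow; the artifact filename and the reliance on `SIGSTORE_ID_TOKEN` are illustrative choices, not part of the patch:

```javascript
// Sign an arbitrary file and immediately verify the resulting bundle,
// following the sign()/verify() contract described in the README above.
const fs = require('fs');
const { sigstore } = require('sigstore');

async function main() {
  const payload = fs.readFileSync('artifact.tgz'); // placeholder artifact

  // sign() returns a Sigstore bundle holding the signature plus the
  // verification material (certificate, transparency log entry).
  const bundle = await sigstore.sign(payload, {
    identityToken: process.env.SIGSTORE_ID_TOKEN, // or rely on ambient CI credentials
  });

  // Because sign() produces a message-signature bundle, verify() needs the
  // original payload bytes as well as the bundle itself.
  await sigstore.verify(bundle, payload);
  console.log('Verified OK');
}

main().catch((err) => {
  console.error(err);
  process.exit(1);
});
```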
      diff --git a/deps/npm/node_modules/sigstore/dist/ca/format.d.ts b/deps/npm/node_modules/sigstore/dist/ca/format.d.ts
      deleted file mode 100644
      index b29f51a71f5647..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/ca/format.d.ts
      +++ /dev/null
      @@ -1,5 +0,0 @@
      -/// 
      -/// 
      -import { KeyObject } from 'crypto';
      -import type { SigningCertificateRequest } from '../external/fulcio';
      -export declare function toCertificateRequest(identityToken: string, publicKey: KeyObject, challenge: Buffer): SigningCertificateRequest;
      diff --git a/deps/npm/node_modules/sigstore/dist/ca/format.js b/deps/npm/node_modules/sigstore/dist/ca/format.js
      deleted file mode 100644
      index 6374243e80e026..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/ca/format.js
      +++ /dev/null
      @@ -1,20 +0,0 @@
      -"use strict";
      -Object.defineProperty(exports, "__esModule", { value: true });
      -exports.toCertificateRequest = void 0;
      -function toCertificateRequest(identityToken, publicKey, challenge) {
      -    return {
      -        credentials: {
      -            oidcIdentityToken: identityToken,
      -        },
      -        publicKeyRequest: {
      -            publicKey: {
      -                algorithm: 'ECDSA',
      -                content: publicKey
      -                    .export({ format: 'pem', type: 'spki' })
      -                    .toString('ascii'),
      -            },
      -            proofOfPossession: challenge.toString('base64'),
      -        },
      -    };
      -}
      -exports.toCertificateRequest = toCertificateRequest;
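A hedged sketch of how the removed `toCertificateRequest()` helper was fed. The ephemeral P-256 key pair and the challenge bytes are placeholders standing in for the signer's real proof-of-possession signature:

```javascript
// Hypothetical invocation of the removed toCertificateRequest() helper.
const crypto = require('crypto');
const { toCertificateRequest } = require('./format');

const { publicKey, privateKey } = crypto.generateKeyPairSync('ec', {
  namedCurve: 'P-256',
});
const identityToken = process.env.SIGSTORE_ID_TOKEN; // placeholder OIDC token
const challenge = crypto.sign('sha256', Buffer.from('oidc-subject'), privateKey);

// Produces { credentials: { oidcIdentityToken }, publicKeyRequest: { ... } }
// with the public key PEM-encoded and the challenge base64-encoded, exactly
// as the deleted code above shows.
console.log(JSON.stringify(toCertificateRequest(identityToken, publicKey, challenge), null, 2));
```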
      diff --git a/deps/npm/node_modules/sigstore/dist/ca/index.d.ts b/deps/npm/node_modules/sigstore/dist/ca/index.d.ts
      deleted file mode 100644
      index 3a6347293aaa8b..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/ca/index.d.ts
      +++ /dev/null
      @@ -1,15 +0,0 @@
      -/// 
      -/// 
      -import { KeyObject } from 'crypto';
      -import type { FetchOptions } from '../types/fetch';
      -export interface CA {
      -    createSigningCertificate: (identityToken: string, publicKey: KeyObject, challenge: Buffer) => Promise;
      -}
      -export type CAClientOptions = {
      -    fulcioBaseURL: string;
      -} & FetchOptions;
      -export declare class CAClient implements CA {
      -    private fulcio;
      -    constructor(options: CAClientOptions);
      -    createSigningCertificate(identityToken: string, publicKey: KeyObject, challenge: Buffer): Promise;
      -}
      diff --git a/deps/npm/node_modules/sigstore/dist/ca/index.js b/deps/npm/node_modules/sigstore/dist/ca/index.js
      deleted file mode 100644
      index 340dd46609aad2..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/ca/index.js
      +++ /dev/null
      @@ -1,39 +0,0 @@
      -"use strict";
      -Object.defineProperty(exports, "__esModule", { value: true });
      -exports.CAClient = void 0;
      -const error_1 = require("../error");
      -const external_1 = require("../external");
      -const format_1 = require("./format");
      -class CAClient {
      -    constructor(options) {
      -        this.fulcio = new external_1.Fulcio({
      -            baseURL: options.fulcioBaseURL,
      -            retry: options.retry,
      -            timeout: options.timeout,
      -        });
      -    }
      -    async createSigningCertificate(identityToken, publicKey, challenge) {
      -        const request = (0, format_1.toCertificateRequest)(identityToken, publicKey, challenge);
      -        try {
      -            const resp = await this.fulcio.createSigningCertificate(request);
      -            // Account for the fact that the response may contain either a
      -            // signedCertificateEmbeddedSct or a signedCertificateDetachedSct.
      -            const cert = resp.signedCertificateEmbeddedSct
      -                ? resp.signedCertificateEmbeddedSct
      -                : resp.signedCertificateDetachedSct;
      -            // Return the first certificate in the chain, which is the signing
      -            // certificate. Specifically not returning the rest of the chain to
      -            // mitigate the risk of errors when verifying the certificate chain.
      -            // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
      -            return cert.chain.certificates.slice(0, 1);
      -        }
      -        catch (err) {
      -            throw new error_1.InternalError({
      -                code: 'CA_CREATE_SIGNING_CERTIFICATE_ERROR',
      -                message: 'error creating signing certificate',
      -                cause: err,
      -            });
      -        }
      -    }
      -}
      -exports.CAClient = CAClient;
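For context, a sketch of driving the removed `CAClient` directly. The Fulcio URL matches the default used elsewhere in this patch; the key pair, challenge, and token are placeholders:

```javascript
// Exchange an OIDC token plus an ephemeral public key for a Fulcio-issued
// signing certificate, using the deleted CAClient shown above.
const crypto = require('crypto');
const { CAClient } = require('./ca');

const ca = new CAClient({ fulcioBaseURL: 'https://fulcio.sigstore.dev' });

const { publicKey, privateKey } = crypto.generateKeyPairSync('ec', {
  namedCurve: 'P-256',
});
const challenge = crypto.sign('sha256', Buffer.from('oidc-subject'), privateKey);

ca.createSigningCertificate(process.env.SIGSTORE_ID_TOKEN, publicKey, challenge)
  // Resolves to a one-element array holding the leaf signing certificate;
  // the rest of the chain is deliberately dropped by the client.
  .then(([signingCert]) => console.log(signingCert))
  .catch((err) => console.error(err.code, err.message));
```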
      diff --git a/deps/npm/node_modules/sigstore/dist/ca/verify/chain.d.ts b/deps/npm/node_modules/sigstore/dist/ca/verify/chain.d.ts
      deleted file mode 100644
      index 0a79b42f714a0f..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/ca/verify/chain.d.ts
      +++ /dev/null
      @@ -1,3 +0,0 @@
      -import * as sigstore from '../../types/sigstore';
      -import { x509Certificate } from '../../x509/cert';
      -export declare function verifyChain(certificate: sigstore.X509Certificate, certificateAuthorities: sigstore.CertificateAuthority[]): x509Certificate[];
      diff --git a/deps/npm/node_modules/sigstore/dist/ca/verify/index.d.ts b/deps/npm/node_modules/sigstore/dist/ca/verify/index.d.ts
      deleted file mode 100644
      index ddf65ff6dfffd8..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/ca/verify/index.d.ts
      +++ /dev/null
      @@ -1,2 +0,0 @@
      -import * as sigstore from '../../types/sigstore';
      -export declare function verifySigningCertificate(bundle: sigstore.BundleWithCertificateChain, trustedRoot: sigstore.TrustedRoot, options: sigstore.CAArtifactVerificationOptions): void;
      diff --git a/deps/npm/node_modules/sigstore/dist/ca/verify/sct.d.ts b/deps/npm/node_modules/sigstore/dist/ca/verify/sct.d.ts
      deleted file mode 100644
      index 29391a74cb65e6..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/ca/verify/sct.d.ts
      +++ /dev/null
      @@ -1,3 +0,0 @@
      -import * as sigstore from '../../types/sigstore';
      -import { x509Certificate } from '../../x509/cert';
      -export declare function verifySCTs(certificateChain: x509Certificate[], ctLogs: sigstore.TransparencyLogInstance[], options: sigstore.ArtifactVerificationOptions_CtlogOptions): void;
      diff --git a/deps/npm/node_modules/sigstore/dist/ca/verify/signer.d.ts b/deps/npm/node_modules/sigstore/dist/ca/verify/signer.d.ts
      deleted file mode 100644
      index 7241b90f6ac5c8..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/ca/verify/signer.d.ts
      +++ /dev/null
      @@ -1,3 +0,0 @@
      -import * as sigstore from '../../types/sigstore';
      -import { x509Certificate } from '../../x509/cert';
      -export declare function verifySignerIdentity(signingCert: x509Certificate, identities: sigstore.CertificateIdentities): void;
      diff --git a/deps/npm/node_modules/sigstore/dist/ca/verify/signer.js b/deps/npm/node_modules/sigstore/dist/ca/verify/signer.js
      index 51d722d7631ee0..6f47651b944c94 100644
      --- a/deps/npm/node_modules/sigstore/dist/ca/verify/signer.js
      +++ b/deps/npm/node_modules/sigstore/dist/ca/verify/signer.js
      @@ -54,7 +54,10 @@ function verifySignerIdentity(signingCert, identities) {
           // specified identities
           const signerVerified = identities.identities.some((identity) => verifyIdentity(signingCert, identity));
           if (!signerVerified) {
      -        throw new error_1.PolicyError('Certificate issued to untrusted signer');
      +        throw new error_1.PolicyError({
      +            code: 'UNTRUSTED_SIGNER_ERROR',
      +            message: 'Certificate issued to untrusted signer',
      +        });
           }
       }
       exports.verifySignerIdentity = verifySignerIdentity;
      diff --git a/deps/npm/node_modules/sigstore/dist/cli/index.d.ts b/deps/npm/node_modules/sigstore/dist/cli/index.d.ts
      deleted file mode 100644
      index 395f0a5a69d304..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/cli/index.d.ts
      +++ /dev/null
      @@ -1 +0,0 @@
      -export declare function processArgv(): Promise;
      diff --git a/deps/npm/node_modules/sigstore/dist/cli/index.js b/deps/npm/node_modules/sigstore/dist/cli/index.js
      deleted file mode 100644
      index 6015cd9df74eac..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/cli/index.js
      +++ /dev/null
      @@ -1,125 +0,0 @@
      -"use strict";
      -var __importDefault = (this && this.__importDefault) || function (mod) {
      -    return (mod && mod.__esModule) ? mod : { "default": mod };
      -};
      -Object.defineProperty(exports, "__esModule", { value: true });
      -exports.processArgv = void 0;
      -/*
      -Copyright 2022 The Sigstore Authors.
      -
      -Licensed under the Apache License, Version 2.0 (the "License");
      -you may not use this file except in compliance with the License.
      -You may obtain a copy of the License at
      -
      -    http://www.apache.org/licenses/LICENSE-2.0
      -
      -Unless required by applicable law or agreed to in writing, software
      -distributed under the License is distributed on an "AS IS" BASIS,
      -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
      -See the License for the specific language governing permissions and
      -limitations under the License.
      -*/
      -const fs_1 = __importDefault(require("fs"));
      -const index_1 = require("../index");
      -const INTOTO_PAYLOAD_TYPE = 'application/vnd.in-toto+json';
      -async function cli(args) {
      -    switch (args[0]) {
      -        case 'sign':
      -            await sign(args[1]);
      -            break;
      -        case 'attest':
      -            await attest(args[1], args[2]);
      -            break;
      -        case 'verify':
      -            await verify(args[1], args[2]);
      -            break;
      -        case 'version':
      -        case '-version':
      -        case '--version':
      -        case '-v':
      -            // eslint-disable-next-line @typescript-eslint/no-var-requires
      -            console.log(require('../../package.json').version);
      -            break;
      -        case 'help':
      -        case '--help':
      -        case '-h':
      -        case '-?':
      -            printUsage();
      -            break;
      -        default:
      -            throw 'Unknown command';
      -    }
      -}
      -function printUsage() {
      -    console.log(`sigstore  
      -
      -  Usage:
      -
      -  sigstore sign         sign an artifact
      -  sigstore attest       sign an artifact using dsse (Dead Simple Signing Envelope)
      -  sigstore verify       verify an artifact
      -  sigstore version      print version information
      -  sigstore help         print help information
      -  `);
      -}
      -function printRekorEntry(bundle, options) {
      -    let url;
      -    if (options.rekorURL === index_1.sigstore.DEFAULT_REKOR_URL) {
      -        url = `https://search.sigstore.dev`;
      -    }
      -    else {
      -        url = `${options.rekorURL}/api/v1/log/entries`;
      -    }
      -    const logIndex = bundle.verificationMaterial?.tlogEntries[0].logIndex;
      -    console.error(`Created entry at index ${logIndex}, available at`);
      -    console.error(`${url}?logIndex=${logIndex}`);
      -}
-// TODO: Allow customizing these options
      -const signOptions = {
      -    oidcClientID: 'sigstore',
      -    oidcIssuer: 'https://oauth2.sigstore.dev/auth',
      -    oidcRedirectURL: process.env.OIDC_REDIRECT_URL,
      -    rekorURL: index_1.sigstore.DEFAULT_REKOR_URL,
      -};
      -async function sign(artifactPath) {
      -    const buffer = fs_1.default.readFileSync(artifactPath);
      -    const bundle = await index_1.sigstore.sign(buffer, signOptions);
      -    printRekorEntry(bundle, signOptions);
      -    console.log(JSON.stringify(bundle));
      -}
      -async function attest(artifactPath, payloadType = INTOTO_PAYLOAD_TYPE) {
      -    const buffer = fs_1.default.readFileSync(artifactPath);
      -    const bundle = await index_1.sigstore.attest(buffer, payloadType, signOptions);
      -    printRekorEntry(bundle, signOptions);
      -    console.log(JSON.stringify(bundle));
      -}
      -async function verify(bundlePath, artifactPath) {
      -    let payload = undefined;
      -    if (artifactPath) {
      -        payload = fs_1.default.readFileSync(artifactPath);
      -    }
      -    const bundleFile = fs_1.default.readFileSync(bundlePath);
      -    const bundle = JSON.parse(bundleFile.toString('utf-8'));
      -    try {
      -        await index_1.sigstore.verify(bundle, payload, {});
      -        console.error('Verified OK');
      -    }
      -    catch (e) {
      -        console.error('Verification failed');
      -        if (e instanceof Error) {
      -            console.error('Error: ' + e.message);
      -        }
      -        process.exit(1);
      -    }
      -}
      -async function processArgv() {
      -    try {
      -        await cli(process.argv.slice(2));
      -        process.exit(0);
      -    }
      -    catch (e) {
      -        console.error(e);
      -        process.exit(1);
      -    }
      -}
      -exports.processArgv = processArgv;
      diff --git a/deps/npm/node_modules/sigstore/dist/config.d.ts b/deps/npm/node_modules/sigstore/dist/config.d.ts
      deleted file mode 100644
      index 89f42038099530..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/config.d.ts
      +++ /dev/null
      @@ -1,54 +0,0 @@
      -import { CA } from './ca';
      -import { Provider } from './identity';
      -import { TLog } from './tlog';
      -import { TSA } from './tsa';
      -import * as sigstore from './types/sigstore';
      -import type { FetchOptions, Retry } from './types/fetch';
      -import type { KeySelector } from './verify';
      -interface CAOptions {
      -    fulcioURL?: string;
      -}
      -interface TLogOptions {
      -    rekorURL?: string;
      -}
      -interface TSAOptions {
      -    tsaServerURL?: string;
      -}
      -export interface IdentityProviderOptions {
      -    identityToken?: string;
      -    oidcIssuer?: string;
      -    oidcClientID?: string;
      -    oidcClientSecret?: string;
      -    oidcRedirectURL?: string;
      -}
      -export type TUFOptions = {
      -    tufMirrorURL?: string;
      -    tufRootPath?: string;
      -    tufCachePath?: string;
      -} & FetchOptions;
      -export type SignOptions = {
      -    identityProvider?: Provider;
      -    tlogUpload?: boolean;
      -} & CAOptions & TLogOptions & TSAOptions & FetchOptions & IdentityProviderOptions;
      -export type VerifyOptions = {
      -    ctLogThreshold?: number;
      -    tlogThreshold?: number;
      -    certificateIssuer?: string;
      -    certificateIdentityEmail?: string;
      -    certificateIdentityURI?: string;
      -    certificateOIDs?: Record;
      -    keySelector?: KeySelector;
      -} & TLogOptions & TUFOptions;
      -export type CreateVerifierOptions = {
      -    keySelector?: KeySelector;
      -} & TUFOptions;
      -export declare const DEFAULT_FULCIO_URL = "https://fulcio.sigstore.dev";
      -export declare const DEFAULT_REKOR_URL = "https://rekor.sigstore.dev";
      -export declare const DEFAULT_RETRY: Retry;
      -export declare const DEFAULT_TIMEOUT = 5000;
      -export declare function createCAClient(options: CAOptions & FetchOptions): CA;
      -export declare function createTLogClient(options: TLogOptions & FetchOptions): TLog;
      -export declare function createTSAClient(options: TSAOptions & FetchOptions): TSA | undefined;
      -export declare function artifactVerificationOptions(options: VerifyOptions): sigstore.RequiredArtifactVerificationOptions;
      -export declare function identityProviders(options: IdentityProviderOptions): Provider[];
      -export {};
      diff --git a/deps/npm/node_modules/sigstore/dist/config.js b/deps/npm/node_modules/sigstore/dist/config.js
      index 1a22c5fef313b7..43c236f0eebd07 100644
      --- a/deps/npm/node_modules/sigstore/dist/config.js
      +++ b/deps/npm/node_modules/sigstore/dist/config.js
      @@ -22,11 +22,8 @@ var __importStar = (this && this.__importStar) || function (mod) {
           __setModuleDefault(result, mod);
           return result;
       };
      -var __importDefault = (this && this.__importDefault) || function (mod) {
      -    return (mod && mod.__esModule) ? mod : { "default": mod };
      -};
       Object.defineProperty(exports, "__esModule", { value: true });
      -exports.identityProviders = exports.artifactVerificationOptions = exports.createTSAClient = exports.createTLogClient = exports.createCAClient = exports.DEFAULT_TIMEOUT = exports.DEFAULT_RETRY = exports.DEFAULT_REKOR_URL = exports.DEFAULT_FULCIO_URL = void 0;
      +exports.artifactVerificationOptions = exports.createBundleBuilder = exports.DEFAULT_TIMEOUT = exports.DEFAULT_RETRY = void 0;
       /*
       Copyright 2023 The Sigstore Authors.
       
      @@ -42,41 +39,72 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
       See the License for the specific language governing permissions and
       limitations under the License.
       */
      -const ca_1 = require("./ca");
      -const identity_1 = __importDefault(require("./identity"));
      -const tlog_1 = require("./tlog");
      -const tsa_1 = require("./tsa");
      +const sign_1 = require("@sigstore/sign");
       const sigstore = __importStar(require("./types/sigstore"));
      -exports.DEFAULT_FULCIO_URL = 'https://fulcio.sigstore.dev';
      -exports.DEFAULT_REKOR_URL = 'https://rekor.sigstore.dev';
       exports.DEFAULT_RETRY = { retries: 2 };
       exports.DEFAULT_TIMEOUT = 5000;
      -function createCAClient(options) {
      -    return new ca_1.CAClient({
      -        fulcioBaseURL: options.fulcioURL || exports.DEFAULT_FULCIO_URL,
      -        retry: options.retry ?? exports.DEFAULT_RETRY,
      -        timeout: options.timeout ?? exports.DEFAULT_TIMEOUT,
      -    });
      +function createBundleBuilder(bundleType, options) {
      +    const bundlerOptions = {
      +        signer: initSigner(options),
      +        witnesses: initWitnesses(options),
      +    };
      +    switch (bundleType) {
      +        case 'messageSignature':
      +            return new sign_1.MessageSignatureBundleBuilder(bundlerOptions);
      +        case 'dsseEnvelope':
      +            return new sign_1.DSSEBundleBuilder(bundlerOptions);
      +    }
       }
      -exports.createCAClient = createCAClient;
      -function createTLogClient(options) {
      -    return new tlog_1.TLogClient({
      -        rekorBaseURL: options.rekorURL || exports.DEFAULT_REKOR_URL,
      +exports.createBundleBuilder = createBundleBuilder;
      +// Instantiate the FulcioSigner based on the supplied options.
      +function initSigner(options) {
      +    return new sign_1.FulcioSigner({
      +        fulcioBaseURL: options.fulcioURL,
      +        identityProvider: options.identityProvider || initIdentityProvider(options),
               retry: options.retry ?? exports.DEFAULT_RETRY,
               timeout: options.timeout ?? exports.DEFAULT_TIMEOUT,
           });
       }
      -exports.createTLogClient = createTLogClient;
      -function createTSAClient(options) {
      -    return options.tsaServerURL
      -        ? new tsa_1.TSAClient({
      +// Instantiate an identity provider based on the supplied options. If an
      +// explicit identity token is provided, use that. Otherwise, use the CI
      +// context provider.
      +function initIdentityProvider(options) {
      +    const token = options.identityToken;
      +    if (token) {
      +        return { getToken: () => Promise.resolve(token) };
      +    }
      +    else {
      +        return new sign_1.CIContextProvider('sigstore');
      +    }
      +}
      +// Instantiate a collection of witnesses based on the supplied options.
      +function initWitnesses(options) {
      +    const witnesses = [];
      +    if (isRekorEnabled(options)) {
      +        witnesses.push(new sign_1.RekorWitness({
      +            rekorBaseURL: options.rekorURL,
      +            fetchOnConflict: false,
      +            retry: options.retry ?? exports.DEFAULT_RETRY,
      +            timeout: options.timeout ?? exports.DEFAULT_TIMEOUT,
      +        }));
      +    }
      +    if (isTSAEnabled(options)) {
      +        witnesses.push(new sign_1.TSAWitness({
                   tsaBaseURL: options.tsaServerURL,
                   retry: options.retry ?? exports.DEFAULT_RETRY,
                   timeout: options.timeout ?? exports.DEFAULT_TIMEOUT,
      -        })
      -        : undefined;
      +        }));
      +    }
      +    return witnesses;
      +}
      +// Type assertion to ensure that Rekor is enabled
      +function isRekorEnabled(options) {
      +    return options.tlogUpload !== false;
      +}
      +// Type assertion to ensure that TSA is enabled
      +function isTSAEnabled(options) {
      +    return options.tsaServerURL !== undefined;
       }
      -exports.createTSAClient = createTSAClient;
 // Assembles the ArtifactVerificationOptions from the supplied VerifyOptions.
       function artifactVerificationOptions(options) {
           // The trusted signers are only used if the options contain a certificate
      @@ -102,7 +130,7 @@ function artifactVerificationOptions(options) {
                       },
                   };
               }
      -        const oids = Object.entries(options.certificateOIDs || {}).map(([oid, value]) => ({
      +        const oids = Object.entries(options.certificateOIDs || /* istanbul ignore next */ {}).map(([oid, value]) => ({
                   oid: { id: oid.split('.').map((s) => parseInt(s, 10)) },
                   value: Buffer.from(value),
               }));
      @@ -122,41 +150,16 @@ function artifactVerificationOptions(options) {
           // Construct the artifact verification options w/ defaults
           return {
               ctlogOptions: {
      -            disable: false,
      -            threshold: options.ctLogThreshold || 1,
      +            disable: options.ctLogThreshold === 0,
      +            threshold: options.ctLogThreshold ?? 1,
                   detachedSct: false,
               },
               tlogOptions: {
      -            disable: false,
      -            threshold: options.tlogThreshold || 1,
      +            disable: options.tlogThreshold === 0,
      +            threshold: options.tlogThreshold ?? 1,
                   performOnlineVerification: false,
               },
               signers,
           };
       }
       exports.artifactVerificationOptions = artifactVerificationOptions;
-// Translates the IdentityProviderOptions into a list of Providers which
      -// should be queried to retrieve an identity token.
      -function identityProviders(options) {
      -    const idps = [];
      -    const token = options.identityToken;
      -    // If an explicit identity token is provided, use that. Setup a dummy
      -    // provider that just returns the token. Otherwise, setup the CI context
      -    // provider and (optionally) the OAuth provider.
      -    if (token) {
      -        idps.push({ getToken: () => Promise.resolve(token) });
      -    }
      -    else {
      -        idps.push(identity_1.default.ciContextProvider());
      -        if (options.oidcIssuer && options.oidcClientID) {
      -            idps.push(identity_1.default.oauthProvider({
      -                issuer: options.oidcIssuer,
      -                clientID: options.oidcClientID,
      -                clientSecret: options.oidcClientSecret,
      -                redirectURL: options.oidcRedirectURL,
      -            }));
      -        }
      -    }
      -    return idps;
      -}
      -exports.identityProviders = identityProviders;
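The rewritten `createBundleBuilder()` above boils down to composing one signer with zero or more witnesses. A minimal sketch of the same wiring done by hand against `@sigstore/sign`; the URLs mirror the public defaults and the token handling is an assumption:

```javascript
// Hand-assemble the signer + witness composition that createBundleBuilder()
// performs for the 'dsseEnvelope' case above.
const { DSSEBundleBuilder, FulcioSigner, RekorWitness } = require('@sigstore/sign');

const signer = new FulcioSigner({
  fulcioBaseURL: 'https://fulcio.sigstore.dev',
  // Equivalent to initIdentityProvider() when an explicit token is supplied.
  identityProvider: { getToken: () => Promise.resolve(process.env.SIGSTORE_ID_TOKEN) },
});

const witnesses = [
  // Only added when tlogUpload !== false (see isRekorEnabled above).
  new RekorWitness({ rekorBaseURL: 'https://rekor.sigstore.dev', fetchOnConflict: false }),
];

// sign()/attest() hand the artifact to this builder to produce a bundle.
const bundler = new DSSEBundleBuilder({ signer, witnesses });
console.log(bundler.constructor.name); // -> 'DSSEBundleBuilder'
```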
      diff --git a/deps/npm/node_modules/sigstore/dist/error.d.ts b/deps/npm/node_modules/sigstore/dist/error.d.ts
      deleted file mode 100644
      index c03bbc31697745..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/error.d.ts
      +++ /dev/null
      @@ -1,20 +0,0 @@
      -declare class BaseError extends Error {
      -    cause: any | undefined;
      -    constructor(message: string, cause?: any);
      -}
      -export declare class VerificationError extends BaseError {
      -}
      -export declare class ValidationError extends BaseError {
      -}
      -export declare class PolicyError extends BaseError {
      -}
      -type InternalErrorCode = 'TLOG_FETCH_ENTRY_ERROR' | 'TLOG_CREATE_ENTRY_ERROR' | 'CA_CREATE_SIGNING_CERTIFICATE_ERROR' | 'TSA_CREATE_TIMESTAMP_ERROR' | 'TUF_FIND_TARGET_ERROR' | 'TUF_REFRESH_METADATA_ERROR' | 'TUF_DOWNLOAD_TARGET_ERROR' | 'TUF_READ_TARGET_ERROR';
      -export declare class InternalError extends BaseError {
      -    code: InternalErrorCode;
      -    constructor({ code, message, cause, }: {
      -        code: InternalErrorCode;
      -        message: string;
      -        cause?: any;
      -    });
      -}
      -export {};
      diff --git a/deps/npm/node_modules/sigstore/dist/error.js b/deps/npm/node_modules/sigstore/dist/error.js
      index cee15dff90b614..b0a7dbc83f7105 100644
      --- a/deps/npm/node_modules/sigstore/dist/error.js
      +++ b/deps/npm/node_modules/sigstore/dist/error.js
      @@ -1,6 +1,4 @@
       "use strict";
      -Object.defineProperty(exports, "__esModule", { value: true });
      -exports.InternalError = exports.PolicyError = exports.ValidationError = exports.VerificationError = void 0;
       /*
       Copyright 2023 The Sigstore Authors.
       
      @@ -16,27 +14,22 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
       See the License for the specific language governing permissions and
       limitations under the License.
       */
      -/* eslint-disable @typescript-eslint/no-explicit-any */
      +Object.defineProperty(exports, "__esModule", { value: true });
      +exports.PolicyError = exports.VerificationError = void 0;
       class BaseError extends Error {
      -    constructor(message, cause) {
      +    constructor({ code, message, cause, }) {
               super(message);
               this.name = this.constructor.name;
      +        this.code = code;
               this.cause = cause;
           }
       }
       class VerificationError extends BaseError {
      +    constructor(message) {
      +        super({ code: 'VERIFICATION_ERROR', message });
      +    }
       }
       exports.VerificationError = VerificationError;
      -class ValidationError extends BaseError {
      -}
      -exports.ValidationError = ValidationError;
       class PolicyError extends BaseError {
       }
       exports.PolicyError = PolicyError;
      -class InternalError extends BaseError {
      -    constructor({ code, message, cause, }) {
      -        super(message, cause);
      -        this.code = code;
      -    }
      -}
      -exports.InternalError = InternalError;
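To make the reshaped error hierarchy concrete, a small sketch of raising and inspecting the structured errors, mirroring the `PolicyError` usage in `ca/verify/signer.js` earlier in this patch:

```javascript
// Relative require mirrors how the package's own modules import ./error.
const { PolicyError, VerificationError } = require('./error');

try {
  throw new PolicyError({
    code: 'UNTRUSTED_SIGNER_ERROR',
    message: 'Certificate issued to untrusted signer',
  });
} catch (err) {
  // Callers can branch on the machine-readable code instead of the message.
  console.error(`${err.name} [${err.code}]: ${err.message}`);
}

// VerificationError now pins its code to 'VERIFICATION_ERROR'.
console.log(new VerificationError('bundle could not be verified').code);
```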
      diff --git a/deps/npm/node_modules/sigstore/dist/external/error.d.ts b/deps/npm/node_modules/sigstore/dist/external/error.d.ts
      deleted file mode 100644
      index 87a4bc5451a3de..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/external/error.d.ts
      +++ /dev/null
      @@ -1,10 +0,0 @@
      -import fetch from 'make-fetch-happen';
-type Response = Awaited<ReturnType<typeof fetch>>;
      -export declare class HTTPError extends Error {
      -    response: Response;
      -    statusCode: number;
      -    location?: string;
      -    constructor(response: Response);
      -}
      -export declare const checkStatus: (response: Response) => Response;
      -export {};
      diff --git a/deps/npm/node_modules/sigstore/dist/external/error.js b/deps/npm/node_modules/sigstore/dist/external/error.js
      deleted file mode 100644
      index d1e1c3df8a8787..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/external/error.js
      +++ /dev/null
      @@ -1,21 +0,0 @@
      -"use strict";
      -Object.defineProperty(exports, "__esModule", { value: true });
      -exports.checkStatus = exports.HTTPError = void 0;
      -class HTTPError extends Error {
      -    constructor(response) {
      -        super(`HTTP Error: ${response.status} ${response.statusText}`);
      -        this.response = response;
      -        this.statusCode = response.status;
      -        this.location = response.headers?.get('Location') || undefined;
      -    }
      -}
      -exports.HTTPError = HTTPError;
      -const checkStatus = (response) => {
      -    if (response.ok) {
      -        return response;
      -    }
      -    else {
      -        throw new HTTPError(response);
      -    }
      -};
      -exports.checkStatus = checkStatus;
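For reference, a sketch of how the deleted `checkStatus()`/`HTTPError` pair was typically used with `make-fetch-happen`; the Rekor endpoint is the public default and the response field read at the end is an assumption:

```javascript
// Turn non-2xx responses from make-fetch-happen into typed HTTPErrors.
const fetch = require('make-fetch-happen');
const { checkStatus, HTTPError } = require('./error');

fetch('https://rekor.sigstore.dev/api/v1/log')
  .then(checkStatus)
  .then((res) => res.json())
  .then((log) => console.log('tree size:', log.treeSize))
  .catch((err) => {
    if (err instanceof HTTPError) {
      console.error(`request failed with HTTP ${err.statusCode}`);
    } else {
      console.error(err);
    }
  });
```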
      diff --git a/deps/npm/node_modules/sigstore/dist/external/fulcio.d.ts b/deps/npm/node_modules/sigstore/dist/external/fulcio.d.ts
      deleted file mode 100644
      index 64b0fc5e347982..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/external/fulcio.d.ts
      +++ /dev/null
      @@ -1,38 +0,0 @@
      -import type { FetchOptions } from '../types/fetch';
      -export type FulcioOptions = {
      -    baseURL: string;
      -} & FetchOptions;
      -export interface SigningCertificateRequest {
      -    credentials: {
      -        oidcIdentityToken: string;
      -    };
      -    publicKeyRequest: {
      -        publicKey: {
      -            algorithm: string;
      -            content: string;
      -        };
      -        proofOfPossession: string;
      -    };
      -}
      -export interface SigningCertificateResponse {
      -    signedCertificateEmbeddedSct?: {
      -        chain: {
      -            certificates: string[];
      -        };
      -    };
      -    signedCertificateDetachedSct?: {
      -        chain: {
      -            certificates: string[];
      -        };
      -        signedCertificateTimestamp: string;
      -    };
      -}
      -/**
      - * Fulcio API client.
      - */
      -export declare class Fulcio {
      -    private fetch;
      -    private baseUrl;
      -    constructor(options: FulcioOptions);
      -    createSigningCertificate(request: SigningCertificateRequest): Promise;
      -}
      diff --git a/deps/npm/node_modules/sigstore/dist/external/index.d.ts b/deps/npm/node_modules/sigstore/dist/external/index.d.ts
      deleted file mode 100644
      index ef28eca4a951dd..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/external/index.d.ts
      +++ /dev/null
      @@ -1,4 +0,0 @@
      -export { HTTPError } from './error';
      -export { Fulcio } from './fulcio';
      -export { Rekor } from './rekor';
      -export { TimestampAuthority } from './tsa';
      diff --git a/deps/npm/node_modules/sigstore/dist/external/index.js b/deps/npm/node_modules/sigstore/dist/external/index.js
      deleted file mode 100644
      index f40816e9b7ca40..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/external/index.js
      +++ /dev/null
      @@ -1,26 +0,0 @@
      -"use strict";
      -Object.defineProperty(exports, "__esModule", { value: true });
      -exports.TimestampAuthority = exports.Rekor = exports.Fulcio = exports.HTTPError = void 0;
      -/*
      -Copyright 2022 The Sigstore Authors.
      -
      -Licensed under the Apache License, Version 2.0 (the "License");
      -you may not use this file except in compliance with the License.
      -You may obtain a copy of the License at
      -
      -    http://www.apache.org/licenses/LICENSE-2.0
      -
      -Unless required by applicable law or agreed to in writing, software
      -distributed under the License is distributed on an "AS IS" BASIS,
      -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
      -See the License for the specific language governing permissions and
      -limitations under the License.
      -*/
      -var error_1 = require("./error");
      -Object.defineProperty(exports, "HTTPError", { enumerable: true, get: function () { return error_1.HTTPError; } });
      -var fulcio_1 = require("./fulcio");
      -Object.defineProperty(exports, "Fulcio", { enumerable: true, get: function () { return fulcio_1.Fulcio; } });
      -var rekor_1 = require("./rekor");
      -Object.defineProperty(exports, "Rekor", { enumerable: true, get: function () { return rekor_1.Rekor; } });
      -var tsa_1 = require("./tsa");
      -Object.defineProperty(exports, "TimestampAuthority", { enumerable: true, get: function () { return tsa_1.TimestampAuthority; } });
      diff --git a/deps/npm/node_modules/sigstore/dist/external/rekor.d.ts b/deps/npm/node_modules/sigstore/dist/external/rekor.d.ts
      deleted file mode 100644
      index 6729ad3e2aacf6..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/external/rekor.d.ts
      +++ /dev/null
      @@ -1,41 +0,0 @@
      -import type { LogEntry, ProposedDSSEEntry, ProposedEntry, ProposedHashedRekordEntry, ProposedIntotoEntry, InclusionProof as RekorInclusionProof, SearchIndex, SearchLogQuery } from '@sigstore/rekor-types';
      -import type { FetchOptions } from '../types/fetch';
      -export type { ProposedDSSEEntry, ProposedEntry, ProposedHashedRekordEntry, ProposedIntotoEntry, RekorInclusionProof, SearchIndex, SearchLogQuery, };
      -export type Entry = {
      -    uuid: string;
      -} & LogEntry['x'];
      -export type RekorOptions = {
      -    baseURL: string;
      -} & FetchOptions;
      -/**
      - * Rekor API client.
      - */
      -export declare class Rekor {
      -    private fetch;
      -    private baseUrl;
      -    constructor(options: RekorOptions);
      -    /**
      -     * Create a new entry in the Rekor log.
      -     * @param propsedEntry {ProposedEntry} Data to create a new entry
      -     * @returns {Promise} The created entry
      -     */
      -    createEntry(propsedEntry: ProposedEntry): Promise;
      -    /**
      -     * Get an entry from the Rekor log.
      -     * @param uuid {string} The UUID of the entry to retrieve
      -     * @returns {Promise} The retrieved entry
      -     */
      -    getEntry(uuid: string): Promise;
      -    /**
      -     * Search the Rekor log index for entries matching the given query.
      -     * @param opts {SearchIndex} Options to search the Rekor log
      -     * @returns {Promise} UUIDs of matching entries
      -     */
      -    searchIndex(opts: SearchIndex): Promise;
      -    /**
      -     * Search the Rekor logs for matching the given query.
      -     * @param opts {SearchLogQuery} Query to search the Rekor log
      -     * @returns {Promise} List of matching entries
      -     */
      -    searchLog(opts: SearchLogQuery): Promise;
      -}
      diff --git a/deps/npm/node_modules/sigstore/dist/external/tsa.d.ts b/deps/npm/node_modules/sigstore/dist/external/tsa.d.ts
      deleted file mode 100644
      index 9b5f31151a83d8..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/external/tsa.d.ts
      +++ /dev/null
      @@ -1,18 +0,0 @@
      -/// 
      -import type { FetchOptions } from '../types/fetch';
      -export interface TimestampRequest {
      -    artifactHash: string;
      -    hashAlgorithm: string;
      -    certificates?: boolean;
      -    nonce?: number;
      -    tsaPolicyOID?: string;
      -}
      -export type TimestampAuthorityOptions = {
      -    baseURL: string;
      -} & FetchOptions;
      -export declare class TimestampAuthority {
      -    private fetch;
      -    private baseUrl;
      -    constructor(options: TimestampAuthorityOptions);
      -    createTimestamp(request: TimestampRequest): Promise;
      -}
      diff --git a/deps/npm/node_modules/sigstore/dist/identity/ci.d.ts b/deps/npm/node_modules/sigstore/dist/identity/ci.d.ts
      deleted file mode 100644
      index 428606f26524bb..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/identity/ci.d.ts
      +++ /dev/null
      @@ -1,11 +0,0 @@
      -import { Provider } from './provider';
      -/**
      - * CIContextProvider is a composite identity provider which will iterate
      - * over all of the CI-specific providers and return the token from the first
      - * one that resolves.
      - */
      -export declare class CIContextProvider implements Provider {
      -    private audience;
      -    constructor(audience: string);
      -    getToken(): Promise;
      -}
      diff --git a/deps/npm/node_modules/sigstore/dist/identity/index.d.ts b/deps/npm/node_modules/sigstore/dist/identity/index.d.ts
      deleted file mode 100644
      index 3eb0b444d120ff..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/identity/index.d.ts
      +++ /dev/null
      @@ -1,30 +0,0 @@
      -import { Provider } from './provider';
      -/**
      - * oauthProvider returns a new Provider instance which attempts to retrieve
      - * an identity token from the configured OAuth2 issuer.
      - *
      - * @param issuer Base URL of the issuer
      - * @param clientID Client ID for the issuer
      - * @param clientSecret Client secret for the issuer (optional)
      - * @returns {Provider}
      - */
      -declare function oauthProvider(options: {
      -    issuer: string;
      -    clientID: string;
      -    clientSecret?: string;
      -    redirectURL?: string;
      -}): Provider;
      -/**
      - * ciContextProvider returns a new Provider instance which attempts to retrieve
      - * an identity token from the CI context.
      - *
      - * @param audience audience claim for the generated token
      - * @returns {Provider}
      - */
      -declare function ciContextProvider(audience?: string): Provider;
      -declare const _default: {
      -    ciContextProvider: typeof ciContextProvider;
      -    oauthProvider: typeof oauthProvider;
      -};
      -export default _default;
      -export { Provider } from './provider';
      diff --git a/deps/npm/node_modules/sigstore/dist/identity/index.js b/deps/npm/node_modules/sigstore/dist/identity/index.js
      deleted file mode 100644
      index 351d607106700f..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/identity/index.js
      +++ /dev/null
      @@ -1,51 +0,0 @@
      -"use strict";
      -Object.defineProperty(exports, "__esModule", { value: true });
      -/*
      -Copyright 2022 The Sigstore Authors.
      -
      -Licensed under the Apache License, Version 2.0 (the "License");
      -you may not use this file except in compliance with the License.
      -You may obtain a copy of the License at
      -
      -    http://www.apache.org/licenses/LICENSE-2.0
      -
      -Unless required by applicable law or agreed to in writing, software
      -distributed under the License is distributed on an "AS IS" BASIS,
      -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
      -See the License for the specific language governing permissions and
      -limitations under the License.
      -*/
      -const ci_1 = require("./ci");
      -const issuer_1 = require("./issuer");
      -const oauth_1 = require("./oauth");
      -/**
      - * oauthProvider returns a new Provider instance which attempts to retrieve
      - * an identity token from the configured OAuth2 issuer.
      - *
      - * @param issuer Base URL of the issuer
      - * @param clientID Client ID for the issuer
      - * @param clientSecret Client secret for the issuer (optional)
      - * @returns {Provider}
      - */
      -function oauthProvider(options) {
      -    return new oauth_1.OAuthProvider({
      -        issuer: new issuer_1.Issuer(options.issuer),
      -        clientID: options.clientID,
      -        clientSecret: options.clientSecret,
      -        redirectURL: options.redirectURL,
      -    });
      -}
      -/**
      - * ciContextProvider returns a new Provider instance which attempts to retrieve
      - * an identity token from the CI context.
      - *
      - * @param audience audience claim for the generated token
      - * @returns {Provider}
      - */
      -function ciContextProvider(audience = 'sigstore') {
      -    return new ci_1.CIContextProvider(audience);
      -}
      -exports.default = {
      -    ciContextProvider,
      -    oauthProvider,
      -};
      diff --git a/deps/npm/node_modules/sigstore/dist/identity/issuer.d.ts b/deps/npm/node_modules/sigstore/dist/identity/issuer.d.ts
      deleted file mode 100644
      index 37ad713f4d89a7..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/identity/issuer.d.ts
      +++ /dev/null
      @@ -1,15 +0,0 @@
      -/**
- * The Issuer represents a single OAuth2 provider.
      - *
      - * The Issuer is configured with a provider's base OAuth2 endpoint which is
      - * used to retrieve the associated configuration information.
      - */
      -export declare class Issuer {
      -    private baseURL;
      -    private fetch;
      -    private config?;
      -    constructor(baseURL: string);
      -    authEndpoint(): Promise;
      -    tokenEndpoint(): Promise;
      -    private loadOpenIDConfig;
      -}
      diff --git a/deps/npm/node_modules/sigstore/dist/identity/issuer.js b/deps/npm/node_modules/sigstore/dist/identity/issuer.js
      deleted file mode 100644
      index 2bf6c20f34932d..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/identity/issuer.js
      +++ /dev/null
      @@ -1,53 +0,0 @@
      -"use strict";
      -var __importDefault = (this && this.__importDefault) || function (mod) {
      -    return (mod && mod.__esModule) ? mod : { "default": mod };
      -};
      -Object.defineProperty(exports, "__esModule", { value: true });
      -exports.Issuer = void 0;
      -/*
      -Copyright 2022 The Sigstore Authors.
      -
      -Licensed under the Apache License, Version 2.0 (the "License");
      -you may not use this file except in compliance with the License.
      -You may obtain a copy of the License at
      -
      -    http://www.apache.org/licenses/LICENSE-2.0
      -
      -Unless required by applicable law or agreed to in writing, software
      -distributed under the License is distributed on an "AS IS" BASIS,
      -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
      -See the License for the specific language governing permissions and
      -limitations under the License.
      -*/
      -const make_fetch_happen_1 = __importDefault(require("make-fetch-happen"));
      -// Standard endpoint for retrieving OpenID configuration information
      -const OPENID_CONFIG_PATH = '/.well-known/openid-configuration';
      -/**
- * The Issuer represents a single OAuth2 provider.
      - *
      - * The Issuer is configured with a provider's base OAuth2 endpoint which is
      - * used to retrieve the associated configuration information.
      - */
      -class Issuer {
      -    constructor(baseURL) {
      -        this.baseURL = baseURL;
      -        this.fetch = make_fetch_happen_1.default.defaults({ retry: 2 });
      -    }
      -    async authEndpoint() {
      -        if (!this.config) {
      -            this.config = await this.loadOpenIDConfig();
      -        }
      -        return this.config.authorization_endpoint;
      -    }
      -    async tokenEndpoint() {
      -        if (!this.config) {
      -            this.config = await this.loadOpenIDConfig();
      -        }
      -        return this.config.token_endpoint;
      -    }
      -    async loadOpenIDConfig() {
      -        const url = `${this.baseURL}${OPENID_CONFIG_PATH}`;
      -        return this.fetch(url).then((res) => res.json());
      -    }
      -}
      -exports.Issuer = Issuer;
      diff --git a/deps/npm/node_modules/sigstore/dist/identity/oauth.d.ts b/deps/npm/node_modules/sigstore/dist/identity/oauth.d.ts
      deleted file mode 100644
      index 3c9fae9ac15387..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/identity/oauth.d.ts
      +++ /dev/null
      @@ -1,26 +0,0 @@
      -import { Issuer } from './issuer';
      -import { Provider } from './provider';
      -interface OAuthProviderOptions {
      -    issuer: Issuer;
      -    clientID: string;
      -    clientSecret?: string;
      -    redirectURL?: string;
      -}
      -export declare class OAuthProvider implements Provider {
      -    private clientID;
      -    private clientSecret;
      -    private issuer;
      -    private codeVerifier;
      -    private state;
      -    private redirectURI?;
      -    constructor(options: OAuthProviderOptions);
-    getToken(): Promise<string>;
      -    private initiateAuthRequest;
      -    private getIDToken;
      -    private getBasicAuthHeaderValue;
      -    private getAuthRequestURL;
      -    private getAuthRequestParams;
      -    private getCodeChallenge;
      -    private openURL;
      -}
      -export {};
      diff --git a/deps/npm/node_modules/sigstore/dist/identity/oauth.js b/deps/npm/node_modules/sigstore/dist/identity/oauth.js
      deleted file mode 100644
      index 7cb5a00cdb6942..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/identity/oauth.js
      +++ /dev/null
      @@ -1,197 +0,0 @@
      -"use strict";
      -var __importDefault = (this && this.__importDefault) || function (mod) {
      -    return (mod && mod.__esModule) ? mod : { "default": mod };
      -};
      -Object.defineProperty(exports, "__esModule", { value: true });
      -exports.OAuthProvider = void 0;
      -/*
      -Copyright 2022 The Sigstore Authors.
      -
      -Licensed under the Apache License, Version 2.0 (the "License");
      -you may not use this file except in compliance with the License.
      -You may obtain a copy of the License at
      -
      -    http://www.apache.org/licenses/LICENSE-2.0
      -
      -Unless required by applicable law or agreed to in writing, software
      -distributed under the License is distributed on an "AS IS" BASIS,
      -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
      -See the License for the specific language governing permissions and
      -limitations under the License.
      -*/
      -const assert_1 = __importDefault(require("assert"));
      -const child_process_1 = __importDefault(require("child_process"));
      -const http_1 = __importDefault(require("http"));
      -const make_fetch_happen_1 = __importDefault(require("make-fetch-happen"));
      -const url_1 = require("url");
      -const util_1 = require("../util");
      -class OAuthProvider {
      -    constructor(options) {
      -        this.clientID = options.clientID;
      -        this.clientSecret = options.clientSecret || '';
      -        this.issuer = options.issuer;
      -        this.redirectURI = options.redirectURL;
      -        this.codeVerifier = generateRandomString(32);
      -        this.state = generateRandomString(16);
      -    }
      -    async getToken() {
      -        const authCode = await this.initiateAuthRequest();
      -        return this.getIDToken(authCode);
      -    }
-    // Initiates the authorization request. This will start an HTTP server to
      -    // receive the post-auth redirect and then open the user's default browser to
      -    // the provider's authorization page.
      -    async initiateAuthRequest() {
      -        const server = http_1.default.createServer();
      -        const sockets = new Set();
      -        // Start server and wait till it is listening. If a redirect URL was
      -        // provided, use that. Otherwise, use a random port and construct the
      -        // redirect URL.
      -        await new Promise((resolve) => {
      -            if (this.redirectURI) {
      -                const url = new url_1.URL(this.redirectURI);
      -                server.listen(Number(url.port), url.hostname, resolve);
      -            }
      -            else {
      -                server.listen(0, resolve);
      -                // Get port the server is listening on and construct the server URL
      -                const port = server.address().port;
      -                this.redirectURI = `http://localhost:${port}`;
      -            }
      -        });
      -        // Keep track of connections to the server so we can force a shutdown
      -        server.on('connection', (socket) => {
      -            sockets.add(socket);
      -            socket.once('close', () => {
      -                sockets.delete(socket);
      -            });
      -        });
      -        const result = new Promise((resolve, reject) => {
      -            // Set-up handler for post-auth redirect
      -            server.on('request', (req, res) => {
      -                if (!req.url) {
      -                    reject('invalid server request');
      -                    return;
      -                }
      -                res.writeHead(200);
      -                res.end('Auth Successful');
      -                // Parse incoming request URL
      -                const query = new url_1.URL(req.url, this.redirectURI).searchParams;
      -                // Check to see if the state matches
      -                if (query.get('state') !== this.state) {
      -                    reject('invalid state value');
      -                    return;
      -                }
      -                const authCode = query.get('code');
      -                // Force-close any open connections to the server so we can get a
      -                // clean shutdown
      -                for (const socket of sockets) {
      -                    socket.destroy();
      -                    sockets.delete(socket);
      -                }
      -                // Return auth code once we've shutdown server
      -                server.close(() => {
      -                    if (!authCode) {
      -                        reject('authorization code not found');
      -                    }
      -                    else {
      -                        resolve(authCode);
      -                    }
      -                });
      -            });
      -        });
      -        try {
      -            // Open browser to start authorization request
      -            const authBaseURL = await this.issuer.authEndpoint();
      -            const authURL = this.getAuthRequestURL(authBaseURL);
      -            await this.openURL(authURL);
      -        }
      -        catch (err) {
      -            // Prevent leaked server handler on error
      -            server.close();
      -            throw err;
      -        }
      -        return result;
      -    }
-    // Uses the provided authorization code to retrieve the ID token from the
      -    // provider
      -    async getIDToken(authCode) {
      -        (0, assert_1.default)(this.redirectURI);
      -        const tokenEndpointURL = await this.issuer.tokenEndpoint();
      -        const params = new url_1.URLSearchParams();
      -        params.append('grant_type', 'authorization_code');
      -        params.append('code', authCode);
      -        params.append('redirect_uri', this.redirectURI);
      -        params.append('code_verifier', this.codeVerifier);
      -        const response = await (0, make_fetch_happen_1.default)(tokenEndpointURL, {
      -            method: 'POST',
      -            headers: { Authorization: `Basic ${this.getBasicAuthHeaderValue()}` },
      -            body: params,
      -        }).then((r) => r.json());
      -        return response.id_token;
      -    }
      -    // Construct the basic auth header value from the client ID and secret
      -    getBasicAuthHeaderValue() {
      -        return util_1.encoding.base64Encode(`${this.clientID}:${this.clientSecret}`);
      -    }
      -    // Generate starting URL for authorization request
      -    getAuthRequestURL(baseURL) {
      -        const params = this.getAuthRequestParams();
      -        return `${baseURL}?${params.toString()}`;
      -    }
      -    // Collect parameters for authorization request
      -    getAuthRequestParams() {
      -        (0, assert_1.default)(this.redirectURI);
      -        const codeChallenge = this.getCodeChallenge();
      -        return new url_1.URLSearchParams({
      -            response_type: 'code',
      -            client_id: this.clientID,
      -            client_secret: this.clientSecret,
      -            scope: 'openid email',
      -            redirect_uri: this.redirectURI,
      -            code_challenge: codeChallenge,
      -            code_challenge_method: 'S256',
      -            state: this.state,
      -            nonce: generateRandomString(16),
      -        });
      -    }
      -    // Generate code challenge for authorization request
      -    getCodeChallenge() {
      -        return util_1.encoding.base64URLEscape(util_1.crypto.hash(this.codeVerifier).toString('base64'));
      -    }
      -    // Open the supplied URL in the user's default browser
      -    async openURL(url) {
      -        return new Promise((resolve, reject) => {
      -            let open = null;
      -            let command = `"${url}"`;
      -            switch (process.platform) {
      -                case 'darwin':
      -                    open = 'open';
      -                    break;
-                case 'linux': case 'freebsd': case 'netbsd': case 'openbsd':
      -                    open = 'xdg-open';
      -                    break;
      -                case 'win32':
      -                    open = 'start';
      -                    command = `"" ${command}`;
      -                    break;
      -                default:
      -                    return reject(`OAuth: unsupported platform: ${process.platform}`);
      -            }
      -            console.error(`Your browser will now be opened to: ${url}`);
      -            child_process_1.default.exec(`${open} ${command}`, undefined, (err) => {
      -                if (err) {
      -                    reject(err);
      -                }
      -                else {
      -                    resolve();
      -                }
      -            });
      -        });
      -    }
      -}
      -exports.OAuthProvider = OAuthProvider;
      -// Generate random code verifier value
      -function generateRandomString(len) {
      -    return util_1.encoding.base64URLEscape(util_1.crypto.randomBytes(len).toString('base64'));
      -}
      diff --git a/deps/npm/node_modules/sigstore/dist/identity/provider.d.ts b/deps/npm/node_modules/sigstore/dist/identity/provider.d.ts
      deleted file mode 100644
      index 95ec03e9ffff6c..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/identity/provider.d.ts
      +++ /dev/null
      @@ -1,3 +0,0 @@
      -export interface Provider {
-    getToken: () => Promise<string>;
      -}
      diff --git a/deps/npm/node_modules/sigstore/dist/index.d.ts b/deps/npm/node_modules/sigstore/dist/index.d.ts
      deleted file mode 100644
      index dbac0640092b04..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/index.d.ts
      +++ /dev/null
      @@ -1,2 +0,0 @@
      -export type { Provider as IdentityProvider } from './identity';
      -export * as sigstore from './sigstore';
      diff --git a/deps/npm/node_modules/sigstore/dist/index.js b/deps/npm/node_modules/sigstore/dist/index.js
      index 126fce58e45bde..341c1fa504d1e8 100644
      --- a/deps/npm/node_modules/sigstore/dist/index.js
      +++ b/deps/npm/node_modules/sigstore/dist/index.js
      @@ -1,27 +1,34 @@
       "use strict";
      -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
      -    if (k2 === undefined) k2 = k;
      -    var desc = Object.getOwnPropertyDescriptor(m, k);
      -    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      -      desc = { enumerable: true, get: function() { return m[k]; } };
      -    }
      -    Object.defineProperty(o, k2, desc);
      -}) : (function(o, m, k, k2) {
      -    if (k2 === undefined) k2 = k;
      -    o[k2] = m[k];
      -}));
      -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
      -    Object.defineProperty(o, "default", { enumerable: true, value: v });
      -}) : function(o, v) {
      -    o["default"] = v;
      -});
      -var __importStar = (this && this.__importStar) || function (mod) {
      -    if (mod && mod.__esModule) return mod;
      -    var result = {};
      -    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
      -    __setModuleDefault(result, mod);
      -    return result;
      -};
       Object.defineProperty(exports, "__esModule", { value: true });
      -exports.sigstore = void 0;
      -exports.sigstore = __importStar(require("./sigstore"));
      +exports.verify = exports.sign = exports.createVerifier = exports.attest = exports.VerificationError = exports.PolicyError = exports.TUFError = exports.InternalError = exports.DEFAULT_REKOR_URL = exports.DEFAULT_FULCIO_URL = exports.ValidationError = void 0;
      +/*
      +Copyright 2022 The Sigstore Authors.
      +
      +Licensed under the Apache License, Version 2.0 (the "License");
      +you may not use this file except in compliance with the License.
      +You may obtain a copy of the License at
      +
      +    http://www.apache.org/licenses/LICENSE-2.0
      +
      +Unless required by applicable law or agreed to in writing, software
      +distributed under the License is distributed on an "AS IS" BASIS,
      +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
      +See the License for the specific language governing permissions and
      +limitations under the License.
      +*/
      +var bundle_1 = require("@sigstore/bundle");
      +Object.defineProperty(exports, "ValidationError", { enumerable: true, get: function () { return bundle_1.ValidationError; } });
      +var sign_1 = require("@sigstore/sign");
      +Object.defineProperty(exports, "DEFAULT_FULCIO_URL", { enumerable: true, get: function () { return sign_1.DEFAULT_FULCIO_URL; } });
      +Object.defineProperty(exports, "DEFAULT_REKOR_URL", { enumerable: true, get: function () { return sign_1.DEFAULT_REKOR_URL; } });
      +Object.defineProperty(exports, "InternalError", { enumerable: true, get: function () { return sign_1.InternalError; } });
      +var tuf_1 = require("@sigstore/tuf");
      +Object.defineProperty(exports, "TUFError", { enumerable: true, get: function () { return tuf_1.TUFError; } });
      +var error_1 = require("./error");
      +Object.defineProperty(exports, "PolicyError", { enumerable: true, get: function () { return error_1.PolicyError; } });
      +Object.defineProperty(exports, "VerificationError", { enumerable: true, get: function () { return error_1.VerificationError; } });
      +var sigstore_1 = require("./sigstore");
      +Object.defineProperty(exports, "attest", { enumerable: true, get: function () { return sigstore_1.attest; } });
      +Object.defineProperty(exports, "createVerifier", { enumerable: true, get: function () { return sigstore_1.createVerifier; } });
      +Object.defineProperty(exports, "sign", { enumerable: true, get: function () { return sigstore_1.sign; } });
      +Object.defineProperty(exports, "verify", { enumerable: true, get: function () { return sigstore_1.verify; } });
      diff --git a/deps/npm/node_modules/sigstore/dist/sign.d.ts b/deps/npm/node_modules/sigstore/dist/sign.d.ts
      deleted file mode 100644
      index 7d903c06e120a0..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/sign.d.ts
      +++ /dev/null
      @@ -1,28 +0,0 @@
-/// <reference types="node" />
      -import { SignerFunc } from './types/signature';
      -import * as sigstore from './types/sigstore';
      -import type { CA } from './ca';
      -import type { Provider } from './identity';
      -import type { TLog } from './tlog';
      -import type { TSA } from './tsa';
      -export interface SignOptions {
      -    ca: CA;
      -    tlog: TLog;
      -    tsa?: TSA;
      -    identityProviders: Provider[];
      -    tlogUpload?: boolean;
      -    signer?: SignerFunc;
      -}
      -export declare class Signer {
      -    private ca;
      -    private tlog;
      -    private tsa?;
      -    private tlogUpload;
      -    private signer;
      -    private identityProviders;
      -    constructor(options: SignOptions);
-    signBlob(payload: Buffer): Promise<sigstore.Bundle>;
-    signAttestation(payload: Buffer, payloadType: string): Promise<sigstore.Bundle>;
      -    private signWithEphemeralKey;
      -    private getIdentityToken;
      -}
      diff --git a/deps/npm/node_modules/sigstore/dist/sign.js b/deps/npm/node_modules/sigstore/dist/sign.js
      deleted file mode 100644
      index 96e6272750b493..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/sign.js
      +++ /dev/null
      @@ -1,120 +0,0 @@
      -"use strict";
      -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
      -    if (k2 === undefined) k2 = k;
      -    var desc = Object.getOwnPropertyDescriptor(m, k);
      -    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      -      desc = { enumerable: true, get: function() { return m[k]; } };
      -    }
      -    Object.defineProperty(o, k2, desc);
      -}) : (function(o, m, k, k2) {
      -    if (k2 === undefined) k2 = k;
      -    o[k2] = m[k];
      -}));
      -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
      -    Object.defineProperty(o, "default", { enumerable: true, value: v });
      -}) : function(o, v) {
      -    o["default"] = v;
      -});
      -var __importStar = (this && this.__importStar) || function (mod) {
      -    if (mod && mod.__esModule) return mod;
      -    var result = {};
      -    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
      -    __setModuleDefault(result, mod);
      -    return result;
      -};
      -Object.defineProperty(exports, "__esModule", { value: true });
      -exports.Signer = void 0;
      -const sigstore = __importStar(require("./types/sigstore"));
      -const util_1 = require("./util");
      -class Signer {
      -    constructor(options) {
      -        this.identityProviders = [];
      -        this.ca = options.ca;
      -        this.tlog = options.tlog;
      -        this.tsa = options.tsa;
      -        this.identityProviders = options.identityProviders;
      -        this.tlogUpload = options.tlogUpload ?? true;
      -        this.signer = options.signer || this.signWithEphemeralKey.bind(this);
      -    }
      -    async signBlob(payload) {
      -        // Get signature and verification material for payload
      -        const sigMaterial = await this.signer(payload);
      -        // Calculate artifact digest
      -        const digest = util_1.crypto.hash(payload);
      -        // Create a Rekor entry (if tlogUpload is enabled)
      -        const entry = this.tlogUpload
      -            ? await this.tlog.createMessageSignatureEntry(digest, sigMaterial)
      -            : undefined;
      -        return sigstore.toMessageSignatureBundle({
      -            digest,
      -            signature: sigMaterial,
      -            tlogEntry: entry,
      -            timestamp: this.tsa
      -                ? await this.tsa.createTimestamp(sigMaterial.signature)
      -                : undefined,
      -        });
      -    }
      -    async signAttestation(payload, payloadType) {
      -        // Pre-authentication encoding to be signed
      -        const paeBuffer = util_1.dsse.preAuthEncoding(payloadType, payload);
      -        // Get signature and verification material for pae
      -        const sigMaterial = await this.signer(paeBuffer);
      -        const envelope = {
      -            payloadType,
      -            payload: payload,
      -            signatures: [
      -                {
      -                    keyid: sigMaterial.key?.id || '',
      -                    sig: sigMaterial.signature,
      -                },
      -            ],
      -        };
      -        // Create a Rekor entry (if tlogUpload is enabled)
      -        const entry = this.tlogUpload
      -            ? await this.tlog.createDSSEEntry(envelope, sigMaterial)
      -            : undefined;
      -        return sigstore.toDSSEBundle({
      -            envelope,
      -            signature: sigMaterial,
      -            tlogEntry: entry,
      -            timestamp: this.tsa
      -                ? await this.tsa.createTimestamp(sigMaterial.signature)
      -                : undefined,
      -        });
      -    }
      -    async signWithEphemeralKey(payload) {
-        // Create ephemeral key pair
      -        const keypair = util_1.crypto.generateKeyPair();
      -        // Retrieve identity token from one of the supplied identity providers
      -        const identityToken = await this.getIdentityToken();
      -        // Extract challenge claim from OIDC token
      -        const subject = util_1.oidc.extractJWTSubject(identityToken);
      -        // Construct challenge value by encrypting subject with private key
      -        const challenge = util_1.crypto.signBlob(Buffer.from(subject), keypair.privateKey);
      -        // Create signing certificate
      -        const certificates = await this.ca.createSigningCertificate(identityToken, keypair.publicKey, challenge);
      -        // Generate artifact signature
      -        const signature = util_1.crypto.signBlob(payload, keypair.privateKey);
      -        return {
      -            signature,
      -            certificates,
      -            key: undefined,
      -        };
      -    }
      -    async getIdentityToken() {
      -        const aggErrs = [];
      -        for (const provider of this.identityProviders) {
      -            try {
      -                const token = await provider.getToken();
      -                if (token) {
      -                    return token;
      -                }
      -            }
      -            catch (err) {
      -                aggErrs.push(err);
      -            }
      -        }
      -        throw new Error(`Identity token providers failed: ${aggErrs}`);
      -    }
      -}
      -exports.Signer = Signer;
      diff --git a/deps/npm/node_modules/sigstore/dist/sigstore-utils.d.ts b/deps/npm/node_modules/sigstore/dist/sigstore-utils.d.ts
      deleted file mode 100644
      index 38f15dc7340d29..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/sigstore-utils.d.ts
      +++ /dev/null
      @@ -1,8 +0,0 @@
-/// <reference types="node" />
      -import { SignOptions } from './config';
      -import { SignerFunc } from './types/signature';
      -import * as sigstore from './types/sigstore';
      -export declare function createDSSEEnvelope(payload: Buffer, payloadType: string, options: {
      -    signer: SignerFunc;
-}): Promise<sigstore.SerializedEnvelope>;
-export declare function createRekorEntry(dsseEnvelope: sigstore.SerializedEnvelope, publicKey: string, options?: SignOptions): Promise<sigstore.SerializedBundle>;
      diff --git a/deps/npm/node_modules/sigstore/dist/sigstore-utils.js b/deps/npm/node_modules/sigstore/dist/sigstore-utils.js
      deleted file mode 100644
      index dc75692f40bf02..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/sigstore-utils.js
      +++ /dev/null
      @@ -1,80 +0,0 @@
      -"use strict";
      -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
      -    if (k2 === undefined) k2 = k;
      -    var desc = Object.getOwnPropertyDescriptor(m, k);
      -    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      -      desc = { enumerable: true, get: function() { return m[k]; } };
      -    }
      -    Object.defineProperty(o, k2, desc);
      -}) : (function(o, m, k, k2) {
      -    if (k2 === undefined) k2 = k;
      -    o[k2] = m[k];
      -}));
      -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
      -    Object.defineProperty(o, "default", { enumerable: true, value: v });
      -}) : function(o, v) {
      -    o["default"] = v;
      -});
      -var __importStar = (this && this.__importStar) || function (mod) {
      -    if (mod && mod.__esModule) return mod;
      -    var result = {};
      -    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
      -    __setModuleDefault(result, mod);
      -    return result;
      -};
      -Object.defineProperty(exports, "__esModule", { value: true });
      -exports.createRekorEntry = exports.createDSSEEnvelope = void 0;
      -/*
      -Copyright 2022 The Sigstore Authors.
      -
      -Licensed under the Apache License, Version 2.0 (the "License");
      -you may not use this file except in compliance with the License.
      -You may obtain a copy of the License at
      -
      -    http://www.apache.org/licenses/LICENSE-2.0
      -
      -Unless required by applicable law or agreed to in writing, software
      -distributed under the License is distributed on an "AS IS" BASIS,
      -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
      -See the License for the specific language governing permissions and
      -limitations under the License.
      -*/
      -const config_1 = require("./config");
      -const signature_1 = require("./types/signature");
      -const sigstore = __importStar(require("./types/sigstore"));
      -const util_1 = require("./util");
      -async function createDSSEEnvelope(payload, payloadType, options) {
      -    // Pre-authentication encoding to be signed
      -    const paeBuffer = util_1.dsse.preAuthEncoding(payloadType, payload);
      -    // Get signature and verification material for pae
      -    const sigMaterial = await options.signer(paeBuffer);
      -    const envelope = {
      -        payloadType,
      -        payload,
      -        signatures: [
      -            {
      -                keyid: sigMaterial.key?.id || '',
      -                sig: sigMaterial.signature,
      -            },
      -        ],
      -    };
      -    return sigstore.Envelope.toJSON(envelope);
      -}
      -exports.createDSSEEnvelope = createDSSEEnvelope;
      -// Accepts a signed DSSE envelope and a PEM-encoded public key to be added to the
      -// transparency log. Returns a Sigstore bundle suitable for offline verification.
      -async function createRekorEntry(dsseEnvelope, publicKey, options = {}) {
      -    const envelope = sigstore.Envelope.fromJSON(dsseEnvelope);
      -    const tlog = (0, config_1.createTLogClient)(options);
      -    const sigMaterial = (0, signature_1.extractSignatureMaterial)(envelope, publicKey);
      -    const entry = await tlog.createDSSEEntry(envelope, sigMaterial, {
      -        fetchOnConflict: true,
      -    });
      -    const bundle = sigstore.toDSSEBundle({
      -        envelope,
      -        signature: sigMaterial,
      -        tlogEntry: entry,
      -    });
      -    return sigstore.bundleToJSON(bundle);
      -}
      -exports.createRekorEntry = createRekorEntry;
      diff --git a/deps/npm/node_modules/sigstore/dist/sigstore.d.ts b/deps/npm/node_modules/sigstore/dist/sigstore.d.ts
      deleted file mode 100644
      index 1da5e8ecc5fe5b..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/sigstore.d.ts
      +++ /dev/null
      @@ -1,23 +0,0 @@
-/// <reference types="node" />
      -import * as tuf from '@sigstore/tuf';
      -import * as config from './config';
      -import * as sigstore from './types/sigstore';
-export declare function sign(payload: Buffer, options?: config.SignOptions): Promise<sigstore.SerializedBundle>;
-export declare function attest(payload: Buffer, payloadType: string, options?: config.SignOptions): Promise<sigstore.SerializedBundle>;
-export declare function verify(bundle: sigstore.SerializedBundle, payload?: Buffer, options?: config.VerifyOptions): Promise<void>;
      -export interface BundleVerifier {
      -    verify(bundle: sigstore.SerializedBundle): void;
      -}
-export declare function createVerifier(options: config.CreateVerifierOptions): Promise<BundleVerifier>;
      -declare const tufUtils: {
-    client: (options?: config.TUFOptions) => Promise<tuf.TUF>;
-    getTarget: (path: string, options?: config.TUFOptions) => Promise<string>;
      -};
      -export type { TUF } from '@sigstore/tuf';
      -export type { SignOptions, VerifyOptions } from './config';
      -export { InternalError, PolicyError, ValidationError, VerificationError, } from './error';
      -export * as utils from './sigstore-utils';
      -export type { SerializedBundle as Bundle, SerializedEnvelope as Envelope, } from './types/sigstore';
      -export { tufUtils as tuf };
      -export declare const DEFAULT_FULCIO_URL = "https://fulcio.sigstore.dev";
      -export declare const DEFAULT_REKOR_URL = "https://rekor.sigstore.dev";
      diff --git a/deps/npm/node_modules/sigstore/dist/sigstore.js b/deps/npm/node_modules/sigstore/dist/sigstore.js
      index dca476dd292030..24fff291ab2b7e 100644
      --- a/deps/npm/node_modules/sigstore/dist/sigstore.js
      +++ b/deps/npm/node_modules/sigstore/dist/sigstore.js
      @@ -23,7 +23,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
           return result;
       };
       Object.defineProperty(exports, "__esModule", { value: true });
      -exports.DEFAULT_REKOR_URL = exports.DEFAULT_FULCIO_URL = exports.tuf = exports.utils = exports.VerificationError = exports.ValidationError = exports.PolicyError = exports.InternalError = exports.createVerifier = exports.verify = exports.attest = exports.sign = void 0;
      +exports.createVerifier = exports.verify = exports.attest = exports.sign = void 0;
       /*
       Copyright 2023 The Sigstore Authors.
       
      @@ -39,60 +39,40 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
       See the License for the specific language governing permissions and
       limitations under the License.
       */
      +const bundle_1 = require("@sigstore/bundle");
       const tuf = __importStar(require("@sigstore/tuf"));
       const config = __importStar(require("./config"));
      -const sign_1 = require("./sign");
      -const sigstore = __importStar(require("./types/sigstore"));
       const verify_1 = require("./verify");
      -async function sign(payload, options = {}) {
      -    const ca = config.createCAClient(options);
      -    const tlog = config.createTLogClient(options);
      -    const idps = config.identityProviders(options);
      -    const signer = new sign_1.Signer({
      -        ca,
      -        tlog,
      -        identityProviders: options.identityProvider
      -            ? [options.identityProvider]
      -            : idps,
      -        tlogUpload: options.tlogUpload,
      -    });
      -    const bundle = await signer.signBlob(payload);
      -    return sigstore.bundleToJSON(bundle);
      +async function sign(payload,
      +/* istanbul ignore next */
      +options = {}) {
      +    const bundler = config.createBundleBuilder('messageSignature', options);
      +    const bundle = await bundler.create({ data: payload });
      +    return (0, bundle_1.bundleToJSON)(bundle);
       }
       exports.sign = sign;
      -async function attest(payload, payloadType, options = {}) {
      -    const ca = config.createCAClient(options);
      -    const tlog = config.createTLogClient(options);
      -    const tsa = config.createTSAClient(options);
      -    const idps = config.identityProviders(options);
      -    const signer = new sign_1.Signer({
      -        ca,
      -        tlog,
      -        tsa,
      -        identityProviders: options.identityProvider
      -            ? [options.identityProvider]
      -            : idps,
      -        tlogUpload: options.tlogUpload,
      -    });
      -    const bundle = await signer.signAttestation(payload, payloadType);
      -    return sigstore.bundleToJSON(bundle);
      +async function attest(payload, payloadType,
      +/* istanbul ignore next */
      +options = {}) {
      +    const bundler = config.createBundleBuilder('dsseEnvelope', options);
      +    const bundle = await bundler.create({ data: payload, type: payloadType });
      +    return (0, bundle_1.bundleToJSON)(bundle);
       }
       exports.attest = attest;
      -async function verify(bundle, payload, options = {}) {
      -    const trustedRoot = await tuf.getTrustedRoot({
      -        mirrorURL: options.tufMirrorURL,
      -        rootPath: options.tufRootPath,
      -        cachePath: options.tufCachePath,
      -        retry: options.retry ?? config.DEFAULT_RETRY,
      -        timeout: options.timeout ?? config.DEFAULT_TIMEOUT,
      -    });
      -    const verifier = new verify_1.Verifier(trustedRoot, options.keySelector);
      -    const deserializedBundle = sigstore.bundleFromJSON(bundle);
      -    const opts = config.artifactVerificationOptions(options);
      -    return verifier.verify(deserializedBundle, opts, payload);
      +async function verify(bundle, dataOrOptions, options) {
      +    let data;
      +    if (Buffer.isBuffer(dataOrOptions)) {
      +        data = dataOrOptions;
      +    }
      +    else {
      +        options = dataOrOptions;
      +    }
      +    return createVerifier(options).then((verifier) => verifier.verify(bundle, data));
       }
       exports.verify = verify;
      -async function createVerifier(options) {
      +async function createVerifier(
      +/* istanbul ignore next */
      +options = {}) {
           const trustedRoot = await tuf.getTrustedRoot({
               mirrorURL: options.tufMirrorURL,
               rootPath: options.tufRootPath,
      @@ -103,44 +83,10 @@ async function createVerifier(options) {
           const verifier = new verify_1.Verifier(trustedRoot, options.keySelector);
           const verifyOpts = config.artifactVerificationOptions(options);
           return {
      -        verify: (bundle) => {
      -            const deserializedBundle = sigstore.bundleFromJSON(bundle);
      -            return verifier.verify(deserializedBundle, verifyOpts);
      +        verify: (bundle, payload) => {
      +            const deserializedBundle = (0, bundle_1.bundleFromJSON)(bundle);
      +            return verifier.verify(deserializedBundle, verifyOpts, payload);
               },
           };
       }
       exports.createVerifier = createVerifier;
      -const tufUtils = {
      -    client: (options = {}) => {
      -        return tuf.initTUF({
      -            mirrorURL: options.tufMirrorURL,
      -            rootPath: options.tufRootPath,
      -            cachePath: options.tufCachePath,
      -            retry: options.retry,
      -            timeout: options.timeout,
      -        });
      -    },
      -    /*
      -     * @deprecated Use tufUtils.client instead.
      -     */
      -    getTarget: (path, options = {}) => {
      -        return tuf
      -            .initTUF({
      -            mirrorURL: options.tufMirrorURL,
      -            rootPath: options.tufRootPath,
      -            cachePath: options.tufCachePath,
      -            retry: options.retry,
      -            timeout: options.timeout,
      -        })
      -            .then((t) => t.getTarget(path));
      -    },
      -};
      -exports.tuf = tufUtils;
      -var error_1 = require("./error");
      -Object.defineProperty(exports, "InternalError", { enumerable: true, get: function () { return error_1.InternalError; } });
      -Object.defineProperty(exports, "PolicyError", { enumerable: true, get: function () { return error_1.PolicyError; } });
      -Object.defineProperty(exports, "ValidationError", { enumerable: true, get: function () { return error_1.ValidationError; } });
      -Object.defineProperty(exports, "VerificationError", { enumerable: true, get: function () { return error_1.VerificationError; } });
      -exports.utils = __importStar(require("./sigstore-utils"));
      -exports.DEFAULT_FULCIO_URL = config.DEFAULT_FULCIO_URL;
      -exports.DEFAULT_REKOR_URL = config.DEFAULT_REKOR_URL;
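
For reference, the reworked verify() above now distinguishes its second argument by type: a Buffer is treated as the artifact bytes to check, anything else as the options bag forwarded to createVerifier(). Both forms end up in createVerifier(), whose returned verify(bundle, payload) now also accepts the optional payload. A small sketch of the two call shapes follows; the option name is taken from the createVerifier() body above and the cache path is invented.

    const { verify } = require('sigstore');

    async function check(bundle, artifact) {
      // Artifact bytes plus explicit TUF options.
      await verify(bundle, artifact, { tufCachePath: '/tmp/sigstore-tuf' });
      // Options only: the second argument is not a Buffer, so it is used as options.
      await verify(bundle, { tufCachePath: '/tmp/sigstore-tuf' });
    }
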
      diff --git a/deps/npm/node_modules/sigstore/dist/tlog/format.d.ts b/deps/npm/node_modules/sigstore/dist/tlog/format.d.ts
      deleted file mode 100644
      index 8a00f546b874fd..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/tlog/format.d.ts
      +++ /dev/null
      @@ -1,7 +0,0 @@
-/// <reference types="node" />
      -import { SignatureMaterial } from '../types/signature';
      -import { Envelope } from '../types/sigstore';
      -import type { ProposedDSSEEntry, ProposedHashedRekordEntry, ProposedIntotoEntry } from '../external/rekor';
      -export declare function toProposedDSSEEntry(envelope: Envelope, signature: SignatureMaterial, apiVersion?: string): ProposedDSSEEntry;
      -export declare function toProposedHashedRekordEntry(digest: Buffer, signature: SignatureMaterial): ProposedHashedRekordEntry;
      -export declare function toProposedIntotoEntry(envelope: Envelope, signature: SignatureMaterial, apiVersion?: string): ProposedIntotoEntry;
      diff --git a/deps/npm/node_modules/sigstore/dist/tlog/index.d.ts b/deps/npm/node_modules/sigstore/dist/tlog/index.d.ts
      deleted file mode 100644
      index 6bb7d42861dc2c..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/tlog/index.d.ts
      +++ /dev/null
      @@ -1,23 +0,0 @@
-/// <reference types="node" />
      -import { SignatureMaterial } from '../types/signature';
      -import * as sigstore from '../types/sigstore';
      -import type { Entry } from '../external/rekor';
      -import type { FetchOptions } from '../types/fetch';
      -interface CreateEntryOptions {
      -    fetchOnConflict?: boolean;
      -}
      -export interface TLog {
-    createMessageSignatureEntry: (digest: Buffer, sigMaterial: SignatureMaterial) => Promise<Entry>;
-    createDSSEEntry: (envelope: sigstore.Envelope, sigMaterial: SignatureMaterial, options?: CreateEntryOptions) => Promise<Entry>;
      -}
      -export type TLogClientOptions = {
      -    rekorBaseURL: string;
      -} & FetchOptions;
      -export declare class TLogClient implements TLog {
      -    private rekor;
      -    constructor(options: TLogClientOptions);
-    createMessageSignatureEntry(digest: Buffer, sigMaterial: SignatureMaterial, options?: CreateEntryOptions): Promise<Entry>;
-    createDSSEEntry(envelope: sigstore.Envelope, sigMaterial: SignatureMaterial, options?: CreateEntryOptions): Promise<Entry>;
      -    private createEntry;
      -}
      -export {};
      diff --git a/deps/npm/node_modules/sigstore/dist/tlog/verify/body.d.ts b/deps/npm/node_modules/sigstore/dist/tlog/verify/body.d.ts
      deleted file mode 100644
      index 17de4f5c9698ab..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/tlog/verify/body.d.ts
      +++ /dev/null
      @@ -1,2 +0,0 @@
      -import * as sigstore from '../../types/sigstore';
      -export declare function verifyTLogBody(entry: sigstore.VerifiableTransparencyLogEntry, bundleContent: sigstore.Bundle['content']): boolean;
      diff --git a/deps/npm/node_modules/sigstore/dist/tlog/verify/checkpoint.js b/deps/npm/node_modules/sigstore/dist/tlog/verify/checkpoint.js
      new file mode 100644
      index 00000000000000..f6f35a5cad64dd
      --- /dev/null
      +++ b/deps/npm/node_modules/sigstore/dist/tlog/verify/checkpoint.js
      @@ -0,0 +1,148 @@
      +"use strict";
      +Object.defineProperty(exports, "__esModule", { value: true });
      +exports.verifyCheckpoint = void 0;
      +const error_1 = require("../../error");
      +const util_1 = require("../../util");
      +// Separator between the note and the signatures in a checkpoint
      +const CHECKPOINT_SEPARATOR = '\n\n';
      +// Checkpoint signatures are of the following form:
      +// "–  \n"
      +// where:
      +// - the prefix is an emdash (U+2014).
      +// -  gives a human-readable representation of the signing ID.
      +// -  is the first 4 bytes of the SHA256 hash of the
      +//   associated public key followed by the signature bytes.
      +const SIGNATURE_REGEX = /\u2014 (\S+) (\S+)\n/g;
      +// Verifies the checkpoint value in the given tlog entry. There are two steps
      +// to the verification:
      +// 1. Verify that all signatures in the checkpoint can be verified against a
      +//    trusted public key
      +// 2. Verify that the root hash in the checkpoint matches the root hash in the
      +//    inclusion proof
      +// See: https://github.com/transparency-dev/formats/blob/main/log/README.md
      +function verifyCheckpoint(entry, tlogs) {
      +    // Filter tlog instances to just those which were valid at the time of the
      +    // entry
      +    const validTLogs = filterTLogInstances(tlogs, entry.integratedTime);
      +    const inclusionProof = entry.inclusionProof;
      +    const signedNote = SignedNote.fromString(inclusionProof.checkpoint.envelope);
      +    const checkpoint = LogCheckpoint.fromString(signedNote.note);
      +    // Verify that the signatures in the checkpoint are all valid, also check
      +    // that the root hash from the checkpoint matches the root hash in the
      +    // inclusion proof
      +    return (signedNote.verify(validTLogs) &&
      +        util_1.crypto.bufferEqual(checkpoint.logHash, inclusionProof.rootHash));
      +}
      +exports.verifyCheckpoint = verifyCheckpoint;
      +// SignedNote represents a signed note from a transparency log checkpoint. Consists
+// of a body (or note) and one or more signatures calculated over the body. See
      +// https://github.com/transparency-dev/formats/blob/main/log/README.md#signed-envelope
      +class SignedNote {
      +    constructor(note, signatures) {
      +        this.note = note;
      +        this.signatures = signatures;
      +    }
      +    // Deserialize a SignedNote from a string
      +    static fromString(envelope) {
      +        if (!envelope.includes(CHECKPOINT_SEPARATOR)) {
      +            throw new error_1.VerificationError('malformed checkpoint: no separator');
      +        }
      +        // Split the note into the header and the data portions at the separator
      +        const split = envelope.indexOf(CHECKPOINT_SEPARATOR);
      +        const header = envelope.slice(0, split + 1);
      +        const data = envelope.slice(split + CHECKPOINT_SEPARATOR.length);
      +        // Find all the signature lines in the data portion
      +        const matches = data.matchAll(SIGNATURE_REGEX);
      +        // Parse each of the matched signature lines into the name and signature.
      +        // The first four bytes of the signature are the key hint (should match the
      +        // first four bytes of the log ID), and the rest is the signature itself.
      +        const signatures = Array.from(matches, (match) => {
      +            const [, name, signature] = match;
      +            const sigBytes = Buffer.from(signature, 'base64');
      +            if (sigBytes.length < 5) {
      +                throw new error_1.VerificationError('malformed checkpoint: invalid signature');
      +            }
      +            return {
      +                name,
      +                keyHint: sigBytes.subarray(0, 4),
      +                signature: sigBytes.subarray(4),
      +            };
      +        });
      +        if (signatures.length === 0) {
      +            throw new error_1.VerificationError('malformed checkpoint: no signatures');
      +        }
      +        return new SignedNote(header, signatures);
      +    }
      +    // Verifies the signatures in the SignedNote. For each signature, the
      +    // corresponding transparency log is looked up by the key hint and the
      +    // signature is verified against the public key in the transparency log.
      +    // Throws an error if any of the signatures are invalid.
      +    verify(tlogs) {
      +        const data = Buffer.from(this.note, 'utf-8');
      +        return this.signatures.every((signature) => {
      +            // Find the transparency log instance with the matching key hint
      +            const tlog = tlogs.find((tlog) => util_1.crypto.bufferEqual(tlog.logId.keyId.subarray(0, 4), signature.keyHint));
      +            if (!tlog) {
      +                return false;
      +            }
      +            const publicKey = util_1.crypto.createPublicKey(tlog.publicKey.rawBytes);
      +            return util_1.crypto.verifyBlob(data, publicKey, signature.signature);
      +        });
      +    }
      +}
      +// LogCheckpoint represents a transparency log checkpoint. Consists of the
      +// following:
      +//  - origin: the name of the transparency log
      +//  - logSize: the size of the log at the time of the checkpoint
      +//  - logHash: the root hash of the log at the time of the checkpoint
      +//  - rest: the rest of the checkpoint body, which is a list of log entries
      +// See:
      +// https://github.com/transparency-dev/formats/blob/main/log/README.md#checkpoint-body
      +class LogCheckpoint {
      +    constructor(origin, logSize, logHash, rest) {
      +        this.origin = origin;
      +        this.logSize = logSize;
      +        this.logHash = logHash;
      +        this.rest = rest;
      +    }
      +    static fromString(note) {
      +        const lines = note.trim().split('\n');
      +        if (lines.length < 4) {
      +            throw new error_1.VerificationError('malformed checkpoint: too few lines in header');
      +        }
      +        const origin = lines[0];
      +        const logSize = BigInt(lines[1]);
      +        const rootHash = Buffer.from(lines[2], 'base64');
      +        const rest = lines.slice(3);
      +        return new LogCheckpoint(origin, logSize, rootHash, rest);
      +    }
      +}
      +// Filter the list of tlog instances to only those which have usable public
      +// keys and were valid at the given time.
      +function filterTLogInstances(tlogInstances, integratedTime) {
      +    const targetDate = new Date(Number(integratedTime) * 1000);
      +    return tlogInstances.filter((tlog) => {
      +        // Must have a log ID
      +        if (!tlog.logId) {
      +            return false;
      +        }
      +        // If the tlog doesn't have a public key, we can't use it
      +        const publicKey = tlog.publicKey;
      +        if (publicKey === undefined) {
      +            return false;
      +        }
      +        // If the tlog doesn't have a rawBytes field, we can't use it
      +        if (publicKey.rawBytes === undefined) {
      +            return false;
      +        }
      +        // If the tlog doesn't have a validFor field, we don't need to check it
      +        const validFor = publicKey.validFor;
      +        if (validFor === undefined) {
      +            return true;
      +        }
      +        // Check that the integrated time is within the validFor range
      +        return (validFor.start !== undefined &&
      +            validFor.start <= targetDate &&
      +            (validFor.end === undefined || targetDate <= validFor.end));
      +    });
      +}
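
To make the parsing logic in the new checkpoint.js concrete, here is an invented checkpoint envelope in the shape SignedNote.fromString() and LogCheckpoint.fromString() expect. Every value below is fabricated for illustration; real envelopes come from the inclusion proof attached to the tlog entry, and the fake signature would of course fail the public-key check in SignedNote.verify().

    // All values below are made up; the layout mirrors what the parsers above expect.
    const envelope = [
      'rekor.example.dev - 1193050959916656506',       // note line 1: origin
      '21428036',                                      // note line 2: log size (parsed as BigInt)
      'rxnoKyFZlJ7/R6bMh/d3lcqwKqAy5CL1LcNBJP17kgQ=',  // note line 3: base64 root hash
      'extra extension line (collected into rest)',    // note line 4+: kept verbatim
      '',                                              // blank line: the CHECKPOINT_SEPARATOR
      '\u2014 rekor.example.dev RmFrZUtleUhpbnRBbmRTaWduYXR1cmVCeXRlcw==',
      '',
    ].join('\n');
    // SignedNote.fromString(envelope) splits at the blank line and collects the
    // signature lines; LogCheckpoint.fromString() then parses the note header.
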
      diff --git a/deps/npm/node_modules/sigstore/dist/tlog/verify/index.d.ts b/deps/npm/node_modules/sigstore/dist/tlog/verify/index.d.ts
      deleted file mode 100644
      index 4f96f820731f03..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/tlog/verify/index.d.ts
      +++ /dev/null
      @@ -1,2 +0,0 @@
      -import * as sigstore from '../../types/sigstore';
      -export declare function verifyTLogEntries(bundle: sigstore.Bundle, trustedRoot: sigstore.TrustedRoot, options: sigstore.ArtifactVerificationOptions_TlogOptions): void;
      diff --git a/deps/npm/node_modules/sigstore/dist/tlog/verify/index.js b/deps/npm/node_modules/sigstore/dist/tlog/verify/index.js
      index cbb93133c2685f..9224feffde00b0 100644
      --- a/deps/npm/node_modules/sigstore/dist/tlog/verify/index.js
      +++ b/deps/npm/node_modules/sigstore/dist/tlog/verify/index.js
      @@ -1,27 +1,4 @@
       "use strict";
      -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
      -    if (k2 === undefined) k2 = k;
      -    var desc = Object.getOwnPropertyDescriptor(m, k);
      -    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      -      desc = { enumerable: true, get: function() { return m[k]; } };
      -    }
      -    Object.defineProperty(o, k2, desc);
      -}) : (function(o, m, k, k2) {
      -    if (k2 === undefined) k2 = k;
      -    o[k2] = m[k];
      -}));
      -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
      -    Object.defineProperty(o, "default", { enumerable: true, value: v });
      -}) : function(o, v) {
      -    o["default"] = v;
      -});
      -var __importStar = (this && this.__importStar) || function (mod) {
      -    if (mod && mod.__esModule) return mod;
      -    var result = {};
      -    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
      -    __setModuleDefault(result, mod);
      -    return result;
      -};
       Object.defineProperty(exports, "__esModule", { value: true });
       exports.verifyTLogEntries = void 0;
       /*
      @@ -39,31 +16,51 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
       See the License for the specific language governing permissions and
       limitations under the License.
       */
      +const bundle_1 = require("@sigstore/bundle");
       const error_1 = require("../../error");
      -const sigstore = __importStar(require("../../types/sigstore"));
       const cert_1 = require("../../x509/cert");
       const body_1 = require("./body");
      +const checkpoint_1 = require("./checkpoint");
      +const merkle_1 = require("./merkle");
       const set_1 = require("./set");
       // Verifies that the number of tlog entries that pass offline verification
       // is greater than or equal to the threshold specified in the options.
       function verifyTLogEntries(bundle, trustedRoot, options) {
      +    if (bundle.mediaType === bundle_1.BUNDLE_V01_MEDIA_TYPE) {
      +        (0, bundle_1.assertBundleV01)(bundle);
      +        verifyTLogEntriesForBundleV01(bundle, trustedRoot, options);
      +    }
      +    else {
      +        (0, bundle_1.assertBundleLatest)(bundle);
      +        verifyTLogEntriesForBundleLatest(bundle, trustedRoot, options);
      +    }
      +}
      +exports.verifyTLogEntries = verifyTLogEntries;
      +function verifyTLogEntriesForBundleV01(bundle, trustedRoot, options) {
           if (options.performOnlineVerification) {
               throw new error_1.VerificationError('Online verification not implemented');
           }
           // Extract the signing cert, if available
           const signingCert = signingCertificate(bundle);
           // Iterate over the tlog entries and verify each one
      -    const verifiedEntries = bundle.verificationMaterial.tlogEntries.filter((entry) => verifyTLogEntryOffline(entry, bundle.content, trustedRoot.tlogs, signingCert));
      +    const verifiedEntries = bundle.verificationMaterial.tlogEntries.filter((entry) => verifyTLogEntryWithInclusionPromise(entry, bundle.content, trustedRoot.tlogs, signingCert));
           if (verifiedEntries.length < options.threshold) {
               throw new error_1.VerificationError('tlog verification failed');
           }
       }
      -exports.verifyTLogEntries = verifyTLogEntries;
      -function verifyTLogEntryOffline(entry, bundleContent, tlogs, signingCert) {
      -    // Check that the TLog entry has the fields necessary for verification
      -    if (!sigstore.isVerifiableTransparencyLogEntry(entry)) {
      -        return false;
      +function verifyTLogEntriesForBundleLatest(bundle, trustedRoot, options) {
      +    if (options.performOnlineVerification) {
      +        throw new error_1.VerificationError('Online verification not implemented');
      +    }
      +    // Extract the signing cert, if available
      +    const signingCert = signingCertificate(bundle);
      +    // Iterate over the tlog entries and verify each one
      +    const verifiedEntries = bundle.verificationMaterial.tlogEntries.filter((entry) => verifyTLogEntryWithInclusionProof(entry, bundle.content, trustedRoot.tlogs, signingCert));
      +    if (verifiedEntries.length < options.threshold) {
      +        throw new error_1.VerificationError('tlog verification failed');
           }
      +}
      +function verifyTLogEntryWithInclusionPromise(entry, bundleContent, tlogs, signingCert) {
     // If there is a signing certificate available, check that the tlog integrated
           // time is within the certificate's validity period; otherwise, skip this
           // check.
      @@ -74,8 +71,20 @@ function verifyTLogEntryOffline(entry, bundleContent, tlogs, signingCert) {
               (0, set_1.verifyTLogSET)(entry, tlogs) &&
               verifyTLogIntegrationTime());
       }
      +function verifyTLogEntryWithInclusionProof(entry, bundleContent, tlogs, signingCert) {
+    // If there is a signing certificate available, check that the tlog integrated
      +    // time is within the certificate's validity period; otherwise, skip this
      +    // check.
      +    const verifyTLogIntegrationTime = signingCert
      +        ? () => signingCert.validForDate(new Date(Number(entry.integratedTime) * 1000))
      +        : () => true;
      +    return ((0, body_1.verifyTLogBody)(entry, bundleContent) &&
      +        (0, merkle_1.verifyMerkleInclusion)(entry) &&
      +        (0, checkpoint_1.verifyCheckpoint)(entry, tlogs) &&
      +        verifyTLogIntegrationTime());
      +}
       function signingCertificate(bundle) {
      -    if (!sigstore.isBundleWithCertificateChain(bundle)) {
      +    if (!(0, bundle_1.isBundleWithCertificateChain)(bundle)) {
               return undefined;
           }
           const signingCert = bundle.verificationMaterial.content.x509CertificateChain.certificates[0];
      diff --git a/deps/npm/node_modules/sigstore/dist/tlog/verify/merkle.d.ts b/deps/npm/node_modules/sigstore/dist/tlog/verify/merkle.d.ts
      deleted file mode 100644
      index a2c47626d01f84..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/tlog/verify/merkle.d.ts
      +++ /dev/null
      @@ -1,2 +0,0 @@
      -import * as sigstore from '../../types/sigstore';
      -export declare function verifyMerkleInclusion(entry: sigstore.TransparencyLogEntry): boolean;
      diff --git a/deps/npm/node_modules/sigstore/dist/tlog/verify/merkle.js b/deps/npm/node_modules/sigstore/dist/tlog/verify/merkle.js
      index 90609cb73576fe..0f246af4a28a3b 100644
      --- a/deps/npm/node_modules/sigstore/dist/tlog/verify/merkle.js
      +++ b/deps/npm/node_modules/sigstore/dist/tlog/verify/merkle.js
      @@ -25,9 +25,6 @@ const RFC6962_LEAF_HASH_PREFIX = Buffer.from([0x00]);
       const RFC6962_NODE_HASH_PREFIX = Buffer.from([0x01]);
       function verifyMerkleInclusion(entry) {
           const inclusionProof = entry.inclusionProof;
      -    if (!inclusionProof) {
      -        throw new error_1.VerificationError('tlog entry has no inclusion proof');
      -    }
           const logIndex = BigInt(inclusionProof.logIndex);
           const treeSize = BigInt(inclusionProof.treeSize);
           if (logIndex < 0n || logIndex >= treeSize) {
      @@ -76,13 +73,20 @@ function chainBorderRight(seed, hashes) {
           return hashes.reduce((acc, h) => hashChildren(h, acc), seed);
       }
       function innerProofSize(index, size) {
      -    return (index ^ (size - BigInt(1))).toString(2).length;
      +    return bitLength(index ^ (size - BigInt(1)));
       }
       // Counts the number of ones in the binary representation of the given number.
       // https://en.wikipedia.org/wiki/Hamming_weight
       function onesCount(x) {
           return x.toString(2).split('1').length - 1;
       }
      +// Returns the number of bits necessary to represent an integer in binary.
      +function bitLength(n) {
      +    if (n === 0n) {
      +        return 0;
      +    }
      +    return n.toString(2).length;
      +}
       // Hashing logic according to RFC6962.
       // https://datatracker.ietf.org/doc/html/rfc6962#section-2
       function hashChildren(left, right) {
      diff --git a/deps/npm/node_modules/sigstore/dist/tlog/verify/set.d.ts b/deps/npm/node_modules/sigstore/dist/tlog/verify/set.d.ts
      deleted file mode 100644
      index 278317489a7e49..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/tlog/verify/set.d.ts
      +++ /dev/null
      @@ -1,2 +0,0 @@
      -import * as sigstore from '../../types/sigstore';
      -export declare function verifyTLogSET(entry: sigstore.VerifiableTransparencyLogEntry, tlogs: sigstore.TransparencyLogInstance[]): boolean;
      diff --git a/deps/npm/node_modules/sigstore/dist/tlog/verify/set.js b/deps/npm/node_modules/sigstore/dist/tlog/verify/set.js
      index 89a544283d73d9..959cd5883f1cad 100644
      --- a/deps/npm/node_modules/sigstore/dist/tlog/verify/set.js
      +++ b/deps/npm/node_modules/sigstore/dist/tlog/verify/set.js
      @@ -11,9 +11,6 @@ function verifyTLogSET(entry, tlogs) {
           const validTLogs = filterTLogInstances(tlogs, entry.logId.keyId, entry.integratedTime);
           // Check to see if we can verify the SET against any of the valid tlogs
           return validTLogs.some((tlog) => {
      -        if (!tlog.publicKey?.rawBytes) {
      -            return false;
      -        }
               const publicKey = util_1.crypto.createPublicKey(tlog.publicKey.rawBytes);
               // Re-create the original Rekor verification payload
               const payload = toVerificationPayload(entry);
      @@ -60,7 +57,7 @@ function filterTLogInstances(tlogInstances, logID, integratedTime) {
                   return true;
               }
               // Check that the integrated time is within the validFor range
      -        return (publicKey.validFor.start &&
      +        return (publicKey.validFor.start !== undefined &&
                   publicKey.validFor.start <= targetDate &&
                   (!publicKey.validFor.end || targetDate <= publicKey.validFor.end));
           });
      diff --git a/deps/npm/node_modules/sigstore/dist/tsa/index.d.ts b/deps/npm/node_modules/sigstore/dist/tsa/index.d.ts
      deleted file mode 100644
      index e94b20c075e557..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/tsa/index.d.ts
      +++ /dev/null
      @@ -1,13 +0,0 @@
      -/// <reference types="node" />
      -import type { FetchOptions } from '../types/fetch';
      -export interface TSA {
      -    createTimestamp: (signature: Buffer) => Promise<Buffer>;
      -}
      -export type TSAClientOptions = {
      -    tsaBaseURL: string;
      -} & FetchOptions;
      -export declare class TSAClient implements TSA {
      -    private tsa;
      -    constructor(options: TSAClientOptions);
      -    createTimestamp(signature: Buffer): Promise<Buffer>;
      -}
      diff --git a/deps/npm/node_modules/sigstore/dist/types/fetch.d.ts b/deps/npm/node_modules/sigstore/dist/types/fetch.d.ts
      deleted file mode 100644
      index 510aeee6a37d72..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/types/fetch.d.ts
      +++ /dev/null
      @@ -1,6 +0,0 @@
      -import type { MakeFetchHappenOptions } from 'make-fetch-happen';
      -export type Retry = MakeFetchHappenOptions['retry'];
      -export type FetchOptions = {
      -    retry?: Retry;
      -    timeout?: number | undefined;
      -};
      diff --git a/deps/npm/node_modules/sigstore/dist/types/signature.d.ts b/deps/npm/node_modules/sigstore/dist/types/signature.d.ts
      deleted file mode 100644
      index 40b4fbe6339ca6..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/types/signature.d.ts
      +++ /dev/null
      @@ -1,16 +0,0 @@
      -/// <reference types="node" />
      -import { Envelope } from './sigstore';
      -import { OneOf } from './utility';
      -interface VerificationMaterial {
      -    certificates: string[];
      -    key: {
      -        id?: string;
      -        value: string;
      -    };
      -}
      -export type SignatureMaterial = {
      -    signature: Buffer;
      -} & OneOf<VerificationMaterial>;
      -export type SignerFunc = (payload: Buffer) => Promise<SignatureMaterial>;
      -export declare function extractSignatureMaterial(dsseEnvelope: Envelope, publicKey: string): SignatureMaterial;
      -export {};
      diff --git a/deps/npm/node_modules/sigstore/dist/types/signature.js b/deps/npm/node_modules/sigstore/dist/types/signature.js
      deleted file mode 100644
      index 339e2a2731b413..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/types/signature.js
      +++ /dev/null
      @@ -1,15 +0,0 @@
      -"use strict";
      -Object.defineProperty(exports, "__esModule", { value: true });
      -exports.extractSignatureMaterial = void 0;
      -function extractSignatureMaterial(dsseEnvelope, publicKey) {
      -    const signature = dsseEnvelope.signatures[0];
      -    return {
      -        signature: signature.sig,
      -        key: {
      -            id: signature.keyid,
      -            value: publicKey,
      -        },
      -        certificates: undefined,
      -    };
      -}
      -exports.extractSignatureMaterial = extractSignatureMaterial;
      diff --git a/deps/npm/node_modules/sigstore/dist/types/sigstore.js b/deps/npm/node_modules/sigstore/dist/types/sigstore.js
      new file mode 100644
      index 00000000000000..36efb67e38a5eb
      --- /dev/null
      +++ b/deps/npm/node_modules/sigstore/dist/types/sigstore.js
      @@ -0,0 +1,27 @@
      +"use strict";
      +/*
      +Copyright 2023 The Sigstore Authors.
      +
      +Licensed under the Apache License, Version 2.0 (the "License");
      +you may not use this file except in compliance with the License.
      +You may obtain a copy of the License at
      +
      +    http://www.apache.org/licenses/LICENSE-2.0
      +
      +Unless required by applicable law or agreed to in writing, software
      +distributed under the License is distributed on an "AS IS" BASIS,
      +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
      +See the License for the specific language governing permissions and
      +limitations under the License.
      +*/
      +Object.defineProperty(exports, "__esModule", { value: true });
      +exports.isCAVerificationOptions = exports.SubjectAlternativeNameType = void 0;
      +// Enums from protobuf-specs
      +var protobuf_specs_1 = require("@sigstore/protobuf-specs");
      +Object.defineProperty(exports, "SubjectAlternativeNameType", { enumerable: true, get: function () { return protobuf_specs_1.SubjectAlternativeNameType; } });
      +function isCAVerificationOptions(options) {
      +    return (options.ctlogOptions !== undefined &&
      +        (options.signers === undefined ||
      +            options.signers.$case === 'certificateIdentities'));
      +}
      +exports.isCAVerificationOptions = isCAVerificationOptions;
      diff --git a/deps/npm/node_modules/sigstore/dist/types/sigstore/index.d.ts b/deps/npm/node_modules/sigstore/dist/types/sigstore/index.d.ts
      deleted file mode 100644
      index 2be598d923048f..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/types/sigstore/index.d.ts
      +++ /dev/null
      @@ -1,42 +0,0 @@
      -/// <reference types="node" />
      -import { SignatureMaterial } from '../signature';
      -import { ValidBundle } from './validate';
      -import type { ArtifactVerificationOptions, Envelope, TransparencyLogEntry, VerificationMaterial } from '@sigstore/protobuf-specs';
      -import type { Entry } from '../../external/rekor';
      -import type { WithRequired } from '../utility';
      -import type { SerializedBundle } from './serialized';
      -export { Envelope, HashAlgorithm, PublicKeyDetails, SubjectAlternativeNameType, } from '@sigstore/protobuf-specs';
      -export type { ArtifactVerificationOptions, ArtifactVerificationOptions_CtlogOptions, ArtifactVerificationOptions_TlogOptions, CertificateAuthority, CertificateIdentities, CertificateIdentity, MessageSignature, ObjectIdentifierValuePair, PublicKey, PublicKeyIdentifier, RFC3161SignedTimestamp, Signature, SubjectAlternativeName, TimestampVerificationData, TransparencyLogEntry, TransparencyLogInstance, TrustedRoot, X509Certificate, X509CertificateChain, } from '@sigstore/protobuf-specs';
      -export type { SerializedBundle, SerializedEnvelope } from './serialized';
      -export type { ValidBundle as Bundle };
      -export declare const bundleFromJSON: (obj: any) => ValidBundle;
      -export declare const bundleToJSON: (bundle: ValidBundle) => SerializedBundle;
      -export type BundleWithCertificateChain = ValidBundle & {
      -    verificationMaterial: VerificationMaterial & {
      -        content: Extract;
      -    };
      -};
      -export declare function isBundleWithCertificateChain(bundle: ValidBundle): bundle is BundleWithCertificateChain;
      -export type RequiredArtifactVerificationOptions = WithRequired;
      -export type CAArtifactVerificationOptions = WithRequired & {
      -    signers?: Extract;
      -};
      -export declare function isCAVerificationOptions(options: ArtifactVerificationOptions): options is CAArtifactVerificationOptions;
      -export type VerifiableTransparencyLogEntry = WithRequired;
      -export declare function isVerifiableTransparencyLogEntry(entry: TransparencyLogEntry): entry is VerifiableTransparencyLogEntry;
      -export declare function toDSSEBundle({ envelope, signature, tlogEntry, timestamp, }: {
      -    envelope: Envelope;
      -    signature: SignatureMaterial;
      -    tlogEntry?: Entry;
      -    timestamp?: Buffer;
      -}): ValidBundle;
      -export declare function toMessageSignatureBundle({ digest, signature, tlogEntry, timestamp, }: {
      -    digest: Buffer;
      -    signature: SignatureMaterial;
      -    tlogEntry?: Entry;
      -    timestamp?: Buffer;
      -}): ValidBundle;
      diff --git a/deps/npm/node_modules/sigstore/dist/types/sigstore/index.js b/deps/npm/node_modules/sigstore/dist/types/sigstore/index.js
      deleted file mode 100644
      index 2c240c865cf37a..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/types/sigstore/index.js
      +++ /dev/null
      @@ -1,162 +0,0 @@
      -"use strict";
      -Object.defineProperty(exports, "__esModule", { value: true });
      -exports.toMessageSignatureBundle = exports.toDSSEBundle = exports.isVerifiableTransparencyLogEntry = exports.isCAVerificationOptions = exports.isBundleWithCertificateChain = exports.bundleToJSON = exports.bundleFromJSON = exports.SubjectAlternativeNameType = exports.PublicKeyDetails = exports.HashAlgorithm = exports.Envelope = void 0;
      -/*
      -Copyright 2023 The Sigstore Authors.
      -
      -Licensed under the Apache License, Version 2.0 (the "License");
      -you may not use this file except in compliance with the License.
      -You may obtain a copy of the License at
      -
      -    http://www.apache.org/licenses/LICENSE-2.0
      -
      -Unless required by applicable law or agreed to in writing, software
      -distributed under the License is distributed on an "AS IS" BASIS,
      -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
      -See the License for the specific language governing permissions and
      -limitations under the License.
      -*/
      -const protobuf_specs_1 = require("@sigstore/protobuf-specs");
      -const util_1 = require("../../util");
      -const validate_1 = require("./validate");
      -// Enums from protobuf-specs
      -// TODO: Move Envelope to "type" export once @sigstore/sign is a thing
      -var protobuf_specs_2 = require("@sigstore/protobuf-specs");
      -Object.defineProperty(exports, "Envelope", { enumerable: true, get: function () { return protobuf_specs_2.Envelope; } });
      -Object.defineProperty(exports, "HashAlgorithm", { enumerable: true, get: function () { return protobuf_specs_2.HashAlgorithm; } });
      -Object.defineProperty(exports, "PublicKeyDetails", { enumerable: true, get: function () { return protobuf_specs_2.PublicKeyDetails; } });
      -Object.defineProperty(exports, "SubjectAlternativeNameType", { enumerable: true, get: function () { return protobuf_specs_2.SubjectAlternativeNameType; } });
      -// eslint-disable-next-line @typescript-eslint/no-explicit-any
      -const bundleFromJSON = (obj) => {
      -    const bundle = protobuf_specs_1.Bundle.fromJSON(obj);
      -    (0, validate_1.assertValidBundle)(bundle);
      -    return bundle;
      -};
      -exports.bundleFromJSON = bundleFromJSON;
      -// eslint-disable-next-line @typescript-eslint/no-explicit-any
      -const bundleToJSON = (bundle) => {
      -    return protobuf_specs_1.Bundle.toJSON(bundle);
      -};
      -exports.bundleToJSON = bundleToJSON;
      -const BUNDLE_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.1';
      -// Type guard for narrowing a Bundle to a BundleWithCertificateChain
      -function isBundleWithCertificateChain(bundle) {
      -    return (bundle.verificationMaterial.content !== undefined &&
      -        bundle.verificationMaterial.content.$case === 'x509CertificateChain');
      -}
      -exports.isBundleWithCertificateChain = isBundleWithCertificateChain;
      -function isCAVerificationOptions(options) {
      -    return (options.ctlogOptions !== undefined &&
      -        (options.signers === undefined ||
      -            options.signers.$case === 'certificateIdentities'));
      -}
      -exports.isCAVerificationOptions = isCAVerificationOptions;
      -function isVerifiableTransparencyLogEntry(entry) {
      -    return (entry.logId !== undefined &&
      -        entry.inclusionPromise !== undefined &&
      -        entry.kindVersion !== undefined);
      -}
      -exports.isVerifiableTransparencyLogEntry = isVerifiableTransparencyLogEntry;
      -// All of the following functions are used to construct a ValidBundle
      -// from various types of input. When this code moves into the
      -// @sigstore/sign package, these functions will be exported from there.
      -function toDSSEBundle({ envelope, signature, tlogEntry, timestamp, }) {
      -    return {
      -        mediaType: BUNDLE_MEDIA_TYPE,
      -        content: { $case: 'dsseEnvelope', dsseEnvelope: envelope },
      -        verificationMaterial: toVerificationMaterial({
      -            signature,
      -            tlogEntry,
      -            timestamp,
      -        }),
      -    };
      -}
      -exports.toDSSEBundle = toDSSEBundle;
      -function toMessageSignatureBundle({ digest, signature, tlogEntry, timestamp, }) {
      -    return {
      -        mediaType: BUNDLE_MEDIA_TYPE,
      -        content: {
      -            $case: 'messageSignature',
      -            messageSignature: {
      -                messageDigest: {
      -                    algorithm: protobuf_specs_1.HashAlgorithm.SHA2_256,
      -                    digest: digest,
      -                },
      -                signature: signature.signature,
      -            },
      -        },
      -        verificationMaterial: toVerificationMaterial({
      -            signature,
      -            tlogEntry,
      -            timestamp,
      -        }),
      -    };
      -}
      -exports.toMessageSignatureBundle = toMessageSignatureBundle;
      -function toTransparencyLogEntry(entry) {
      -    const b64SET = entry.verification?.signedEntryTimestamp || '';
      -    const set = Buffer.from(b64SET, 'base64');
      -    const logID = Buffer.from(entry.logID, 'hex');
      -    const proof = entry.verification?.inclusionProof
      -        ? toInclusionProof(entry.verification.inclusionProof)
      -        : undefined;
      -    // Parse entry body so we can extract the kind and version.
      -    const bodyJSON = util_1.encoding.base64Decode(entry.body);
      -    const entryBody = JSON.parse(bodyJSON);
      -    return {
      -        inclusionPromise: {
      -            signedEntryTimestamp: set,
      -        },
      -        logIndex: entry.logIndex.toString(),
      -        logId: {
      -            keyId: logID,
      -        },
      -        integratedTime: entry.integratedTime.toString(),
      -        kindVersion: {
      -            kind: entryBody.kind,
      -            version: entryBody.apiVersion,
      -        },
      -        inclusionProof: proof,
      -        canonicalizedBody: Buffer.from(entry.body, 'base64'),
      -    };
      -}
      -function toInclusionProof(proof) {
      -    return {
      -        logIndex: proof.logIndex.toString(),
      -        rootHash: Buffer.from(proof.rootHash, 'hex'),
      -        treeSize: proof.treeSize.toString(),
      -        checkpoint: {
      -            envelope: proof.checkpoint,
      -        },
      -        hashes: proof.hashes.map((h) => Buffer.from(h, 'hex')),
      -    };
      -}
      -function toVerificationMaterial({ signature, tlogEntry, timestamp, }) {
      -    return {
      -        content: signature.certificates
      -            ? toVerificationMaterialx509CertificateChain(signature.certificates)
      -            : toVerificationMaterialPublicKey(signature.key.id || ''),
      -        tlogEntries: tlogEntry ? [toTransparencyLogEntry(tlogEntry)] : [],
      -        timestampVerificationData: timestamp
      -            ? toTimestampVerificationData(timestamp)
      -            : undefined,
      -    };
      -}
      -function toVerificationMaterialx509CertificateChain(certificates) {
      -    return {
      -        $case: 'x509CertificateChain',
      -        x509CertificateChain: {
      -            certificates: certificates.map((c) => ({
      -                rawBytes: util_1.pem.toDER(c),
      -            })),
      -        },
      -    };
      -}
      -function toVerificationMaterialPublicKey(hint) {
      -    return { $case: 'publicKey', publicKey: { hint } };
      -}
      -function toTimestampVerificationData(timestamp) {
      -    return {
      -        rfc3161Timestamps: [{ signedTimestamp: timestamp }],
      -    };
      -}
      diff --git a/deps/npm/node_modules/sigstore/dist/types/sigstore/serialized.d.ts b/deps/npm/node_modules/sigstore/dist/types/sigstore/serialized.d.ts
      deleted file mode 100644
      index 8ea3b5cff35ee9..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/types/sigstore/serialized.d.ts
      +++ /dev/null
      @@ -1,65 +0,0 @@
      -import { OneOf } from '../utility';
      -type SerializedTLogEntry = {
      -    logIndex: string;
      -    logId: {
      -        keyId: string;
      -    };
      -    kindVersion: {
      -        kind: string;
      -        version: string;
      -    } | undefined;
      -    integratedTime: string;
      -    inclusionPromise: {
      -        signedEntryTimestamp: string;
      -    };
      -    inclusionProof: {
      -        logIndex: string;
      -        rootHash: string;
      -        treeSize: string;
      -        hashes: string[];
      -        checkpoint: {
      -            envelope: string;
      -        };
      -    } | undefined;
      -    canonicalizedBody: string;
      -};
      -type SerializedTimestampVerificationData = {
      -    rfc3161Timestamps: {
      -        signedTimestamp: string;
      -    }[];
      -};
      -type SerializedMessageSignature = {
      -    messageDigest: {
      -        algorithm: string;
      -        digest: string;
      -    } | undefined;
      -    signature: string;
      -};
      -type SerializedDSSEEnvelope = {
      -    payload: string;
      -    payloadType: string;
      -    signatures: {
      -        sig: string;
      -        keyid: string;
      -    }[];
      -};
      -export type { SerializedDSSEEnvelope as SerializedEnvelope };
      -export type SerializedBundle = {
      -    mediaType: string;
      -    verificationMaterial: (OneOf<{
      -        x509CertificateChain: {
      -            certificates: {
      -                rawBytes: string;
      -            }[];
      -        };
      -        publicKey: {
      -            hint: string;
      -        };
      -    }> | undefined) & {
      -        tlogEntries: SerializedTLogEntry[];
      -        timestampVerificationData: SerializedTimestampVerificationData | undefined;
      -    };
      -} & OneOf<{
      -    dsseEnvelope: SerializedDSSEEnvelope;
      -    messageSignature: SerializedMessageSignature;
      -}>;
      diff --git a/deps/npm/node_modules/sigstore/dist/types/sigstore/validate.d.ts b/deps/npm/node_modules/sigstore/dist/types/sigstore/validate.d.ts
      deleted file mode 100644
      index a6c33b3c7c0f28..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/types/sigstore/validate.d.ts
      +++ /dev/null
      @@ -1,15 +0,0 @@
      -import { WithRequired } from '../utility';
      -import type { Bundle, MessageSignature, VerificationMaterial } from '@sigstore/protobuf-specs';
      -export type ValidBundle = Bundle & {
      -    verificationMaterial: VerificationMaterial & {
      -        content: NonNullable;
      -    };
      -    content: (Extract & {
      -        messageSignature: WithRequired;
      -    }) | Extract;
      -};
      -export declare function assertValidBundle(b: Bundle): asserts b is ValidBundle;
      diff --git a/deps/npm/node_modules/sigstore/dist/types/utility.d.ts b/deps/npm/node_modules/sigstore/dist/types/utility.d.ts
      deleted file mode 100644
      index df993d503f8ea1..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/types/utility.d.ts
      +++ /dev/null
      @@ -1,14 +0,0 @@
      -type ValueOf<Obj> = Obj[keyof Obj];
      -type OneOnly<Obj, K extends keyof Obj> = {
      -    [key in Exclude<keyof Obj, K>]: undefined;
      -} & {
      -    [key in K]: Obj[K];
      -};
      -type OneOfByKey<Obj> = {
      -    [key in keyof Obj]: OneOnly<Obj, key>;
      -};
      -export type OneOf<T> = ValueOf<OneOfByKey<T>>;
      -export type WithRequired<T, K extends keyof T> = T & {
      -    [P in K]-?: NonNullable<T[P]>;
      -};
      -export {};
      diff --git a/deps/npm/node_modules/sigstore/dist/types/utility.js b/deps/npm/node_modules/sigstore/dist/types/utility.js
      index 132848cd7587e7..77c91b1923ca08 100644
      --- a/deps/npm/node_modules/sigstore/dist/types/utility.js
      +++ b/deps/npm/node_modules/sigstore/dist/types/utility.js
      @@ -14,5 +14,4 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
       See the License for the specific language governing permissions and
       limitations under the License.
       */
      -// https://dev.to/maxime1992/implement-a-generic-oneof-type-with-typescript-22em
       Object.defineProperty(exports, "__esModule", { value: true });
      diff --git a/deps/npm/node_modules/sigstore/dist/util/asn1/dump.d.ts b/deps/npm/node_modules/sigstore/dist/util/asn1/dump.d.ts
      deleted file mode 100644
      index 3f192dea45445c..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/util/asn1/dump.d.ts
      +++ /dev/null
      @@ -1,2 +0,0 @@
      -import { ASN1Obj } from './obj';
      -export declare function dump(obj: ASN1Obj, indent?: number): void;
      diff --git a/deps/npm/node_modules/sigstore/dist/util/asn1/error.d.ts b/deps/npm/node_modules/sigstore/dist/util/asn1/error.d.ts
      deleted file mode 100644
      index fcd908f47036ac..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/util/asn1/error.d.ts
      +++ /dev/null
      @@ -1,4 +0,0 @@
      -export declare class ASN1ParseError extends Error {
      -}
      -export declare class ASN1TypeError extends Error {
      -}
      diff --git a/deps/npm/node_modules/sigstore/dist/util/asn1/index.d.ts b/deps/npm/node_modules/sigstore/dist/util/asn1/index.d.ts
      deleted file mode 100644
      index da45453d4eab7b..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/util/asn1/index.d.ts
      +++ /dev/null
      @@ -1 +0,0 @@
      -export { ASN1Obj } from './obj';
      diff --git a/deps/npm/node_modules/sigstore/dist/util/asn1/length.d.ts b/deps/npm/node_modules/sigstore/dist/util/asn1/length.d.ts
      deleted file mode 100644
      index 97c7114af29091..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/util/asn1/length.d.ts
      +++ /dev/null
      @@ -1,4 +0,0 @@
      -/// <reference types="node" />
      -import { ByteStream } from '../stream';
      -export declare function decodeLength(stream: ByteStream): number;
      -export declare function encodeLength(len: number): Buffer;
      diff --git a/deps/npm/node_modules/sigstore/dist/util/asn1/obj.d.ts b/deps/npm/node_modules/sigstore/dist/util/asn1/obj.d.ts
      deleted file mode 100644
      index de54996c87faac..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/util/asn1/obj.d.ts
      +++ /dev/null
      @@ -1,15 +0,0 @@
      -/// <reference types="node" />
      -import { ASN1Tag } from './tag';
      -export declare class ASN1Obj {
      -    readonly tag: ASN1Tag;
      -    readonly subs: ASN1Obj[];
      -    readonly value: Buffer;
      -    constructor(tag: ASN1Tag, value: Buffer, subs: ASN1Obj[]);
      -    static parseBuffer(buf: Buffer): ASN1Obj;
      -    toDER(): Buffer;
      -    toBoolean(): boolean;
      -    toInteger(): bigint;
      -    toOID(): string;
      -    toDate(): Date;
      -    toBitString(): number[];
      -}
      diff --git a/deps/npm/node_modules/sigstore/dist/util/asn1/parse.d.ts b/deps/npm/node_modules/sigstore/dist/util/asn1/parse.d.ts
      deleted file mode 100644
      index 35989d5510e26b..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/util/asn1/parse.d.ts
      +++ /dev/null
      @@ -1,7 +0,0 @@
      -/// <reference types="node" />
      -export declare function parseInteger(buf: Buffer): bigint;
      -export declare function parseStringASCII(buf: Buffer): string;
      -export declare function parseTime(buf: Buffer, shortYear: boolean): Date;
      -export declare function parseOID(buf: Buffer): string;
      -export declare function parseBoolean(buf: Buffer): boolean;
      -export declare function parseBitString(buf: Buffer): number[];
      diff --git a/deps/npm/node_modules/sigstore/dist/util/asn1/tag.d.ts b/deps/npm/node_modules/sigstore/dist/util/asn1/tag.d.ts
      deleted file mode 100644
      index cdc9a69097b380..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/util/asn1/tag.d.ts
      +++ /dev/null
      @@ -1,28 +0,0 @@
      -export declare const UNIVERSAL_TAG: {
      -    BOOLEAN: number;
      -    INTEGER: number;
      -    BIT_STRING: number;
      -    OCTET_STRING: number;
      -    OBJECT_IDENTIFIER: number;
      -    SEQUENCE: number;
      -    SET: number;
      -    PRINTABLE_STRING: number;
      -    UTC_TIME: number;
      -    GENERALIZED_TIME: number;
      -};
      -export declare class ASN1Tag {
      -    readonly number: number;
      -    readonly constructed: boolean;
      -    readonly class: number;
      -    constructor(enc: number);
      -    isUniversal(): boolean;
      -    isContextSpecific(num?: number): boolean;
      -    isBoolean(): boolean;
      -    isInteger(): boolean;
      -    isBitString(): boolean;
      -    isOctetString(): boolean;
      -    isOID(): boolean;
      -    isUTCTime(): boolean;
      -    isGeneralizedTime(): boolean;
      -    toDER(): number;
      -}
      diff --git a/deps/npm/node_modules/sigstore/dist/util/crypto.d.ts b/deps/npm/node_modules/sigstore/dist/util/crypto.d.ts
      deleted file mode 100644
      index a726dd260750c3..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/util/crypto.d.ts
      +++ /dev/null
      @@ -1,10 +0,0 @@
      -/// <reference types="node" />
      -/// <reference types="node" />
      -/// <reference types="node" />
      -import { BinaryLike, KeyLike, KeyPairKeyObjectResult } from 'crypto';
      -export declare function generateKeyPair(): KeyPairKeyObjectResult;
      -export declare function createPublicKey(key: string | Buffer): KeyLike;
      -export declare function signBlob(data: NodeJS.ArrayBufferView, privateKey: KeyLike): Buffer;
      -export declare function verifyBlob(data: Buffer, key: KeyLike, signature: Buffer, algorithm?: string): boolean;
      -export declare function hash(data: BinaryLike): Buffer;
      -export declare function randomBytes(count: number): Buffer;
      diff --git a/deps/npm/node_modules/sigstore/dist/util/crypto.js b/deps/npm/node_modules/sigstore/dist/util/crypto.js
      index 0b1e0bc62d8abb..c26de091ecdb62 100644
      --- a/deps/npm/node_modules/sigstore/dist/util/crypto.js
      +++ b/deps/npm/node_modules/sigstore/dist/util/crypto.js
      @@ -3,7 +3,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
           return (mod && mod.__esModule) ? mod : { "default": mod };
       };
       Object.defineProperty(exports, "__esModule", { value: true });
      -exports.randomBytes = exports.hash = exports.verifyBlob = exports.signBlob = exports.createPublicKey = exports.generateKeyPair = void 0;
      +exports.bufferEqual = exports.randomBytes = exports.hash = exports.verifyBlob = exports.createPublicKey = void 0;
       /*
       Copyright 2022 The Sigstore Authors.
       
      @@ -20,15 +20,7 @@ See the License for the specific language governing permissions and
       limitations under the License.
       */
       const crypto_1 = __importDefault(require("crypto"));
      -const EC_KEYPAIR_TYPE = 'ec';
      -const P256_CURVE = 'P-256';
       const SHA256_ALGORITHM = 'sha256';
      -function generateKeyPair() {
      -    return crypto_1.default.generateKeyPairSync(EC_KEYPAIR_TYPE, {
      -        namedCurve: P256_CURVE,
      -    });
      -}
      -exports.generateKeyPair = generateKeyPair;
       function createPublicKey(key) {
           if (typeof key === 'string') {
               return crypto_1.default.createPublicKey(key);
      @@ -38,10 +30,6 @@ function createPublicKey(key) {
           }
       }
       exports.createPublicKey = createPublicKey;
      -function signBlob(data, privateKey) {
      -    return crypto_1.default.sign(null, data, privateKey);
      -}
      -exports.signBlob = signBlob;
       function verifyBlob(data, key, signature, algorithm) {
           // The try/catch is to work around an issue in Node 14.x where verify throws
           // an error in some scenarios if the signature is invalid.
      @@ -49,6 +37,7 @@ function verifyBlob(data, key, signature, algorithm) {
               return crypto_1.default.verify(algorithm, data, key, signature);
           }
           catch (e) {
      +        /* istanbul ignore next */
               return false;
           }
       }
      @@ -62,3 +51,13 @@ function randomBytes(count) {
           return crypto_1.default.randomBytes(count);
       }
       exports.randomBytes = randomBytes;
      +function bufferEqual(a, b) {
      +    try {
      +        return crypto_1.default.timingSafeEqual(a, b);
      +    }
      +    catch {
      +        /* istanbul ignore next */
      +        return false;
      +    }
      +}
      +exports.bufferEqual = bufferEqual;
      diff --git a/deps/npm/node_modules/sigstore/dist/util/dsse.d.ts b/deps/npm/node_modules/sigstore/dist/util/dsse.d.ts
      deleted file mode 100644
      index 839b9c03ce38c7..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/util/dsse.d.ts
      +++ /dev/null
      @@ -1,2 +0,0 @@
      -/// <reference types="node" />
      -export declare function preAuthEncoding(payloadType: string, payload: Buffer): Buffer;
      diff --git a/deps/npm/node_modules/sigstore/dist/util/encoding.d.ts b/deps/npm/node_modules/sigstore/dist/util/encoding.d.ts
      deleted file mode 100644
      index f1347c241ed0c4..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/util/encoding.d.ts
      +++ /dev/null
      @@ -1,6 +0,0 @@
      -export declare function base64Encode(str: string): string;
      -export declare function base64Decode(str: string): string;
      -export declare function base64URLEncode(str: string): string;
      -export declare function base64URLDecode(str: string): string;
      -export declare function base64URLEscape(str: string): string;
      -export declare function base64URLUnescape(str: string): string;
      diff --git a/deps/npm/node_modules/sigstore/dist/util/index.d.ts b/deps/npm/node_modules/sigstore/dist/util/index.d.ts
      deleted file mode 100644
      index f062a1c9d3c57d..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/util/index.d.ts
      +++ /dev/null
      @@ -1,9 +0,0 @@
      -export * as asn1 from './asn1';
      -export * as crypto from './crypto';
      -export * as dsse from './dsse';
      -export * as encoding from './encoding';
      -export * as json from './json';
      -export * as oidc from './oidc';
      -export * as pem from './pem';
      -export * as promise from './promise';
      -export * as ua from './ua';
      diff --git a/deps/npm/node_modules/sigstore/dist/util/index.js b/deps/npm/node_modules/sigstore/dist/util/index.js
      index b7d6ce21aafd3b..ff4cec375af8f8 100644
      --- a/deps/npm/node_modules/sigstore/dist/util/index.js
      +++ b/deps/npm/node_modules/sigstore/dist/util/index.js
      @@ -23,7 +23,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
           return result;
       };
       Object.defineProperty(exports, "__esModule", { value: true });
      -exports.ua = exports.promise = exports.pem = exports.oidc = exports.json = exports.encoding = exports.dsse = exports.crypto = exports.asn1 = void 0;
      +exports.pem = exports.json = exports.encoding = exports.dsse = exports.crypto = exports.asn1 = void 0;
       /*
       Copyright 2022 The Sigstore Authors.
       
      @@ -44,7 +44,4 @@ exports.crypto = __importStar(require("./crypto"));
       exports.dsse = __importStar(require("./dsse"));
       exports.encoding = __importStar(require("./encoding"));
       exports.json = __importStar(require("./json"));
      -exports.oidc = __importStar(require("./oidc"));
       exports.pem = __importStar(require("./pem"));
      -exports.promise = __importStar(require("./promise"));
      -exports.ua = __importStar(require("./ua"));
      diff --git a/deps/npm/node_modules/sigstore/dist/util/json.d.ts b/deps/npm/node_modules/sigstore/dist/util/json.d.ts
      deleted file mode 100644
      index ed331817ef2360..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/util/json.d.ts
      +++ /dev/null
      @@ -1 +0,0 @@
      -export declare function canonicalize(object: any): string;
      diff --git a/deps/npm/node_modules/sigstore/dist/util/oidc.d.ts b/deps/npm/node_modules/sigstore/dist/util/oidc.d.ts
      deleted file mode 100644
      index b4513891a3527f..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/util/oidc.d.ts
      +++ /dev/null
      @@ -1 +0,0 @@
      -export declare function extractJWTSubject(jwt: string): string;
      diff --git a/deps/npm/node_modules/sigstore/dist/util/pem.d.ts b/deps/npm/node_modules/sigstore/dist/util/pem.d.ts
      deleted file mode 100644
      index 6910679cae0654..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/util/pem.d.ts
      +++ /dev/null
      @@ -1,3 +0,0 @@
      -/// <reference types="node" />
      -export declare function toDER(certificate: string): Buffer;
      -export declare function fromDER(certificate: Buffer, type?: string): string;
      diff --git a/deps/npm/node_modules/sigstore/dist/util/promise.d.ts b/deps/npm/node_modules/sigstore/dist/util/promise.d.ts
      deleted file mode 100644
      index bbc501a85a7c60..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/util/promise.d.ts
      +++ /dev/null
      @@ -1 +0,0 @@
      -export declare const promiseAny: <T>(values: Iterable<PromiseLike<T>>) => Promise<T>;
      diff --git a/deps/npm/node_modules/sigstore/dist/util/promise.js b/deps/npm/node_modules/sigstore/dist/util/promise.js
      deleted file mode 100644
      index 8101dd47afe026..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/util/promise.js
      +++ /dev/null
      @@ -1,27 +0,0 @@
      -"use strict";
      -/*
      -Copyright 2022 The Sigstore Authors.
      -
      -Licensed under the Apache License, Version 2.0 (the "License");
      -you may not use this file except in compliance with the License.
      -You may obtain a copy of the License at
      -
      -    http://www.apache.org/licenses/LICENSE-2.0
      -
      -Unless required by applicable law or agreed to in writing, software
      -distributed under the License is distributed on an "AS IS" BASIS,
      -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
      -See the License for the specific language governing permissions and
      -limitations under the License.
      -*/
      -Object.defineProperty(exports, "__esModule", { value: true });
      -exports.promiseAny = void 0;
      -// Implementation of Promise.any (not available until Node v15).
      -// We're basically inverting the logic of Promise.all and taking advantage
      -// of the fact that Promise.all will return early on the first rejection.
      -// By reversing the resolve/reject logic we can use this to return early
      -// on the first resolved promise.
      -const promiseAny = async (values) => {
      -    return Promise.all([...values].map((promise) => new Promise((resolve, reject) => promise.then(reject, resolve)))).then((errors) => Promise.reject(errors), (value) => Promise.resolve(value));
      -};
      -exports.promiseAny = promiseAny;
      diff --git a/deps/npm/node_modules/sigstore/dist/util/stream.d.ts b/deps/npm/node_modules/sigstore/dist/util/stream.d.ts
      deleted file mode 100644
      index 4d509565942e14..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/util/stream.d.ts
      +++ /dev/null
      @@ -1,24 +0,0 @@
      -/// <reference types="node" />
      -export declare class StreamError extends Error {
      -}
      -export declare class ByteStream {
      -    private static BLOCK_SIZE;
      -    private buf;
      -    private view;
      -    private start;
      -    constructor(buffer?: ArrayBuffer);
      -    get buffer(): Buffer;
      -    get length(): number;
      -    get position(): number;
      -    seek(position: number): void;
      -    slice(start: number, len: number): Buffer;
      -    appendChar(char: number): void;
      -    appendUint16(num: number): void;
      -    appendUint24(num: number): void;
      -    appendView(view: Uint8Array): void;
      -    getBlock(size: number): Buffer;
      -    getUint8(): number;
      -    getUint16(): number;
      -    private ensureCapacity;
      -    private realloc;
      -}
      diff --git a/deps/npm/node_modules/sigstore/dist/util/ua.d.ts b/deps/npm/node_modules/sigstore/dist/util/ua.d.ts
      deleted file mode 100644
      index b60e2e9c3e5374..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/util/ua.d.ts
      +++ /dev/null
      @@ -1 +0,0 @@
      -export declare const getUserAgent: () => string;
      diff --git a/deps/npm/node_modules/sigstore/dist/verify.d.ts b/deps/npm/node_modules/sigstore/dist/verify.d.ts
      deleted file mode 100644
      index 850d0f37f09817..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/verify.d.ts
      +++ /dev/null
      @@ -1,13 +0,0 @@
      -/// <reference types="node" />
      -import * as sigstore from './types/sigstore';
      -export type KeySelector = (hint: string) => string | Buffer | undefined;
      -export declare class Verifier {
      -    private trustedRoot;
      -    private keySelector;
      -    constructor(trustedRoot: sigstore.TrustedRoot, keySelector?: KeySelector);
      -    verify(bundle: sigstore.Bundle, options: sigstore.RequiredArtifactVerificationOptions, data?: Buffer): void;
      -    private verifyArtifactSignature;
      -    private verifySigningCertificate;
      -    private verifyTLogEntries;
      -    private getPublicKey;
      -}
      diff --git a/deps/npm/node_modules/sigstore/dist/verify.js b/deps/npm/node_modules/sigstore/dist/verify.js
      index 49f63d93abb268..a3dc4b307e4953 100644
      --- a/deps/npm/node_modules/sigstore/dist/verify.js
      +++ b/deps/npm/node_modules/sigstore/dist/verify.js
      @@ -24,6 +24,22 @@ var __importStar = (this && this.__importStar) || function (mod) {
       };
       Object.defineProperty(exports, "__esModule", { value: true });
       exports.Verifier = void 0;
      +/*
      +Copyright 2023 The Sigstore Authors.
      +
      +Licensed under the Apache License, Version 2.0 (the "License");
      +you may not use this file except in compliance with the License.
      +You may obtain a copy of the License at
      +
      +    http://www.apache.org/licenses/LICENSE-2.0
      +
      +Unless required by applicable law or agreed to in writing, software
      +distributed under the License is distributed on an "AS IS" BASIS,
      +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
      +See the License for the specific language governing permissions and
      +limitations under the License.
      +*/
      +const bundle_1 = require("@sigstore/bundle");
       const ca = __importStar(require("./ca/verify"));
       const error_1 = require("./error");
       const tlog = __importStar(require("./tlog/verify"));
      @@ -38,7 +54,7 @@ class Verifier {
           // and the bundle's transparency log entries.
           verify(bundle, options, data) {
               this.verifyArtifactSignature(bundle, data);
      -        if (sigstore.isBundleWithCertificateChain(bundle)) {
      +        if ((0, bundle_1.isBundleWithCertificateChain)(bundle)) {
                   this.verifySigningCertificate(bundle, options);
               }
               if (options.tlogOptions.disable === false) {
      diff --git a/deps/npm/node_modules/sigstore/dist/x509/cert.d.ts b/deps/npm/node_modules/sigstore/dist/x509/cert.d.ts
      deleted file mode 100644
      index 216dbd39cb1f7d..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/x509/cert.d.ts
      +++ /dev/null
      @@ -1,48 +0,0 @@
      -/// <reference types="node" />
      -import * as sigstore from '../types/sigstore';
      -import { ASN1Obj } from '../util/asn1';
      -import { x509AuthorityKeyIDExtension, x509BasicConstraintsExtension, x509Extension, x509KeyUsageExtension, x509SCTExtension, x509SubjectAlternativeNameExtension, x509SubjectKeyIDExtension } from './ext';
      -interface SCTVerificationResult {
      -    verified: boolean;
      -    logID: Buffer;
      -}
      -export declare class x509Certificate {
      -    root: ASN1Obj;
      -    constructor(asn1: ASN1Obj);
      -    static parse(cert: Buffer | string): x509Certificate;
      -    get tbsCertificate(): ASN1Obj;
      -    get version(): string;
      -    get notBefore(): Date;
      -    get notAfter(): Date;
      -    get issuer(): Buffer;
      -    get subject(): Buffer;
      -    get publicKey(): Buffer;
      -    get signatureAlgorithm(): string;
      -    get signatureValue(): Buffer;
      -    get extensions(): ASN1Obj[];
      -    get extKeyUsage(): x509KeyUsageExtension | undefined;
      -    get extBasicConstraints(): x509BasicConstraintsExtension | undefined;
      -    get extSubjectAltName(): x509SubjectAlternativeNameExtension | undefined;
      -    get extAuthorityKeyID(): x509AuthorityKeyIDExtension | undefined;
      -    get extSubjectKeyID(): x509SubjectKeyIDExtension | undefined;
      -    get extSCT(): x509SCTExtension | undefined;
      -    get isCA(): boolean;
      -    extension(oid: string): x509Extension | undefined;
      -    verify(issuerCertificate?: x509Certificate): boolean;
      -    validForDate(date: Date): boolean;
      -    equals(other: x509Certificate): boolean;
      -    verifySCTs(issuer: x509Certificate, logs: sigstore.TransparencyLogInstance[]): SCTVerificationResult[];
      -    private clone;
      -    private findExtension;
      -    private checkRecognizedExtensions;
      -    private get tbsCertificateObj();
      -    private get signatureAlgorithmObj();
      -    private get signatureValueObj();
      -    private get versionObj();
      -    private get issuerObj();
      -    private get validityObj();
      -    private get subjectObj();
      -    private get subjectPublicKeyInfoObj();
      -    private get extensionsObj();
      -}
      -export {};
      diff --git a/deps/npm/node_modules/sigstore/dist/x509/ext.d.ts b/deps/npm/node_modules/sigstore/dist/x509/ext.d.ts
      deleted file mode 100644
      index d6285f306f6adc..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/x509/ext.d.ts
      +++ /dev/null
      @@ -1,42 +0,0 @@
      -/// <reference types="node" />
      -import { ASN1Obj } from '../util/asn1';
      -import { SignedCertificateTimestamp } from './sct';
      -export declare class x509Extension {
      -    protected root: ASN1Obj;
      -    constructor(asn1: ASN1Obj);
      -    get oid(): string;
      -    get critical(): boolean;
      -    get value(): Buffer;
      -    get valueObj(): ASN1Obj;
      -    protected get extnValueObj(): ASN1Obj;
      -}
      -export declare class x509BasicConstraintsExtension extends x509Extension {
      -    get isCA(): boolean;
      -    get pathLenConstraint(): bigint | undefined;
      -    private get sequence();
      -}
      -export declare class x509KeyUsageExtension extends x509Extension {
      -    get digitalSignature(): boolean;
      -    get keyCertSign(): boolean;
      -    get crlSign(): boolean;
      -    private get bitString();
      -}
      -export declare class x509SubjectAlternativeNameExtension extends x509Extension {
      -    get rfc822Name(): string | undefined;
      -    get uri(): string | undefined;
      -    otherName(oid: string): string | undefined;
      -    private findGeneralName;
      -    private get generalNames();
      -}
      -export declare class x509AuthorityKeyIDExtension extends x509Extension {
      -    get keyIdentifier(): Buffer | undefined;
      -    private findSequenceMember;
      -    private get sequence();
      -}
      -export declare class x509SubjectKeyIDExtension extends x509Extension {
      -    get keyIdentifier(): Buffer;
      -}
      -export declare class x509SCTExtension extends x509Extension {
      -    constructor(asn1: ASN1Obj);
      -    get signedCertificateTimestamps(): SignedCertificateTimestamp[];
      -}
      diff --git a/deps/npm/node_modules/sigstore/dist/x509/sct.d.ts b/deps/npm/node_modules/sigstore/dist/x509/sct.d.ts
      deleted file mode 100644
      index 076a532984c6b8..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/x509/sct.d.ts
      +++ /dev/null
      @@ -1,26 +0,0 @@
      -/// <reference types="node" />
      -import * as sigstore from '../types/sigstore';
      -interface SCTOptions {
      -    version: number;
      -    logID: Buffer;
      -    timestamp: Buffer;
      -    extensions: Buffer;
      -    hashAlgorithm: number;
      -    signatureAlgorithm: number;
      -    signature: Buffer;
      -}
      -export declare class SignedCertificateTimestamp {
      -    readonly version: number;
      -    readonly logID: Buffer;
      -    readonly timestamp: Buffer;
      -    readonly extensions: Buffer;
      -    readonly hashAlgorithm: number;
      -    readonly signatureAlgorithm: number;
      -    readonly signature: Buffer;
      -    constructor(options: SCTOptions);
      -    get datetime(): Date;
      -    get algorithm(): string;
      -    verify(preCert: Buffer, logs: sigstore.TransparencyLogInstance[]): boolean;
      -    static parse(buf: Buffer): SignedCertificateTimestamp;
      -}
      -export {};
      diff --git a/deps/npm/node_modules/sigstore/dist/x509/verify.d.ts b/deps/npm/node_modules/sigstore/dist/x509/verify.d.ts
      deleted file mode 100644
      index b12594adb2ea88..00000000000000
      --- a/deps/npm/node_modules/sigstore/dist/x509/verify.d.ts
      +++ /dev/null
      @@ -1,8 +0,0 @@
      -import { x509Certificate } from './cert';
      -interface VerifyCertificateChainOptions {
      -    trustedCerts: x509Certificate[];
      -    untrustedCert: x509Certificate;
      -    validAt?: Date;
      -}
      -export declare function verifyCertificateChain(opts: VerifyCertificateChainOptions): x509Certificate[];
      -export {};
      diff --git a/deps/npm/node_modules/sigstore/package.json b/deps/npm/node_modules/sigstore/package.json
      index 02655a6c79bc81..daf50ba601884c 100644
      --- a/deps/npm/node_modules/sigstore/package.json
      +++ b/deps/npm/node_modules/sigstore/package.json
      @@ -1,6 +1,6 @@
       {
         "name": "sigstore",
      -  "version": "1.7.0",
      +  "version": "2.1.0",
         "description": "code-signing for npm packages",
         "main": "dist/index.js",
         "types": "dist/index.d.ts",
      @@ -9,9 +9,6 @@
           "build": "tsc --build",
           "test": "jest"
         },
      -  "bin": {
      -    "sigstore": "bin/sigstore.js"
      -  },
         "files": [
           "dist",
           "store"
      @@ -30,17 +27,19 @@
           "provenance": true
         },
         "devDependencies": {
      -    "@sigstore/rekor-types": "^1.0.0",
      +    "@sigstore/rekor-types": "^2.0.0",
           "@sigstore/jest": "^0.0.0",
      -    "@tufjs/repo-mock": "^1.1.0",
      +    "@sigstore/mock": "^0.4.0",
      +    "@tufjs/repo-mock": "^2.0.0",
           "@types/make-fetch-happen": "^10.0.0"
         },
         "dependencies": {
      -    "@sigstore/protobuf-specs": "^0.1.0",
      -    "@sigstore/tuf": "^1.0.1",
      -    "make-fetch-happen": "^11.0.1"
      +    "@sigstore/bundle": "^2.1.0",
      +    "@sigstore/protobuf-specs": "^0.2.1",
      +    "@sigstore/sign": "^2.1.0",
      +    "@sigstore/tuf": "^2.1.0"
         },
         "engines": {
      -    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
      +    "node": "^16.14.0 || >=18.0.0"
         }
       }
      diff --git a/deps/npm/node_modules/ssri/package.json b/deps/npm/node_modules/ssri/package.json
      index 815c7f3ed03ae9..8750bd744d28bd 100644
      --- a/deps/npm/node_modules/ssri/package.json
      +++ b/deps/npm/node_modules/ssri/package.json
      @@ -1,6 +1,6 @@
       {
         "name": "ssri",
      -  "version": "10.0.4",
      +  "version": "10.0.5",
         "description": "Standard Subresource Integrity library -- parses, serializes, generates, and verifies integrity metadata according to the SRI spec.",
         "main": "lib/index.js",
         "files": [
      @@ -47,11 +47,11 @@
         "author": "GitHub Inc.",
         "license": "ISC",
         "dependencies": {
      -    "minipass": "^5.0.0"
      +    "minipass": "^7.0.3"
         },
         "devDependencies": {
           "@npmcli/eslint-config": "^4.0.0",
      -    "@npmcli/template-oss": "4.14.1",
      +    "@npmcli/template-oss": "4.18.0",
           "tap": "^16.0.1"
         },
         "engines": {
      @@ -59,7 +59,7 @@
         },
         "templateOSS": {
           "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
      -    "version": "4.14.1",
      +    "version": "4.18.0",
           "publish": "true"
         }
       }
      diff --git a/deps/npm/node_modules/tar/node_modules/minipass/LICENSE b/deps/npm/node_modules/tar/node_modules/minipass/LICENSE
      new file mode 100644
      index 00000000000000..97f8e32ed82e4c
      --- /dev/null
      +++ b/deps/npm/node_modules/tar/node_modules/minipass/LICENSE
      @@ -0,0 +1,15 @@
      +The ISC License
      +
      +Copyright (c) 2017-2023 npm, Inc., Isaac Z. Schlueter, and Contributors
      +
      +Permission to use, copy, modify, and/or distribute this software for any
      +purpose with or without fee is hereby granted, provided that the above
      +copyright notice and this permission notice appear in all copies.
      +
      +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
      +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
      +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
      +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
      +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
      +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
      +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
      diff --git a/deps/npm/node_modules/tar/node_modules/minipass/index.js b/deps/npm/node_modules/tar/node_modules/minipass/index.js
      new file mode 100644
      index 00000000000000..ed07c17acd97b7
      --- /dev/null
      +++ b/deps/npm/node_modules/tar/node_modules/minipass/index.js
      @@ -0,0 +1,702 @@
      +'use strict'
      +const proc =
      +  typeof process === 'object' && process
      +    ? process
      +    : {
      +        stdout: null,
      +        stderr: null,
      +      }
      +const EE = require('events')
      +const Stream = require('stream')
      +const stringdecoder = require('string_decoder')
      +const SD = stringdecoder.StringDecoder
      +
      +const EOF = Symbol('EOF')
      +const MAYBE_EMIT_END = Symbol('maybeEmitEnd')
      +const EMITTED_END = Symbol('emittedEnd')
      +const EMITTING_END = Symbol('emittingEnd')
      +const EMITTED_ERROR = Symbol('emittedError')
      +const CLOSED = Symbol('closed')
      +const READ = Symbol('read')
      +const FLUSH = Symbol('flush')
      +const FLUSHCHUNK = Symbol('flushChunk')
      +const ENCODING = Symbol('encoding')
      +const DECODER = Symbol('decoder')
      +const FLOWING = Symbol('flowing')
      +const PAUSED = Symbol('paused')
      +const RESUME = Symbol('resume')
      +const BUFFER = Symbol('buffer')
      +const PIPES = Symbol('pipes')
      +const BUFFERLENGTH = Symbol('bufferLength')
      +const BUFFERPUSH = Symbol('bufferPush')
      +const BUFFERSHIFT = Symbol('bufferShift')
      +const OBJECTMODE = Symbol('objectMode')
      +// internal event when stream is destroyed
      +const DESTROYED = Symbol('destroyed')
      +// internal event when stream has an error
      +const ERROR = Symbol('error')
      +const EMITDATA = Symbol('emitData')
      +const EMITEND = Symbol('emitEnd')
      +const EMITEND2 = Symbol('emitEnd2')
      +const ASYNC = Symbol('async')
      +const ABORT = Symbol('abort')
      +const ABORTED = Symbol('aborted')
      +const SIGNAL = Symbol('signal')
      +
      +const defer = fn => Promise.resolve().then(fn)
      +
      +// TODO remove when Node v8 support drops
      +const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1'
      +const ASYNCITERATOR =
      +  (doIter && Symbol.asyncIterator) || Symbol('asyncIterator not implemented')
      +const ITERATOR =
      +  (doIter && Symbol.iterator) || Symbol('iterator not implemented')
      +
      +// events that mean 'the stream is over'
      +// these are treated specially, and re-emitted
      +// if they are listened for after emitting.
      +const isEndish = ev => ev === 'end' || ev === 'finish' || ev === 'prefinish'
      +
      +const isArrayBuffer = b =>
      +  b instanceof ArrayBuffer ||
      +  (typeof b === 'object' &&
      +    b.constructor &&
      +    b.constructor.name === 'ArrayBuffer' &&
      +    b.byteLength >= 0)
      +
      +const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b)
      +
      +class Pipe {
      +  constructor(src, dest, opts) {
      +    this.src = src
      +    this.dest = dest
      +    this.opts = opts
      +    this.ondrain = () => src[RESUME]()
      +    dest.on('drain', this.ondrain)
      +  }
      +  unpipe() {
      +    this.dest.removeListener('drain', this.ondrain)
      +  }
      +  // istanbul ignore next - only here for the prototype
      +  proxyErrors() {}
      +  end() {
      +    this.unpipe()
      +    if (this.opts.end) this.dest.end()
      +  }
      +}
      +
      +class PipeProxyErrors extends Pipe {
      +  unpipe() {
      +    this.src.removeListener('error', this.proxyErrors)
      +    super.unpipe()
      +  }
      +  constructor(src, dest, opts) {
      +    super(src, dest, opts)
      +    this.proxyErrors = er => dest.emit('error', er)
      +    src.on('error', this.proxyErrors)
      +  }
      +}
      +
      +class Minipass extends Stream {
      +  constructor(options) {
      +    super()
      +    this[FLOWING] = false
      +    // whether we're explicitly paused
      +    this[PAUSED] = false
      +    this[PIPES] = []
      +    this[BUFFER] = []
      +    this[OBJECTMODE] = (options && options.objectMode) || false
      +    if (this[OBJECTMODE]) this[ENCODING] = null
      +    else this[ENCODING] = (options && options.encoding) || null
      +    if (this[ENCODING] === 'buffer') this[ENCODING] = null
      +    this[ASYNC] = (options && !!options.async) || false
      +    this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null
      +    this[EOF] = false
      +    this[EMITTED_END] = false
      +    this[EMITTING_END] = false
      +    this[CLOSED] = false
      +    this[EMITTED_ERROR] = null
      +    this.writable = true
      +    this.readable = true
      +    this[BUFFERLENGTH] = 0
      +    this[DESTROYED] = false
      +    if (options && options.debugExposeBuffer === true) {
      +      Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] })
      +    }
      +    if (options && options.debugExposePipes === true) {
      +      Object.defineProperty(this, 'pipes', { get: () => this[PIPES] })
      +    }
      +    this[SIGNAL] = options && options.signal
      +    this[ABORTED] = false
      +    if (this[SIGNAL]) {
      +      this[SIGNAL].addEventListener('abort', () => this[ABORT]())
      +      if (this[SIGNAL].aborted) {
      +        this[ABORT]()
      +      }
      +    }
      +  }
      +
      +  get bufferLength() {
      +    return this[BUFFERLENGTH]
      +  }
      +
      +  get encoding() {
      +    return this[ENCODING]
      +  }
      +  set encoding(enc) {
      +    if (this[OBJECTMODE]) throw new Error('cannot set encoding in objectMode')
      +
      +    if (
      +      this[ENCODING] &&
      +      enc !== this[ENCODING] &&
      +      ((this[DECODER] && this[DECODER].lastNeed) || this[BUFFERLENGTH])
      +    )
      +      throw new Error('cannot change encoding')
      +
      +    if (this[ENCODING] !== enc) {
      +      this[DECODER] = enc ? new SD(enc) : null
      +      if (this[BUFFER].length)
      +        this[BUFFER] = this[BUFFER].map(chunk => this[DECODER].write(chunk))
      +    }
      +
      +    this[ENCODING] = enc
      +  }
      +
      +  setEncoding(enc) {
      +    this.encoding = enc
      +  }
      +
      +  get objectMode() {
      +    return this[OBJECTMODE]
      +  }
      +  set objectMode(om) {
      +    this[OBJECTMODE] = this[OBJECTMODE] || !!om
      +  }
      +
      +  get ['async']() {
      +    return this[ASYNC]
      +  }
      +  set ['async'](a) {
      +    this[ASYNC] = this[ASYNC] || !!a
      +  }
      +
      +  // drop everything and get out of the flow completely
      +  [ABORT]() {
      +    this[ABORTED] = true
      +    this.emit('abort', this[SIGNAL].reason)
      +    this.destroy(this[SIGNAL].reason)
      +  }
      +
      +  get aborted() {
      +    return this[ABORTED]
      +  }
      +  set aborted(_) {}
      +
      +  write(chunk, encoding, cb) {
      +    if (this[ABORTED]) return false
      +    if (this[EOF]) throw new Error('write after end')
      +
      +    if (this[DESTROYED]) {
      +      this.emit(
      +        'error',
      +        Object.assign(
      +          new Error('Cannot call write after a stream was destroyed'),
      +          { code: 'ERR_STREAM_DESTROYED' }
      +        )
      +      )
      +      return true
      +    }
      +
      +    if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8')
      +
      +    if (!encoding) encoding = 'utf8'
      +
      +    const fn = this[ASYNC] ? defer : f => f()
      +
      +    // convert array buffers and typed array views into buffers
      +    // at some point in the future, we may want to do the opposite!
      +    // leave strings and buffers as-is
      +    // anything else switches us into object mode
      +    if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
      +      if (isArrayBufferView(chunk))
      +        chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength)
      +      else if (isArrayBuffer(chunk)) chunk = Buffer.from(chunk)
      +      else if (typeof chunk !== 'string')
      +        // use the setter so we throw if we have encoding set
      +        this.objectMode = true
      +    }
      +
      +    // handle object mode up front, since it's simpler
      +    // this yields better performance, fewer checks later.
      +    if (this[OBJECTMODE]) {
      +      /* istanbul ignore if - maybe impossible? */
      +      if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true)
      +
      +      if (this.flowing) this.emit('data', chunk)
      +      else this[BUFFERPUSH](chunk)
      +
      +      if (this[BUFFERLENGTH] !== 0) this.emit('readable')
      +
      +      if (cb) fn(cb)
      +
      +      return this.flowing
      +    }
      +
      +    // at this point the chunk is a buffer or string
      +    // don't buffer it up or send it to the decoder
      +    if (!chunk.length) {
      +      if (this[BUFFERLENGTH] !== 0) this.emit('readable')
      +      if (cb) fn(cb)
      +      return this.flowing
      +    }
      +
      +    // fast-path writing strings of same encoding to a stream with
      +    // an empty buffer, skipping the buffer/decoder dance
      +    if (
      +      typeof chunk === 'string' &&
      +      // unless it is a string already ready for us to use
      +      !(encoding === this[ENCODING] && !this[DECODER].lastNeed)
      +    ) {
      +      chunk = Buffer.from(chunk, encoding)
      +    }
      +
      +    if (Buffer.isBuffer(chunk) && this[ENCODING])
      +      chunk = this[DECODER].write(chunk)
      +
      +    // Note: flushing CAN potentially switch us into not-flowing mode
      +    if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true)
      +
      +    if (this.flowing) this.emit('data', chunk)
      +    else this[BUFFERPUSH](chunk)
      +
      +    if (this[BUFFERLENGTH] !== 0) this.emit('readable')
      +
      +    if (cb) fn(cb)
      +
      +    return this.flowing
      +  }
      +
      +  read(n) {
      +    if (this[DESTROYED]) return null
      +
      +    if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) {
      +      this[MAYBE_EMIT_END]()
      +      return null
      +    }
      +
      +    if (this[OBJECTMODE]) n = null
      +
      +    if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
      +      if (this.encoding) this[BUFFER] = [this[BUFFER].join('')]
      +      else this[BUFFER] = [Buffer.concat(this[BUFFER], this[BUFFERLENGTH])]
      +    }
      +
      +    const ret = this[READ](n || null, this[BUFFER][0])
      +    this[MAYBE_EMIT_END]()
      +    return ret
      +  }
      +
      +  [READ](n, chunk) {
      +    if (n === chunk.length || n === null) this[BUFFERSHIFT]()
      +    else {
      +      this[BUFFER][0] = chunk.slice(n)
      +      chunk = chunk.slice(0, n)
      +      this[BUFFERLENGTH] -= n
      +    }
      +
      +    this.emit('data', chunk)
      +
      +    if (!this[BUFFER].length && !this[EOF]) this.emit('drain')
      +
      +    return chunk
      +  }
      +
      +  end(chunk, encoding, cb) {
      +    if (typeof chunk === 'function') (cb = chunk), (chunk = null)
      +    if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8')
      +    if (chunk) this.write(chunk, encoding)
      +    if (cb) this.once('end', cb)
      +    this[EOF] = true
      +    this.writable = false
      +
      +    // if we haven't written anything, then go ahead and emit,
      +    // even if we're not reading.
      +    // we'll re-emit if a new 'end' listener is added anyway.
      +    // This makes MP more suitable to write-only use cases.
      +    if (this.flowing || !this[PAUSED]) this[MAYBE_EMIT_END]()
      +    return this
      +  }
      +
      +  // don't let the internal resume be overwritten
      +  [RESUME]() {
      +    if (this[DESTROYED]) return
      +
      +    this[PAUSED] = false
      +    this[FLOWING] = true
      +    this.emit('resume')
      +    if (this[BUFFER].length) this[FLUSH]()
      +    else if (this[EOF]) this[MAYBE_EMIT_END]()
      +    else this.emit('drain')
      +  }
      +
      +  resume() {
      +    return this[RESUME]()
      +  }
      +
      +  pause() {
      +    this[FLOWING] = false
      +    this[PAUSED] = true
      +  }
      +
      +  get destroyed() {
      +    return this[DESTROYED]
      +  }
      +
      +  get flowing() {
      +    return this[FLOWING]
      +  }
      +
      +  get paused() {
      +    return this[PAUSED]
      +  }
      +
      +  [BUFFERPUSH](chunk) {
      +    if (this[OBJECTMODE]) this[BUFFERLENGTH] += 1
      +    else this[BUFFERLENGTH] += chunk.length
      +    this[BUFFER].push(chunk)
      +  }
      +
      +  [BUFFERSHIFT]() {
      +    if (this[OBJECTMODE]) this[BUFFERLENGTH] -= 1
      +    else this[BUFFERLENGTH] -= this[BUFFER][0].length
      +    return this[BUFFER].shift()
      +  }
      +
      +  [FLUSH](noDrain) {
      +    do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && this[BUFFER].length)
      +
      +    if (!noDrain && !this[BUFFER].length && !this[EOF]) this.emit('drain')
      +  }
      +
      +  [FLUSHCHUNK](chunk) {
      +    this.emit('data', chunk)
      +    return this.flowing
      +  }
      +
      +  pipe(dest, opts) {
      +    if (this[DESTROYED]) return
      +
      +    const ended = this[EMITTED_END]
      +    opts = opts || {}
      +    if (dest === proc.stdout || dest === proc.stderr) opts.end = false
      +    else opts.end = opts.end !== false
      +    opts.proxyErrors = !!opts.proxyErrors
      +
      +    // piping an ended stream ends immediately
      +    if (ended) {
      +      if (opts.end) dest.end()
      +    } else {
      +      this[PIPES].push(
      +        !opts.proxyErrors
      +          ? new Pipe(this, dest, opts)
      +          : new PipeProxyErrors(this, dest, opts)
      +      )
      +      if (this[ASYNC]) defer(() => this[RESUME]())
      +      else this[RESUME]()
      +    }
      +
      +    return dest
      +  }
      +
      +  unpipe(dest) {
      +    const p = this[PIPES].find(p => p.dest === dest)
      +    if (p) {
      +      this[PIPES].splice(this[PIPES].indexOf(p), 1)
      +      p.unpipe()
      +    }
      +  }
      +
      +  addListener(ev, fn) {
      +    return this.on(ev, fn)
      +  }
      +
      +  on(ev, fn) {
      +    const ret = super.on(ev, fn)
      +    if (ev === 'data' && !this[PIPES].length && !this.flowing) this[RESUME]()
      +    else if (ev === 'readable' && this[BUFFERLENGTH] !== 0)
      +      super.emit('readable')
      +    else if (isEndish(ev) && this[EMITTED_END]) {
      +      super.emit(ev)
      +      this.removeAllListeners(ev)
      +    } else if (ev === 'error' && this[EMITTED_ERROR]) {
      +      if (this[ASYNC]) defer(() => fn.call(this, this[EMITTED_ERROR]))
      +      else fn.call(this, this[EMITTED_ERROR])
      +    }
      +    return ret
      +  }
      +
      +  get emittedEnd() {
      +    return this[EMITTED_END]
      +  }
      +
      +  [MAYBE_EMIT_END]() {
      +    if (
      +      !this[EMITTING_END] &&
      +      !this[EMITTED_END] &&
      +      !this[DESTROYED] &&
      +      this[BUFFER].length === 0 &&
      +      this[EOF]
      +    ) {
      +      this[EMITTING_END] = true
      +      this.emit('end')
      +      this.emit('prefinish')
      +      this.emit('finish')
      +      if (this[CLOSED]) this.emit('close')
      +      this[EMITTING_END] = false
      +    }
      +  }
      +
      +  emit(ev, data, ...extra) {
      +    // error and close are only events allowed after calling destroy()
      +    if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED])
      +      return
      +    else if (ev === 'data') {
      +      return !this[OBJECTMODE] && !data
      +        ? false
      +        : this[ASYNC]
      +        ? defer(() => this[EMITDATA](data))
      +        : this[EMITDATA](data)
      +    } else if (ev === 'end') {
      +      return this[EMITEND]()
      +    } else if (ev === 'close') {
      +      this[CLOSED] = true
      +      // don't emit close before 'end' and 'finish'
      +      if (!this[EMITTED_END] && !this[DESTROYED]) return
      +      const ret = super.emit('close')
      +      this.removeAllListeners('close')
      +      return ret
      +    } else if (ev === 'error') {
      +      this[EMITTED_ERROR] = data
      +      super.emit(ERROR, data)
      +      const ret =
      +        !this[SIGNAL] || this.listeners('error').length
      +          ? super.emit('error', data)
      +          : false
      +      this[MAYBE_EMIT_END]()
      +      return ret
      +    } else if (ev === 'resume') {
      +      const ret = super.emit('resume')
      +      this[MAYBE_EMIT_END]()
      +      return ret
      +    } else if (ev === 'finish' || ev === 'prefinish') {
      +      const ret = super.emit(ev)
      +      this.removeAllListeners(ev)
      +      return ret
      +    }
      +
      +    // Some other unknown event
      +    const ret = super.emit(ev, data, ...extra)
      +    this[MAYBE_EMIT_END]()
      +    return ret
      +  }
      +
      +  [EMITDATA](data) {
      +    for (const p of this[PIPES]) {
      +      if (p.dest.write(data) === false) this.pause()
      +    }
      +    const ret = super.emit('data', data)
      +    this[MAYBE_EMIT_END]()
      +    return ret
      +  }
      +
      +  [EMITEND]() {
      +    if (this[EMITTED_END]) return
      +
      +    this[EMITTED_END] = true
      +    this.readable = false
      +    if (this[ASYNC]) defer(() => this[EMITEND2]())
      +    else this[EMITEND2]()
      +  }
      +
      +  [EMITEND2]() {
      +    if (this[DECODER]) {
      +      const data = this[DECODER].end()
      +      if (data) {
      +        for (const p of this[PIPES]) {
      +          p.dest.write(data)
      +        }
      +        super.emit('data', data)
      +      }
      +    }
      +
      +    for (const p of this[PIPES]) {
      +      p.end()
      +    }
      +    const ret = super.emit('end')
      +    this.removeAllListeners('end')
      +    return ret
      +  }
      +
      +  // const all = await stream.collect()
      +  collect() {
      +    const buf = []
      +    if (!this[OBJECTMODE]) buf.dataLength = 0
      +    // set the promise first, in case an error is raised
      +    // by triggering the flow here.
      +    const p = this.promise()
      +    this.on('data', c => {
      +      buf.push(c)
      +      if (!this[OBJECTMODE]) buf.dataLength += c.length
      +    })
      +    return p.then(() => buf)
      +  }
      +
      +  // const data = await stream.concat()
      +  concat() {
      +    return this[OBJECTMODE]
      +      ? Promise.reject(new Error('cannot concat in objectMode'))
      +      : this.collect().then(buf =>
      +          this[OBJECTMODE]
      +            ? Promise.reject(new Error('cannot concat in objectMode'))
      +            : this[ENCODING]
      +            ? buf.join('')
      +            : Buffer.concat(buf, buf.dataLength)
      +        )
      +  }
      +
      +  // stream.promise().then(() => done, er => emitted error)
      +  promise() {
      +    return new Promise((resolve, reject) => {
      +      this.on(DESTROYED, () => reject(new Error('stream destroyed')))
      +      this.on('error', er => reject(er))
      +      this.on('end', () => resolve())
      +    })
      +  }
      +
      +  // for await (let chunk of stream)
      +  [ASYNCITERATOR]() {
      +    let stopped = false
      +    const stop = () => {
      +      this.pause()
      +      stopped = true
      +      return Promise.resolve({ done: true })
      +    }
      +    const next = () => {
      +      if (stopped) return stop()
      +      const res = this.read()
      +      if (res !== null) return Promise.resolve({ done: false, value: res })
      +
      +      if (this[EOF]) return stop()
      +
      +      let resolve = null
      +      let reject = null
      +      const onerr = er => {
      +        this.removeListener('data', ondata)
      +        this.removeListener('end', onend)
      +        this.removeListener(DESTROYED, ondestroy)
      +        stop()
      +        reject(er)
      +      }
      +      const ondata = value => {
      +        this.removeListener('error', onerr)
      +        this.removeListener('end', onend)
      +        this.removeListener(DESTROYED, ondestroy)
      +        this.pause()
      +        resolve({ value: value, done: !!this[EOF] })
      +      }
      +      const onend = () => {
      +        this.removeListener('error', onerr)
      +        this.removeListener('data', ondata)
      +        this.removeListener(DESTROYED, ondestroy)
      +        stop()
      +        resolve({ done: true })
      +      }
      +      const ondestroy = () => onerr(new Error('stream destroyed'))
      +      return new Promise((res, rej) => {
      +        reject = rej
      +        resolve = res
      +        this.once(DESTROYED, ondestroy)
      +        this.once('error', onerr)
      +        this.once('end', onend)
      +        this.once('data', ondata)
      +      })
      +    }
      +
      +    return {
      +      next,
      +      throw: stop,
      +      return: stop,
      +      [ASYNCITERATOR]() {
      +        return this
      +      },
      +    }
      +  }
      +
      +  // for (let chunk of stream)
      +  [ITERATOR]() {
      +    let stopped = false
      +    const stop = () => {
      +      this.pause()
      +      this.removeListener(ERROR, stop)
      +      this.removeListener(DESTROYED, stop)
      +      this.removeListener('end', stop)
      +      stopped = true
      +      return { done: true }
      +    }
      +
      +    const next = () => {
      +      if (stopped) return stop()
      +      const value = this.read()
      +      return value === null ? stop() : { value }
      +    }
      +    this.once('end', stop)
      +    this.once(ERROR, stop)
      +    this.once(DESTROYED, stop)
      +
      +    return {
      +      next,
      +      throw: stop,
      +      return: stop,
      +      [ITERATOR]() {
      +        return this
      +      },
      +    }
      +  }
      +
      +  destroy(er) {
      +    if (this[DESTROYED]) {
      +      if (er) this.emit('error', er)
      +      else this.emit(DESTROYED)
      +      return this
      +    }
      +
      +    this[DESTROYED] = true
      +
      +    // throw away all buffered data, it's never coming out
      +    this[BUFFER].length = 0
      +    this[BUFFERLENGTH] = 0
      +
      +    if (typeof this.close === 'function' && !this[CLOSED]) this.close()
      +
      +    if (er) this.emit('error', er)
      +    // if no error to emit, still reject pending promises
      +    else this.emit(DESTROYED)
      +
      +    return this
      +  }
      +
      +  static isStream(s) {
      +    return (
      +      !!s &&
      +      (s instanceof Minipass ||
      +        s instanceof Stream ||
      +        (s instanceof EE &&
      +          // readable
      +          (typeof s.pipe === 'function' ||
      +            // writable
      +            (typeof s.write === 'function' && typeof s.end === 'function'))))
      +    )
      +  }
      +}
      +
      +exports.Minipass = Minipass
      diff --git a/deps/npm/node_modules/tar/node_modules/minipass/index.mjs b/deps/npm/node_modules/tar/node_modules/minipass/index.mjs
      new file mode 100644
      index 00000000000000..89b3fbf1a4d445
      --- /dev/null
      +++ b/deps/npm/node_modules/tar/node_modules/minipass/index.mjs
      @@ -0,0 +1,700 @@
      +'use strict'
      +const proc =
      +  typeof process === 'object' && process
      +    ? process
      +    : {
      +        stdout: null,
      +        stderr: null,
      +      }
      +import EE from 'events'
      +import Stream from 'stream'
      +import stringdecoder from 'string_decoder'
      +const SD = stringdecoder.StringDecoder
      +
      +const EOF = Symbol('EOF')
      +const MAYBE_EMIT_END = Symbol('maybeEmitEnd')
      +const EMITTED_END = Symbol('emittedEnd')
      +const EMITTING_END = Symbol('emittingEnd')
      +const EMITTED_ERROR = Symbol('emittedError')
      +const CLOSED = Symbol('closed')
      +const READ = Symbol('read')
      +const FLUSH = Symbol('flush')
      +const FLUSHCHUNK = Symbol('flushChunk')
      +const ENCODING = Symbol('encoding')
      +const DECODER = Symbol('decoder')
      +const FLOWING = Symbol('flowing')
      +const PAUSED = Symbol('paused')
      +const RESUME = Symbol('resume')
      +const BUFFER = Symbol('buffer')
      +const PIPES = Symbol('pipes')
      +const BUFFERLENGTH = Symbol('bufferLength')
      +const BUFFERPUSH = Symbol('bufferPush')
      +const BUFFERSHIFT = Symbol('bufferShift')
      +const OBJECTMODE = Symbol('objectMode')
      +// internal event when stream is destroyed
      +const DESTROYED = Symbol('destroyed')
      +// internal event when stream has an error
      +const ERROR = Symbol('error')
      +const EMITDATA = Symbol('emitData')
      +const EMITEND = Symbol('emitEnd')
      +const EMITEND2 = Symbol('emitEnd2')
      +const ASYNC = Symbol('async')
      +const ABORT = Symbol('abort')
      +const ABORTED = Symbol('aborted')
      +const SIGNAL = Symbol('signal')
      +
      +const defer = fn => Promise.resolve().then(fn)
      +
      +// TODO remove when Node v8 support drops
      +const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1'
      +const ASYNCITERATOR =
      +  (doIter && Symbol.asyncIterator) || Symbol('asyncIterator not implemented')
      +const ITERATOR =
      +  (doIter && Symbol.iterator) || Symbol('iterator not implemented')
      +
      +// events that mean 'the stream is over'
      +// these are treated specially, and re-emitted
      +// if they are listened for after emitting.
      +const isEndish = ev => ev === 'end' || ev === 'finish' || ev === 'prefinish'
      +
      +const isArrayBuffer = b =>
      +  b instanceof ArrayBuffer ||
      +  (typeof b === 'object' &&
      +    b.constructor &&
      +    b.constructor.name === 'ArrayBuffer' &&
      +    b.byteLength >= 0)
      +
      +const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b)
      +
      +class Pipe {
      +  constructor(src, dest, opts) {
      +    this.src = src
      +    this.dest = dest
      +    this.opts = opts
      +    this.ondrain = () => src[RESUME]()
      +    dest.on('drain', this.ondrain)
      +  }
      +  unpipe() {
      +    this.dest.removeListener('drain', this.ondrain)
      +  }
      +  // istanbul ignore next - only here for the prototype
      +  proxyErrors() {}
      +  end() {
      +    this.unpipe()
      +    if (this.opts.end) this.dest.end()
      +  }
      +}
      +
      +class PipeProxyErrors extends Pipe {
      +  unpipe() {
      +    this.src.removeListener('error', this.proxyErrors)
      +    super.unpipe()
      +  }
      +  constructor(src, dest, opts) {
      +    super(src, dest, opts)
      +    this.proxyErrors = er => dest.emit('error', er)
      +    src.on('error', this.proxyErrors)
      +  }
      +}
      +
      +export class Minipass extends Stream {
      +  constructor(options) {
      +    super()
      +    this[FLOWING] = false
      +    // whether we're explicitly paused
      +    this[PAUSED] = false
      +    this[PIPES] = []
      +    this[BUFFER] = []
      +    this[OBJECTMODE] = (options && options.objectMode) || false
      +    if (this[OBJECTMODE]) this[ENCODING] = null
      +    else this[ENCODING] = (options && options.encoding) || null
      +    if (this[ENCODING] === 'buffer') this[ENCODING] = null
      +    this[ASYNC] = (options && !!options.async) || false
      +    this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null
      +    this[EOF] = false
      +    this[EMITTED_END] = false
      +    this[EMITTING_END] = false
      +    this[CLOSED] = false
      +    this[EMITTED_ERROR] = null
      +    this.writable = true
      +    this.readable = true
      +    this[BUFFERLENGTH] = 0
      +    this[DESTROYED] = false
      +    if (options && options.debugExposeBuffer === true) {
      +      Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] })
      +    }
      +    if (options && options.debugExposePipes === true) {
      +      Object.defineProperty(this, 'pipes', { get: () => this[PIPES] })
      +    }
      +    this[SIGNAL] = options && options.signal
      +    this[ABORTED] = false
      +    if (this[SIGNAL]) {
      +      this[SIGNAL].addEventListener('abort', () => this[ABORT]())
      +      if (this[SIGNAL].aborted) {
      +        this[ABORT]()
      +      }
      +    }
      +  }
      +
      +  get bufferLength() {
      +    return this[BUFFERLENGTH]
      +  }
      +
      +  get encoding() {
      +    return this[ENCODING]
      +  }
      +  set encoding(enc) {
      +    if (this[OBJECTMODE]) throw new Error('cannot set encoding in objectMode')
      +
      +    if (
      +      this[ENCODING] &&
      +      enc !== this[ENCODING] &&
      +      ((this[DECODER] && this[DECODER].lastNeed) || this[BUFFERLENGTH])
      +    )
      +      throw new Error('cannot change encoding')
      +
      +    if (this[ENCODING] !== enc) {
      +      this[DECODER] = enc ? new SD(enc) : null
      +      if (this[BUFFER].length)
      +        this[BUFFER] = this[BUFFER].map(chunk => this[DECODER].write(chunk))
      +    }
      +
      +    this[ENCODING] = enc
      +  }
      +
      +  setEncoding(enc) {
      +    this.encoding = enc
      +  }
      +
      +  get objectMode() {
      +    return this[OBJECTMODE]
      +  }
      +  set objectMode(om) {
      +    this[OBJECTMODE] = this[OBJECTMODE] || !!om
      +  }
      +
      +  get ['async']() {
      +    return this[ASYNC]
      +  }
      +  set ['async'](a) {
      +    this[ASYNC] = this[ASYNC] || !!a
      +  }
      +
      +  // drop everything and get out of the flow completely
      +  [ABORT]() {
      +    this[ABORTED] = true
      +    this.emit('abort', this[SIGNAL].reason)
      +    this.destroy(this[SIGNAL].reason)
      +  }
      +
      +  get aborted() {
      +    return this[ABORTED]
      +  }
      +  set aborted(_) {}
      +
      +  write(chunk, encoding, cb) {
      +    if (this[ABORTED]) return false
      +    if (this[EOF]) throw new Error('write after end')
      +
      +    if (this[DESTROYED]) {
      +      this.emit(
      +        'error',
      +        Object.assign(
      +          new Error('Cannot call write after a stream was destroyed'),
      +          { code: 'ERR_STREAM_DESTROYED' }
      +        )
      +      )
      +      return true
      +    }
      +
      +    if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8')
      +
      +    if (!encoding) encoding = 'utf8'
      +
      +    const fn = this[ASYNC] ? defer : f => f()
      +
      +    // convert array buffers and typed array views into buffers
      +    // at some point in the future, we may want to do the opposite!
      +    // leave strings and buffers as-is
      +    // anything else switches us into object mode
      +    if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
      +      if (isArrayBufferView(chunk))
      +        chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength)
      +      else if (isArrayBuffer(chunk)) chunk = Buffer.from(chunk)
      +      else if (typeof chunk !== 'string')
      +        // use the setter so we throw if we have encoding set
      +        this.objectMode = true
      +    }
      +
      +    // handle object mode up front, since it's simpler
      +    // this yields better performance, fewer checks later.
      +    if (this[OBJECTMODE]) {
      +      /* istanbul ignore if - maybe impossible? */
      +      if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true)
      +
      +      if (this.flowing) this.emit('data', chunk)
      +      else this[BUFFERPUSH](chunk)
      +
      +      if (this[BUFFERLENGTH] !== 0) this.emit('readable')
      +
      +      if (cb) fn(cb)
      +
      +      return this.flowing
      +    }
      +
      +    // at this point the chunk is a buffer or string
      +    // don't buffer it up or send it to the decoder
      +    if (!chunk.length) {
      +      if (this[BUFFERLENGTH] !== 0) this.emit('readable')
      +      if (cb) fn(cb)
      +      return this.flowing
      +    }
      +
      +    // fast-path writing strings of same encoding to a stream with
      +    // an empty buffer, skipping the buffer/decoder dance
      +    if (
      +      typeof chunk === 'string' &&
      +      // unless it is a string already ready for us to use
      +      !(encoding === this[ENCODING] && !this[DECODER].lastNeed)
      +    ) {
      +      chunk = Buffer.from(chunk, encoding)
      +    }
      +
      +    if (Buffer.isBuffer(chunk) && this[ENCODING])
      +      chunk = this[DECODER].write(chunk)
      +
      +    // Note: flushing CAN potentially switch us into not-flowing mode
      +    if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true)
      +
      +    if (this.flowing) this.emit('data', chunk)
      +    else this[BUFFERPUSH](chunk)
      +
      +    if (this[BUFFERLENGTH] !== 0) this.emit('readable')
      +
      +    if (cb) fn(cb)
      +
      +    return this.flowing
      +  }
      +
      +  read(n) {
      +    if (this[DESTROYED]) return null
      +
      +    if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) {
      +      this[MAYBE_EMIT_END]()
      +      return null
      +    }
      +
      +    if (this[OBJECTMODE]) n = null
      +
      +    if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
      +      if (this.encoding) this[BUFFER] = [this[BUFFER].join('')]
      +      else this[BUFFER] = [Buffer.concat(this[BUFFER], this[BUFFERLENGTH])]
      +    }
      +
      +    const ret = this[READ](n || null, this[BUFFER][0])
      +    this[MAYBE_EMIT_END]()
      +    return ret
      +  }
      +
      +  [READ](n, chunk) {
      +    if (n === chunk.length || n === null) this[BUFFERSHIFT]()
      +    else {
      +      this[BUFFER][0] = chunk.slice(n)
      +      chunk = chunk.slice(0, n)
      +      this[BUFFERLENGTH] -= n
      +    }
      +
      +    this.emit('data', chunk)
      +
      +    if (!this[BUFFER].length && !this[EOF]) this.emit('drain')
      +
      +    return chunk
      +  }
      +
      +  end(chunk, encoding, cb) {
      +    if (typeof chunk === 'function') (cb = chunk), (chunk = null)
      +    if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8')
      +    if (chunk) this.write(chunk, encoding)
      +    if (cb) this.once('end', cb)
      +    this[EOF] = true
      +    this.writable = false
      +
      +    // if we haven't written anything, then go ahead and emit,
      +    // even if we're not reading.
      +    // we'll re-emit if a new 'end' listener is added anyway.
      +    // This makes MP more suitable to write-only use cases.
      +    if (this.flowing || !this[PAUSED]) this[MAYBE_EMIT_END]()
      +    return this
      +  }
      +
      +  // don't let the internal resume be overwritten
      +  [RESUME]() {
      +    if (this[DESTROYED]) return
      +
      +    this[PAUSED] = false
      +    this[FLOWING] = true
      +    this.emit('resume')
      +    if (this[BUFFER].length) this[FLUSH]()
      +    else if (this[EOF]) this[MAYBE_EMIT_END]()
      +    else this.emit('drain')
      +  }
      +
      +  resume() {
      +    return this[RESUME]()
      +  }
      +
      +  pause() {
      +    this[FLOWING] = false
      +    this[PAUSED] = true
      +  }
      +
      +  get destroyed() {
      +    return this[DESTROYED]
      +  }
      +
      +  get flowing() {
      +    return this[FLOWING]
      +  }
      +
      +  get paused() {
      +    return this[PAUSED]
      +  }
      +
      +  [BUFFERPUSH](chunk) {
      +    if (this[OBJECTMODE]) this[BUFFERLENGTH] += 1
      +    else this[BUFFERLENGTH] += chunk.length
      +    this[BUFFER].push(chunk)
      +  }
      +
      +  [BUFFERSHIFT]() {
      +    if (this[OBJECTMODE]) this[BUFFERLENGTH] -= 1
      +    else this[BUFFERLENGTH] -= this[BUFFER][0].length
      +    return this[BUFFER].shift()
      +  }
      +
      +  [FLUSH](noDrain) {
      +    do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && this[BUFFER].length)
      +
      +    if (!noDrain && !this[BUFFER].length && !this[EOF]) this.emit('drain')
      +  }
      +
      +  [FLUSHCHUNK](chunk) {
      +    this.emit('data', chunk)
      +    return this.flowing
      +  }
      +
      +  pipe(dest, opts) {
      +    if (this[DESTROYED]) return
      +
      +    const ended = this[EMITTED_END]
      +    opts = opts || {}
      +    if (dest === proc.stdout || dest === proc.stderr) opts.end = false
      +    else opts.end = opts.end !== false
      +    opts.proxyErrors = !!opts.proxyErrors
      +
      +    // piping an ended stream ends immediately
      +    if (ended) {
      +      if (opts.end) dest.end()
      +    } else {
      +      this[PIPES].push(
      +        !opts.proxyErrors
      +          ? new Pipe(this, dest, opts)
      +          : new PipeProxyErrors(this, dest, opts)
      +      )
      +      if (this[ASYNC]) defer(() => this[RESUME]())
      +      else this[RESUME]()
      +    }
      +
      +    return dest
      +  }
      +
      +  unpipe(dest) {
      +    const p = this[PIPES].find(p => p.dest === dest)
      +    if (p) {
      +      this[PIPES].splice(this[PIPES].indexOf(p), 1)
      +      p.unpipe()
      +    }
      +  }
      +
      +  addListener(ev, fn) {
      +    return this.on(ev, fn)
      +  }
      +
      +  on(ev, fn) {
      +    const ret = super.on(ev, fn)
      +    if (ev === 'data' && !this[PIPES].length && !this.flowing) this[RESUME]()
      +    else if (ev === 'readable' && this[BUFFERLENGTH] !== 0)
      +      super.emit('readable')
      +    else if (isEndish(ev) && this[EMITTED_END]) {
      +      super.emit(ev)
      +      this.removeAllListeners(ev)
      +    } else if (ev === 'error' && this[EMITTED_ERROR]) {
      +      if (this[ASYNC]) defer(() => fn.call(this, this[EMITTED_ERROR]))
      +      else fn.call(this, this[EMITTED_ERROR])
      +    }
      +    return ret
      +  }
      +
      +  get emittedEnd() {
      +    return this[EMITTED_END]
      +  }
      +
      +  [MAYBE_EMIT_END]() {
      +    if (
      +      !this[EMITTING_END] &&
      +      !this[EMITTED_END] &&
      +      !this[DESTROYED] &&
      +      this[BUFFER].length === 0 &&
      +      this[EOF]
      +    ) {
      +      this[EMITTING_END] = true
      +      this.emit('end')
      +      this.emit('prefinish')
      +      this.emit('finish')
      +      if (this[CLOSED]) this.emit('close')
      +      this[EMITTING_END] = false
      +    }
      +  }
      +
      +  emit(ev, data, ...extra) {
      +    // error and close are only events allowed after calling destroy()
      +    if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED])
      +      return
      +    else if (ev === 'data') {
      +      return !this[OBJECTMODE] && !data
      +        ? false
      +        : this[ASYNC]
      +        ? defer(() => this[EMITDATA](data))
      +        : this[EMITDATA](data)
      +    } else if (ev === 'end') {
      +      return this[EMITEND]()
      +    } else if (ev === 'close') {
      +      this[CLOSED] = true
      +      // don't emit close before 'end' and 'finish'
      +      if (!this[EMITTED_END] && !this[DESTROYED]) return
      +      const ret = super.emit('close')
      +      this.removeAllListeners('close')
      +      return ret
      +    } else if (ev === 'error') {
      +      this[EMITTED_ERROR] = data
      +      super.emit(ERROR, data)
      +      const ret =
      +        !this[SIGNAL] || this.listeners('error').length
      +          ? super.emit('error', data)
      +          : false
      +      this[MAYBE_EMIT_END]()
      +      return ret
      +    } else if (ev === 'resume') {
      +      const ret = super.emit('resume')
      +      this[MAYBE_EMIT_END]()
      +      return ret
      +    } else if (ev === 'finish' || ev === 'prefinish') {
      +      const ret = super.emit(ev)
      +      this.removeAllListeners(ev)
      +      return ret
      +    }
      +
      +    // Some other unknown event
      +    const ret = super.emit(ev, data, ...extra)
      +    this[MAYBE_EMIT_END]()
      +    return ret
      +  }
      +
      +  [EMITDATA](data) {
      +    for (const p of this[PIPES]) {
      +      if (p.dest.write(data) === false) this.pause()
      +    }
      +    const ret = super.emit('data', data)
      +    this[MAYBE_EMIT_END]()
      +    return ret
      +  }
      +
      +  [EMITEND]() {
      +    if (this[EMITTED_END]) return
      +
      +    this[EMITTED_END] = true
      +    this.readable = false
      +    if (this[ASYNC]) defer(() => this[EMITEND2]())
      +    else this[EMITEND2]()
      +  }
      +
      +  [EMITEND2]() {
      +    if (this[DECODER]) {
      +      const data = this[DECODER].end()
      +      if (data) {
      +        for (const p of this[PIPES]) {
      +          p.dest.write(data)
      +        }
      +        super.emit('data', data)
      +      }
      +    }
      +
      +    for (const p of this[PIPES]) {
      +      p.end()
      +    }
      +    const ret = super.emit('end')
      +    this.removeAllListeners('end')
      +    return ret
      +  }
      +
      +  // const all = await stream.collect()
      +  collect() {
      +    const buf = []
      +    if (!this[OBJECTMODE]) buf.dataLength = 0
      +    // set the promise first, in case an error is raised
      +    // by triggering the flow here.
      +    const p = this.promise()
      +    this.on('data', c => {
      +      buf.push(c)
      +      if (!this[OBJECTMODE]) buf.dataLength += c.length
      +    })
      +    return p.then(() => buf)
      +  }
      +
      +  // const data = await stream.concat()
      +  concat() {
      +    return this[OBJECTMODE]
      +      ? Promise.reject(new Error('cannot concat in objectMode'))
      +      : this.collect().then(buf =>
      +          this[OBJECTMODE]
      +            ? Promise.reject(new Error('cannot concat in objectMode'))
      +            : this[ENCODING]
      +            ? buf.join('')
      +            : Buffer.concat(buf, buf.dataLength)
      +        )
      +  }
      +
      +  // stream.promise().then(() => done, er => emitted error)
      +  promise() {
      +    return new Promise((resolve, reject) => {
      +      this.on(DESTROYED, () => reject(new Error('stream destroyed')))
      +      this.on('error', er => reject(er))
      +      this.on('end', () => resolve())
      +    })
      +  }
      +
      +  // for await (let chunk of stream)
      +  [ASYNCITERATOR]() {
      +    let stopped = false
      +    const stop = () => {
      +      this.pause()
      +      stopped = true
      +      return Promise.resolve({ done: true })
      +    }
      +    const next = () => {
      +      if (stopped) return stop()
      +      const res = this.read()
      +      if (res !== null) return Promise.resolve({ done: false, value: res })
      +
      +      if (this[EOF]) return stop()
      +
      +      let resolve = null
      +      let reject = null
      +      const onerr = er => {
      +        this.removeListener('data', ondata)
      +        this.removeListener('end', onend)
      +        this.removeListener(DESTROYED, ondestroy)
      +        stop()
      +        reject(er)
      +      }
      +      const ondata = value => {
      +        this.removeListener('error', onerr)
      +        this.removeListener('end', onend)
      +        this.removeListener(DESTROYED, ondestroy)
      +        this.pause()
      +        resolve({ value: value, done: !!this[EOF] })
      +      }
      +      const onend = () => {
      +        this.removeListener('error', onerr)
      +        this.removeListener('data', ondata)
      +        this.removeListener(DESTROYED, ondestroy)
      +        stop()
      +        resolve({ done: true })
      +      }
      +      const ondestroy = () => onerr(new Error('stream destroyed'))
      +      return new Promise((res, rej) => {
      +        reject = rej
      +        resolve = res
      +        this.once(DESTROYED, ondestroy)
      +        this.once('error', onerr)
      +        this.once('end', onend)
      +        this.once('data', ondata)
      +      })
      +    }
      +
      +    return {
      +      next,
      +      throw: stop,
      +      return: stop,
      +      [ASYNCITERATOR]() {
      +        return this
      +      },
      +    }
      +  }
      +
      +  // for (let chunk of stream)
      +  [ITERATOR]() {
      +    let stopped = false
      +    const stop = () => {
      +      this.pause()
      +      this.removeListener(ERROR, stop)
      +      this.removeListener(DESTROYED, stop)
      +      this.removeListener('end', stop)
      +      stopped = true
      +      return { done: true }
      +    }
      +
      +    const next = () => {
      +      if (stopped) return stop()
      +      const value = this.read()
      +      return value === null ? stop() : { value }
      +    }
      +    this.once('end', stop)
      +    this.once(ERROR, stop)
      +    this.once(DESTROYED, stop)
      +
      +    return {
      +      next,
      +      throw: stop,
      +      return: stop,
      +      [ITERATOR]() {
      +        return this
      +      },
      +    }
      +  }
      +
      +  destroy(er) {
      +    if (this[DESTROYED]) {
      +      if (er) this.emit('error', er)
      +      else this.emit(DESTROYED)
      +      return this
      +    }
      +
      +    this[DESTROYED] = true
      +
      +    // throw away all buffered data, it's never coming out
      +    this[BUFFER].length = 0
      +    this[BUFFERLENGTH] = 0
      +
      +    if (typeof this.close === 'function' && !this[CLOSED]) this.close()
      +
      +    if (er) this.emit('error', er)
      +    // if no error to emit, still reject pending promises
      +    else this.emit(DESTROYED)
      +
      +    return this
      +  }
      +
      +  static isStream(s) {
      +    return (
      +      !!s &&
      +      (s instanceof Minipass ||
      +        s instanceof Stream ||
      +        (s instanceof EE &&
      +          // readable
      +          (typeof s.pipe === 'function' ||
      +            // writable
      +            (typeof s.write === 'function' && typeof s.end === 'function'))))
      +    )
      +  }
      +}
      diff --git a/deps/npm/node_modules/tar/node_modules/minipass/package.json b/deps/npm/node_modules/tar/node_modules/minipass/package.json
      new file mode 100644
      index 00000000000000..0e20e988047f23
      --- /dev/null
      +++ b/deps/npm/node_modules/tar/node_modules/minipass/package.json
      @@ -0,0 +1,76 @@
      +{
      +  "name": "minipass",
      +  "version": "5.0.0",
      +  "description": "minimal implementation of a PassThrough stream",
      +  "main": "./index.js",
      +  "module": "./index.mjs",
      +  "types": "./index.d.ts",
      +  "exports": {
      +    ".": {
      +      "import": {
      +        "types": "./index.d.ts",
      +        "default": "./index.mjs"
      +      },
      +      "require": {
      +        "types": "./index.d.ts",
      +        "default": "./index.js"
      +      }
      +    },
      +    "./package.json": "./package.json"
      +  },
      +  "devDependencies": {
      +    "@types/node": "^17.0.41",
      +    "end-of-stream": "^1.4.0",
      +    "node-abort-controller": "^3.1.1",
      +    "prettier": "^2.6.2",
      +    "tap": "^16.2.0",
      +    "through2": "^2.0.3",
      +    "ts-node": "^10.8.1",
      +    "typedoc": "^0.23.24",
      +    "typescript": "^4.7.3"
      +  },
      +  "scripts": {
      +    "pretest": "npm run prepare",
      +    "presnap": "npm run prepare",
      +    "prepare": "node ./scripts/transpile-to-esm.js",
      +    "snap": "tap",
      +    "test": "tap",
      +    "preversion": "npm test",
      +    "postversion": "npm publish",
      +    "postpublish": "git push origin --follow-tags",
      +    "typedoc": "typedoc ./index.d.ts",
      +    "format": "prettier --write . --loglevel warn"
      +  },
      +  "repository": {
      +    "type": "git",
      +    "url": "git+https://github.com/isaacs/minipass.git"
      +  },
      +  "keywords": [
      +    "passthrough",
      +    "stream"
      +  ],
      +  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
      +  "license": "ISC",
      +  "files": [
      +    "index.d.ts",
      +    "index.js",
      +    "index.mjs"
      +  ],
      +  "tap": {
      +    "check-coverage": true
      +  },
      +  "engines": {
      +    "node": ">=8"
      +  },
      +  "prettier": {
      +    "semi": false,
      +    "printWidth": 80,
      +    "tabWidth": 2,
      +    "useTabs": false,
      +    "singleQuote": true,
      +    "jsxSingleQuote": false,
      +    "bracketSameLine": true,
      +    "arrowParens": "avoid",
      +    "endOfLine": "lf"
      +  }
      +}
      diff --git a/deps/npm/node_modules/tuf-js/dist/config.js b/deps/npm/node_modules/tuf-js/dist/config.js
      index c2d970e2562449..bafb33a8a1bf7c 100644
      --- a/deps/npm/node_modules/tuf-js/dist/config.js
      +++ b/deps/npm/node_modules/tuf-js/dist/config.js
      @@ -10,5 +10,6 @@ exports.defaultConfig = {
           targetsMaxLength: 5000000,
           prefixTargetsWithHash: true,
           fetchTimeout: 100000,
      -    fetchRetries: 2,
      +    fetchRetries: undefined,
      +    fetchRetry: 2,
       };
      diff --git a/deps/npm/node_modules/tuf-js/dist/fetcher.js b/deps/npm/node_modules/tuf-js/dist/fetcher.js
      index d3dcf53eeb8697..f966ce1bb0cdc6 100644
      --- a/deps/npm/node_modules/tuf-js/dist/fetcher.js
      +++ b/deps/npm/node_modules/tuf-js/dist/fetcher.js
      @@ -57,13 +57,13 @@ class DefaultFetcher extends BaseFetcher {
           constructor(options = {}) {
               super();
               this.timeout = options.timeout;
      -        this.retries = options.retries;
      +        this.retry = options.retry;
           }
           async fetch(url) {
               log('GET %s', url);
               const response = await (0, make_fetch_happen_1.default)(url, {
                   timeout: this.timeout,
      -            retry: this.retries,
      +            retry: this.retry,
               });
               if (!response.ok || !response?.body) {
                   throw new error_1.DownloadHTTPError('Failed to download', response.status);
      diff --git a/deps/npm/node_modules/tuf-js/dist/updater.js b/deps/npm/node_modules/tuf-js/dist/updater.js
      index 2aba48d24affd5..2d0c769c7af647 100644
      --- a/deps/npm/node_modules/tuf-js/dist/updater.js
      +++ b/deps/npm/node_modules/tuf-js/dist/updater.js
      @@ -51,7 +51,7 @@ class Updater {
                   fetcher ||
                       new fetcher_1.DefaultFetcher({
                           timeout: this.config.fetchTimeout,
      -                    retries: this.config.fetchRetries,
      +                    retry: this.config.fetchRetries ?? this.config.fetchRetry,
                       });
           }
           // refresh and load the metadata before downloading the target
      @@ -306,7 +306,7 @@ class Updater {
               const filePath = encodeURIComponent(targetInfo.path);
               return path.join(this.targetDir, filePath);
           }
      -    async persistMetadata(metaDataName, bytesData) {
      +    persistMetadata(metaDataName, bytesData) {
               try {
                   const filePath = path.join(this.dir, `${metaDataName}.json`);
                   log('WRITE %s', filePath);
      diff --git a/deps/npm/node_modules/tuf-js/package.json b/deps/npm/node_modules/tuf-js/package.json
      index 9187d88083272c..c757d6a00d7008 100644
      --- a/deps/npm/node_modules/tuf-js/package.json
      +++ b/deps/npm/node_modules/tuf-js/package.json
      @@ -1,6 +1,6 @@
       {
         "name": "tuf-js",
      -  "version": "1.1.7",
      +  "version": "2.1.0",
         "description": "JavaScript implementation of The Update Framework (TUF)",
         "main": "dist/index.js",
         "types": "dist/index.d.ts",
      @@ -28,19 +28,16 @@
         },
         "homepage": "https://github.com/theupdateframework/tuf-js/tree/main/packages/client#readme",
         "devDependencies": {
      -    "@tufjs/repo-mock": "1.3.1",
      +    "@tufjs/repo-mock": "2.0.0",
           "@types/debug": "^4.1.8",
      -    "@types/make-fetch-happen": "^10.0.1",
      -    "@types/node": "^20.2.5",
      -    "nock": "^13.3.1",
      -    "typescript": "^5.1.3"
      +    "@types/make-fetch-happen": "^10.0.1"
         },
         "dependencies": {
      -    "@tufjs/models": "1.0.4",
      +    "@tufjs/models": "2.0.0",
           "debug": "^4.3.4",
      -    "make-fetch-happen": "^11.1.1"
      +    "make-fetch-happen": "^13.0.0"
         },
         "engines": {
      -    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
      +    "node": "^16.14.0 || >=18.0.0"
         }
       }
      diff --git a/deps/npm/node_modules/which/lib/index.js b/deps/npm/node_modules/which/lib/index.js
      index 52e9ea62377e74..2fd358baf888fd 100644
      --- a/deps/npm/node_modules/which/lib/index.js
      +++ b/deps/npm/node_modules/which/lib/index.js
      @@ -1,4 +1,4 @@
      -const isexe = require('isexe')
      +const { isexe, sync: isexeSync } = require('isexe')
       const { join, delimiter, sep, posix } = require('path')
       
       const isWindows = process.platform === 'win32'
      @@ -31,11 +31,7 @@ const getPathInfo = (cmd, {
         if (isWindows) {
           const pathExtExe = optPathExt ||
             ['.EXE', '.CMD', '.BAT', '.COM'].join(optDelimiter)
      -    const pathExt = pathExtExe.split(optDelimiter).reduce((acc, item) => {
      -      acc.push(item)
      -      acc.push(item.toLowerCase())
      -      return acc
      -    }, [])
      +    const pathExt = pathExtExe.split(optDelimiter).flatMap((item) => [item, item.toLowerCase()])
           if (cmd.includes('.') && pathExt[0] !== '') {
             pathExt.unshift('')
           }
      @@ -90,7 +86,7 @@ const whichSync = (cmd, opt = {}) => {
       
           for (const ext of pathExt) {
             const withExt = p + ext
      -      const is = isexe.sync(withExt, { pathExt: pathExtExe, ignoreErrors: true })
      +      const is = isexeSync(withExt, { pathExt: pathExtExe, ignoreErrors: true })
             if (is) {
               if (!opt.all) {
                 return withExt
      diff --git a/deps/npm/node_modules/which/node_modules/isexe/LICENSE b/deps/npm/node_modules/which/node_modules/isexe/LICENSE
      new file mode 100644
      index 00000000000000..c925dbe826b670
      --- /dev/null
      +++ b/deps/npm/node_modules/which/node_modules/isexe/LICENSE
      @@ -0,0 +1,15 @@
      +The ISC License
      +
      +Copyright (c) 2016-2022 Isaac Z. Schlueter and Contributors
      +
      +Permission to use, copy, modify, and/or distribute this software for any
      +purpose with or without fee is hereby granted, provided that the above
      +copyright notice and this permission notice appear in all copies.
      +
      +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
      +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
      +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
      +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
      +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
      +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
      +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
      diff --git a/deps/npm/node_modules/which/node_modules/isexe/dist/cjs/index.js b/deps/npm/node_modules/which/node_modules/isexe/dist/cjs/index.js
      new file mode 100644
      index 00000000000000..cefcb66b5c5434
      --- /dev/null
      +++ b/deps/npm/node_modules/which/node_modules/isexe/dist/cjs/index.js
      @@ -0,0 +1,46 @@
      +"use strict";
      +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
      +    if (k2 === undefined) k2 = k;
      +    var desc = Object.getOwnPropertyDescriptor(m, k);
      +    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      +      desc = { enumerable: true, get: function() { return m[k]; } };
      +    }
      +    Object.defineProperty(o, k2, desc);
      +}) : (function(o, m, k, k2) {
      +    if (k2 === undefined) k2 = k;
      +    o[k2] = m[k];
      +}));
      +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
      +    Object.defineProperty(o, "default", { enumerable: true, value: v });
      +}) : function(o, v) {
      +    o["default"] = v;
      +});
      +var __importStar = (this && this.__importStar) || function (mod) {
      +    if (mod && mod.__esModule) return mod;
      +    var result = {};
      +    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
      +    __setModuleDefault(result, mod);
      +    return result;
      +};
      +var __exportStar = (this && this.__exportStar) || function(m, exports) {
      +    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
      +};
      +Object.defineProperty(exports, "__esModule", { value: true });
      +exports.sync = exports.isexe = exports.posix = exports.win32 = void 0;
      +const posix = __importStar(require("./posix.js"));
      +exports.posix = posix;
      +const win32 = __importStar(require("./win32.js"));
      +exports.win32 = win32;
      +__exportStar(require("./options.js"), exports);
      +const platform = process.env._ISEXE_TEST_PLATFORM_ || process.platform;
      +const impl = platform === 'win32' ? win32 : posix;
      +/**
      + * Determine whether a path is executable on the current platform.
      + */
      +exports.isexe = impl.isexe;
      +/**
      + * Synchronously determine whether a path is executable on the
      + * current platform.
      + */
      +exports.sync = impl.sync;
      +//# sourceMappingURL=index.js.map
      \ No newline at end of file
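
The CommonJS entry point above simply re-exports whichever platform implementation matches process.platform (or the _ISEXE_TEST_PLATFORM_ override). A minimal usage sketch of that API, assuming isexe@3 is installed; the option names (ignoreErrors, pathExt, uid/gid/groups) come from the posix and win32 sources added later in this patch:

    // sketch: probing a path with the isexe v3 API (CommonJS build shown above)
    const { isexe, sync: isexeSync } = require('isexe')

    async function main () {
      // async form; with ignoreErrors, any stat failure (ENOENT, EPERM, ...) yields false
      console.log(await isexe(process.execPath, { ignoreErrors: true }))

      // sync form; pathExt only matters on Windows, where it overrides PATHEXT
      console.log(isexeSync(process.execPath, { ignoreErrors: true, pathExt: '.EXE;.CMD;.BAT' }))
    }

    main()
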
      diff --git a/deps/npm/node_modules/which/node_modules/isexe/dist/cjs/options.js b/deps/npm/node_modules/which/node_modules/isexe/dist/cjs/options.js
      new file mode 100644
      index 00000000000000..0dfad0762cc32c
      --- /dev/null
      +++ b/deps/npm/node_modules/which/node_modules/isexe/dist/cjs/options.js
      @@ -0,0 +1,3 @@
      +"use strict";
      +Object.defineProperty(exports, "__esModule", { value: true });
      +//# sourceMappingURL=options.js.map
      \ No newline at end of file
      diff --git a/deps/npm/node_modules/which/node_modules/isexe/dist/cjs/package.json b/deps/npm/node_modules/which/node_modules/isexe/dist/cjs/package.json
      new file mode 100644
      index 00000000000000..5bbefffbabee39
      --- /dev/null
      +++ b/deps/npm/node_modules/which/node_modules/isexe/dist/cjs/package.json
      @@ -0,0 +1,3 @@
      +{
      +  "type": "commonjs"
      +}
      diff --git a/deps/npm/node_modules/which/node_modules/isexe/dist/cjs/posix.js b/deps/npm/node_modules/which/node_modules/isexe/dist/cjs/posix.js
      new file mode 100644
      index 00000000000000..3bc5e79d7007e9
      --- /dev/null
      +++ b/deps/npm/node_modules/which/node_modules/isexe/dist/cjs/posix.js
      @@ -0,0 +1,67 @@
      +"use strict";
      +/**
      + * This is the Posix implementation of isexe, which uses the file
      + * mode and uid/gid values.
      + *
      + * @module
      + */
      +Object.defineProperty(exports, "__esModule", { value: true });
      +exports.sync = exports.isexe = void 0;
      +const fs_1 = require("fs");
      +const promises_1 = require("fs/promises");
      +/**
      + * Determine whether a path is executable according to the mode and
      + * current (or specified) user and group IDs.
      + */
      +const isexe = async (path, options = {}) => {
      +    const { ignoreErrors = false } = options;
      +    try {
      +        return checkStat(await (0, promises_1.stat)(path), options);
      +    }
      +    catch (e) {
      +        const er = e;
      +        if (ignoreErrors || er.code === 'EACCES')
      +            return false;
      +        throw er;
      +    }
      +};
      +exports.isexe = isexe;
      +/**
      + * Synchronously determine whether a path is executable according to
      + * the mode and current (or specified) user and group IDs.
      + */
      +const sync = (path, options = {}) => {
      +    const { ignoreErrors = false } = options;
      +    try {
      +        return checkStat((0, fs_1.statSync)(path), options);
      +    }
      +    catch (e) {
      +        const er = e;
      +        if (ignoreErrors || er.code === 'EACCES')
      +            return false;
      +        throw er;
      +    }
      +};
      +exports.sync = sync;
      +const checkStat = (stat, options) => stat.isFile() && checkMode(stat, options);
      +const checkMode = (stat, options) => {
      +    const myUid = options.uid ?? process.getuid?.();
      +    const myGroups = options.groups ?? process.getgroups?.() ?? [];
      +    const myGid = options.gid ?? process.getgid?.() ?? myGroups[0];
      +    if (myUid === undefined || myGid === undefined) {
      +        throw new Error('cannot get uid or gid');
      +    }
      +    const groups = new Set([myGid, ...myGroups]);
      +    const mod = stat.mode;
      +    const uid = stat.uid;
      +    const gid = stat.gid;
      +    const u = parseInt('100', 8);
      +    const g = parseInt('010', 8);
      +    const o = parseInt('001', 8);
      +    const ug = u | g;
      +    return !!(mod & o ||
      +        (mod & g && groups.has(gid)) ||
      +        (mod & u && uid === myUid) ||
      +        (mod & ug && myUid === 0));
      +};
      +//# sourceMappingURL=posix.js.map
      \ No newline at end of file
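
The checkMode helper above reduces the POSIX permission test to three octal masks (the compiled output spells them as parseInt('100', 8) and friends). A worked example of how those masks combine, using hypothetical uid/gid/mode values rather than anything from this patch:

    // worked example of the checkMode bit logic above (all values hypothetical)
    const u = 0o100 // owner-execute
    const g = 0o010 // group-execute
    const o = 0o001 // other-execute

    const mode = 0o750            // rwxr-x---
    const fileUid = 1000
    const fileGid = 50
    const myUid = 1000
    const myGroups = new Set([20, 50])

    const executable =
      !!(mode & o) ||                          // anyone may execute
      !!(mode & g && myGroups.has(fileGid)) || // group bit set and we share the group
      !!(mode & u && fileUid === myUid) ||     // owner bit set and we own the file
      !!(mode & (u | g) && myUid === 0)        // root may use either execute bit

    console.log(executable) // true: both the owner and group branches match here
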
      diff --git a/deps/npm/node_modules/which/node_modules/isexe/dist/cjs/win32.js b/deps/npm/node_modules/which/node_modules/isexe/dist/cjs/win32.js
      new file mode 100644
      index 00000000000000..fa7a4d2f7d240d
      --- /dev/null
      +++ b/deps/npm/node_modules/which/node_modules/isexe/dist/cjs/win32.js
      @@ -0,0 +1,62 @@
      +"use strict";
      +/**
      + * This is the Windows implementation of isexe, which uses the file
      + * extension and PATHEXT setting.
      + *
      + * @module
      + */
      +Object.defineProperty(exports, "__esModule", { value: true });
      +exports.sync = exports.isexe = void 0;
      +const fs_1 = require("fs");
      +const promises_1 = require("fs/promises");
      +/**
      + * Determine whether a path is executable based on the file extension
      + * and PATHEXT environment variable (or specified pathExt option)
      + */
      +const isexe = async (path, options = {}) => {
      +    const { ignoreErrors = false } = options;
      +    try {
      +        return checkStat(await (0, promises_1.stat)(path), path, options);
      +    }
      +    catch (e) {
      +        const er = e;
      +        if (ignoreErrors || er.code === 'EACCES')
      +            return false;
      +        throw er;
      +    }
      +};
      +exports.isexe = isexe;
      +/**
      + * Synchronously determine whether a path is executable based on the file
      + * extension and PATHEXT environment variable (or specified pathExt option)
      + */
      +const sync = (path, options = {}) => {
      +    const { ignoreErrors = false } = options;
      +    try {
      +        return checkStat((0, fs_1.statSync)(path), path, options);
      +    }
      +    catch (e) {
      +        const er = e;
      +        if (ignoreErrors || er.code === 'EACCES')
      +            return false;
      +        throw er;
      +    }
      +};
      +exports.sync = sync;
      +const checkPathExt = (path, options) => {
      +    const { pathExt = process.env.PATHEXT || '' } = options;
      +    const peSplit = pathExt.split(';');
      +    if (peSplit.indexOf('') !== -1) {
      +        return true;
      +    }
      +    for (let i = 0; i < peSplit.length; i++) {
      +        const p = peSplit[i].toLowerCase();
      +        const ext = path.substring(path.length - p.length).toLowerCase();
      +        if (p && ext === p) {
      +            return true;
      +        }
      +    }
      +    return false;
      +};
      +const checkStat = (stat, path, options) => stat.isFile() && checkPathExt(path, options);
      +//# sourceMappingURL=win32.js.map
      \ No newline at end of file
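
The Windows variant above never inspects the mode bits; it only compares the file extension, case-insensitively, against the semicolon-separated PATHEXT list, and an empty entry in that list makes every path count as executable. A standalone restatement of that check (the helper name is hypothetical, not part of the patch):

    // standalone restatement of the checkPathExt logic above
    const hasExecutableExtension = (path, pathExt = process.env.PATHEXT || '') => {
      const entries = pathExt.split(';')
      if (entries.includes('')) {
        return true // an empty entry means any extension is acceptable
      }
      return entries.some(ext =>
        ext && path.toLowerCase().endsWith(ext.toLowerCase()))
    }

    console.log(hasExecutableExtension('C:\\tools\\build.CMD', '.EXE;.CMD;.BAT')) // true
    console.log(hasExecutableExtension('C:\\tools\\notes.txt', '.EXE;.CMD;.BAT')) // false
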
      diff --git a/deps/npm/node_modules/which/node_modules/isexe/dist/mjs/index.js b/deps/npm/node_modules/which/node_modules/isexe/dist/mjs/index.js
      new file mode 100644
      index 00000000000000..1e309acd7355ec
      --- /dev/null
      +++ b/deps/npm/node_modules/which/node_modules/isexe/dist/mjs/index.js
      @@ -0,0 +1,16 @@
      +import * as posix from './posix.js';
      +import * as win32 from './win32.js';
      +export * from './options.js';
      +export { win32, posix };
      +const platform = process.env._ISEXE_TEST_PLATFORM_ || process.platform;
      +const impl = platform === 'win32' ? win32 : posix;
      +/**
      + * Determine whether a path is executable on the current platform.
      + */
      +export const isexe = impl.isexe;
      +/**
      + * Synchronously determine whether a path is executable on the
      + * current platform.
      + */
      +export const sync = impl.sync;
      +//# sourceMappingURL=index.js.map
      \ No newline at end of file
      diff --git a/deps/npm/node_modules/which/node_modules/isexe/dist/mjs/options.js b/deps/npm/node_modules/which/node_modules/isexe/dist/mjs/options.js
      new file mode 100644
      index 00000000000000..e9ded40bd5b2cd
      --- /dev/null
      +++ b/deps/npm/node_modules/which/node_modules/isexe/dist/mjs/options.js
      @@ -0,0 +1,2 @@
      +export {};
      +//# sourceMappingURL=options.js.map
      \ No newline at end of file
      diff --git a/deps/npm/node_modules/which/node_modules/isexe/dist/mjs/package.json b/deps/npm/node_modules/which/node_modules/isexe/dist/mjs/package.json
      new file mode 100644
      index 00000000000000..3dbc1ca591c055
      --- /dev/null
      +++ b/deps/npm/node_modules/which/node_modules/isexe/dist/mjs/package.json
      @@ -0,0 +1,3 @@
      +{
      +  "type": "module"
      +}
      diff --git a/deps/npm/node_modules/which/node_modules/isexe/dist/mjs/posix.js b/deps/npm/node_modules/which/node_modules/isexe/dist/mjs/posix.js
      new file mode 100644
      index 00000000000000..c453776c0452f7
      --- /dev/null
      +++ b/deps/npm/node_modules/which/node_modules/isexe/dist/mjs/posix.js
      @@ -0,0 +1,62 @@
      +/**
      + * This is the Posix implementation of isexe, which uses the file
      + * mode and uid/gid values.
      + *
      + * @module
      + */
      +import { statSync } from 'fs';
      +import { stat } from 'fs/promises';
      +/**
      + * Determine whether a path is executable according to the mode and
      + * current (or specified) user and group IDs.
      + */
      +export const isexe = async (path, options = {}) => {
      +    const { ignoreErrors = false } = options;
      +    try {
      +        return checkStat(await stat(path), options);
      +    }
      +    catch (e) {
      +        const er = e;
      +        if (ignoreErrors || er.code === 'EACCES')
      +            return false;
      +        throw er;
      +    }
      +};
      +/**
      + * Synchronously determine whether a path is executable according to
      + * the mode and current (or specified) user and group IDs.
      + */
      +export const sync = (path, options = {}) => {
      +    const { ignoreErrors = false } = options;
      +    try {
      +        return checkStat(statSync(path), options);
      +    }
      +    catch (e) {
      +        const er = e;
      +        if (ignoreErrors || er.code === 'EACCES')
      +            return false;
      +        throw er;
      +    }
      +};
      +const checkStat = (stat, options) => stat.isFile() && checkMode(stat, options);
      +const checkMode = (stat, options) => {
      +    const myUid = options.uid ?? process.getuid?.();
      +    const myGroups = options.groups ?? process.getgroups?.() ?? [];
      +    const myGid = options.gid ?? process.getgid?.() ?? myGroups[0];
      +    if (myUid === undefined || myGid === undefined) {
      +        throw new Error('cannot get uid or gid');
      +    }
      +    const groups = new Set([myGid, ...myGroups]);
      +    const mod = stat.mode;
      +    const uid = stat.uid;
      +    const gid = stat.gid;
      +    const u = parseInt('100', 8);
      +    const g = parseInt('010', 8);
      +    const o = parseInt('001', 8);
      +    const ug = u | g;
      +    return !!(mod & o ||
      +        (mod & g && groups.has(gid)) ||
      +        (mod & u && uid === myUid) ||
      +        (mod & ug && myUid === 0));
      +};
      +//# sourceMappingURL=posix.js.map
      \ No newline at end of file
      diff --git a/deps/npm/node_modules/which/node_modules/isexe/dist/mjs/win32.js b/deps/npm/node_modules/which/node_modules/isexe/dist/mjs/win32.js
      new file mode 100644
      index 00000000000000..a354ee2a5115c7
      --- /dev/null
      +++ b/deps/npm/node_modules/which/node_modules/isexe/dist/mjs/win32.js
      @@ -0,0 +1,57 @@
      +/**
      + * This is the Windows implementation of isexe, which uses the file
      + * extension and PATHEXT setting.
      + *
      + * @module
      + */
      +import { statSync } from 'fs';
      +import { stat } from 'fs/promises';
      +/**
      + * Determine whether a path is executable based on the file extension
      + * and PATHEXT environment variable (or specified pathExt option)
      + */
      +export const isexe = async (path, options = {}) => {
      +    const { ignoreErrors = false } = options;
      +    try {
      +        return checkStat(await stat(path), path, options);
      +    }
      +    catch (e) {
      +        const er = e;
      +        if (ignoreErrors || er.code === 'EACCES')
      +            return false;
      +        throw er;
      +    }
      +};
      +/**
      + * Synchronously determine whether a path is executable based on the file
      + * extension and PATHEXT environment variable (or specified pathExt option)
      + */
      +export const sync = (path, options = {}) => {
      +    const { ignoreErrors = false } = options;
      +    try {
      +        return checkStat(statSync(path), path, options);
      +    }
      +    catch (e) {
      +        const er = e;
      +        if (ignoreErrors || er.code === 'EACCES')
      +            return false;
      +        throw er;
      +    }
      +};
      +const checkPathExt = (path, options) => {
      +    const { pathExt = process.env.PATHEXT || '' } = options;
      +    const peSplit = pathExt.split(';');
      +    if (peSplit.indexOf('') !== -1) {
      +        return true;
      +    }
      +    for (let i = 0; i < peSplit.length; i++) {
      +        const p = peSplit[i].toLowerCase();
      +        const ext = path.substring(path.length - p.length).toLowerCase();
      +        if (p && ext === p) {
      +            return true;
      +        }
      +    }
      +    return false;
      +};
      +const checkStat = (stat, path, options) => stat.isFile() && checkPathExt(path, options);
      +//# sourceMappingURL=win32.js.map
      \ No newline at end of file
      diff --git a/deps/npm/node_modules/which/node_modules/isexe/package.json b/deps/npm/node_modules/which/node_modules/isexe/package.json
      new file mode 100644
      index 00000000000000..a0e2cd04bfdbfe
      --- /dev/null
      +++ b/deps/npm/node_modules/which/node_modules/isexe/package.json
      @@ -0,0 +1,96 @@
      +{
      +  "name": "isexe",
      +  "version": "3.1.1",
      +  "description": "Minimal module to check if a file is executable.",
      +  "main": "./dist/cjs/index.js",
      +  "module": "./dist/mjs/index.js",
      +  "types": "./dist/cjs/index.js",
      +  "files": [
      +    "dist"
      +  ],
      +  "exports": {
      +    ".": {
      +      "import": {
      +        "types": "./dist/mjs/index.d.ts",
      +        "default": "./dist/mjs/index.js"
      +      },
      +      "require": {
      +        "types": "./dist/cjs/index.d.ts",
      +        "default": "./dist/cjs/index.js"
      +      }
      +    },
      +    "./posix": {
      +      "import": {
      +        "types": "./dist/mjs/posix.d.ts",
      +        "default": "./dist/mjs/posix.js"
      +      },
      +      "require": {
      +        "types": "./dist/cjs/posix.d.ts",
      +        "default": "./dist/cjs/posix.js"
      +      }
      +    },
      +    "./win32": {
      +      "import": {
      +        "types": "./dist/mjs/win32.d.ts",
      +        "default": "./dist/mjs/win32.js"
      +      },
      +      "require": {
      +        "types": "./dist/cjs/win32.d.ts",
      +        "default": "./dist/cjs/win32.js"
      +      }
      +    },
      +    "./package.json": "./package.json"
      +  },
      +  "devDependencies": {
      +    "@types/node": "^20.4.5",
      +    "@types/tap": "^15.0.8",
      +    "c8": "^8.0.1",
      +    "mkdirp": "^0.5.1",
      +    "prettier": "^2.8.8",
      +    "rimraf": "^2.5.0",
      +    "sync-content": "^1.0.2",
      +    "tap": "^16.3.8",
      +    "ts-node": "^10.9.1",
      +    "typedoc": "^0.24.8",
      +    "typescript": "^5.1.6"
      +  },
      +  "scripts": {
      +    "preversion": "npm test",
      +    "postversion": "npm publish",
      +    "prepublishOnly": "git push origin --follow-tags",
      +    "prepare": "tsc -p tsconfig/cjs.json && tsc -p tsconfig/esm.json && bash ./scripts/fixup.sh",
      +    "pretest": "npm run prepare",
      +    "presnap": "npm run prepare",
      +    "test": "c8 tap",
      +    "snap": "c8 tap",
      +    "format": "prettier --write . --loglevel warn --ignore-path ../../.prettierignore --cache",
      +    "typedoc": "typedoc --tsconfig tsconfig/esm.json ./src/*.ts"
      +  },
      +  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
      +  "license": "ISC",
      +  "tap": {
      +    "coverage": false,
      +    "node-arg": [
      +      "--enable-source-maps",
      +      "--no-warnings",
      +      "--loader",
      +      "ts-node/esm"
      +    ],
      +    "ts": false
      +  },
      +  "prettier": {
      +    "semi": false,
      +    "printWidth": 75,
      +    "tabWidth": 2,
      +    "useTabs": false,
      +    "singleQuote": true,
      +    "jsxSingleQuote": false,
      +    "bracketSameLine": true,
      +    "arrowParens": "avoid",
      +    "endOfLine": "lf"
      +  },
      +  "repository": "https://github.com/isaacs/isexe",
      +  "engines": {
      +    "node": ">=16"
      +  }
      +}
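
The exports map above publishes the platform implementations as isexe/posix and isexe/win32 subpaths and selects the CJS or ESM build through the require/import conditions. A short sketch of forcing one implementation explicitly, assuming isexe@3 is installed:

    // sketch: resolving a specific implementation through the exports map above
    const posix = require('isexe/posix') // -> ./dist/cjs/posix.js
    const win32 = require('isexe/win32') // -> ./dist/cjs/win32.js

    // an ESM consumer would get the ./dist/mjs/ builds instead, e.g.
    //   import { sync } from 'isexe/win32'

    console.log(typeof posix.sync, typeof win32.sync) // 'function' 'function'
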
      diff --git a/deps/npm/node_modules/which/package.json b/deps/npm/node_modules/which/package.json
      index 989e01c9a36830..515bfb22ca0e1e 100644
      --- a/deps/npm/node_modules/which/package.json
      +++ b/deps/npm/node_modules/which/package.json
      @@ -2,7 +2,7 @@
         "author": "GitHub Inc.",
         "name": "which",
         "description": "Like which(1) unix command. Find the first instance of an executable in the PATH.",
      -  "version": "3.0.1",
      +  "version": "4.0.0",
         "repository": {
           "type": "git",
           "url": "https://github.com/npm/node-which.git"
      @@ -13,11 +13,11 @@
         },
         "license": "ISC",
         "dependencies": {
      -    "isexe": "^2.0.0"
      +    "isexe": "^3.1.1"
         },
         "devDependencies": {
           "@npmcli/eslint-config": "^4.0.0",
      -    "@npmcli/template-oss": "4.14.1",
      +    "@npmcli/template-oss": "4.18.0",
           "tap": "^16.3.0"
         },
         "scripts": {
      @@ -41,11 +41,17 @@
           ]
         },
         "engines": {
      -    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
      +    "node": "^16.13.0 || >=18.0.0"
         },
         "templateOSS": {
           "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
      -    "version": "4.14.1",
      +    "ciVersions": [
      +      "16.13.0",
      +      "16.x",
      +      "18.0.0",
      +      "18.x"
      +    ],
      +    "version": "4.18.0",
           "publish": "true"
         }
       }
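
which 4.0.0 swaps isexe@2 for isexe@3 (the hunk at the top of this patch file replaces isexe.sync with the named isexeSync import) and drops Node 14 from its engines range. A usage sketch of the API this vendored copy exposes, assuming which@4 is installed; nothrow and all are pre-existing which options, not something introduced by this diff:

    // sketch: resolving executables with which@4, which now delegates to isexe@3
    const which = require('which')

    async function main () {
      // async lookup; rejects if the command cannot be found on the PATH
      console.log(await which('node'))

      // sync lookup; nothrow returns null instead of throwing on a miss
      console.log(which.sync('definitely-not-a-real-command', { nothrow: true }))

      // all: true returns every match on the PATH
      console.log(await which('node', { all: true }))
    }

    main()
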
      diff --git a/deps/npm/package.json b/deps/npm/package.json
      index 6e719a073893b0..8418f543b406ac 100644
      --- a/deps/npm/package.json
      +++ b/deps/npm/package.json
      @@ -1,5 +1,5 @@
       {
      -  "version": "9.8.1",
      +  "version": "10.0.0",
         "name": "npm",
         "description": "a package manager for JavaScript",
         "workspaces": [
      @@ -52,72 +52,72 @@
         },
         "dependencies": {
           "@isaacs/string-locale-compare": "^1.1.0",
      -    "@npmcli/arborist": "^6.3.0",
      -    "@npmcli/config": "^6.2.1",
      +    "@npmcli/arborist": "^7.0.0",
      +    "@npmcli/config": "^7.1.0",
           "@npmcli/fs": "^3.1.0",
           "@npmcli/map-workspaces": "^3.0.4",
      -    "@npmcli/package-json": "^4.0.1",
      -    "@npmcli/promise-spawn": "^6.0.2",
      -    "@npmcli/run-script": "^6.0.2",
      +    "@npmcli/package-json": "^5.0.0",
      +    "@npmcli/promise-spawn": "^7.0.0",
      +    "@npmcli/run-script": "^7.0.1",
      +    "@sigstore/tuf": "^2.1.0",
           "abbrev": "^2.0.0",
           "archy": "~1.0.0",
      -    "cacache": "^17.1.3",
      +    "cacache": "^18.0.0",
           "chalk": "^5.3.0",
           "ci-info": "^3.8.0",
           "cli-columns": "^4.0.0",
           "cli-table3": "^0.6.3",
           "columnify": "^1.6.0",
           "fastest-levenshtein": "^1.0.16",
      -    "fs-minipass": "^3.0.2",
      -    "glob": "^10.2.7",
      +    "fs-minipass": "^3.0.3",
      +    "glob": "^10.3.3",
           "graceful-fs": "^4.2.11",
      -    "hosted-git-info": "^6.1.1",
      +    "hosted-git-info": "^7.0.0",
           "ini": "^4.1.1",
      -    "init-package-json": "^5.0.0",
      +    "init-package-json": "^6.0.0",
           "is-cidr": "^4.0.2",
           "json-parse-even-better-errors": "^3.0.0",
      -    "libnpmaccess": "^7.0.2",
      -    "libnpmdiff": "^5.0.19",
      -    "libnpmexec": "^6.0.3",
      -    "libnpmfund": "^4.0.19",
      -    "libnpmhook": "^9.0.3",
      -    "libnpmorg": "^5.0.4",
      -    "libnpmpack": "^5.0.19",
      -    "libnpmpublish": "^7.5.0",
      -    "libnpmsearch": "^6.0.2",
      -    "libnpmteam": "^5.0.3",
      -    "libnpmversion": "^4.0.2",
      -    "make-fetch-happen": "^11.1.1",
      +    "libnpmaccess": "^8.0.0",
      +    "libnpmdiff": "^6.0.0",
      +    "libnpmexec": "^7.0.0",
      +    "libnpmfund": "^4.1.0",
      +    "libnpmhook": "^10.0.0",
      +    "libnpmorg": "^6.0.0",
      +    "libnpmpack": "^6.0.0",
      +    "libnpmpublish": "^9.0.0",
      +    "libnpmsearch": "^7.0.0",
      +    "libnpmteam": "^6.0.0",
      +    "libnpmversion": "^5.0.0",
      +    "make-fetch-happen": "^13.0.0",
           "minimatch": "^9.0.3",
      -    "minipass": "^5.0.0",
      +    "minipass": "^7.0.3",
           "minipass-pipeline": "^1.2.4",
           "ms": "^2.1.2",
           "node-gyp": "^9.4.0",
           "nopt": "^7.2.0",
           "npm-audit-report": "^5.0.0",
      -    "npm-install-checks": "^6.1.1",
      -    "npm-package-arg": "^10.1.0",
      -    "npm-pick-manifest": "^8.0.1",
      -    "npm-profile": "^7.0.1",
      -    "npm-registry-fetch": "^14.0.5",
      +    "npm-install-checks": "^6.2.0",
      +    "npm-package-arg": "^11.0.0",
      +    "npm-pick-manifest": "^9.0.0",
      +    "npm-profile": "^9.0.0",
      +    "npm-registry-fetch": "^16.0.0",
           "npm-user-validate": "^2.0.0",
           "npmlog": "^7.0.1",
           "p-map": "^4.0.0",
      -    "pacote": "^15.2.0",
      +    "pacote": "^17.0.4",
           "parse-conflict-json": "^3.0.1",
           "proc-log": "^3.0.0",
           "qrcode-terminal": "^0.12.0",
           "read": "^2.1.0",
           "semver": "^7.5.4",
      -    "sigstore": "^1.7.0",
      -    "ssri": "^10.0.4",
      +    "ssri": "^10.0.5",
           "supports-color": "^9.4.0",
           "tar": "^6.1.15",
           "text-table": "~0.2.0",
           "tiny-relative-date": "^1.3.0",
           "treeverse": "^3.0.0",
           "validate-npm-package-name": "^5.0.0",
      -    "which": "^3.0.1",
      +    "which": "^4.0.0",
           "write-file-atomic": "^5.0.1"
         },
         "bundleDependencies": [
      @@ -129,6 +129,7 @@
           "@npmcli/package-json",
           "@npmcli/promise-spawn",
           "@npmcli/run-script",
      +    "@sigstore/tuf",
           "abbrev",
           "archy",
           "cacache",
      @@ -179,7 +180,6 @@
           "qrcode-terminal",
           "read",
           "semver",
      -    "sigstore",
           "ssri",
           "supports-color",
           "tar",
      @@ -193,20 +193,20 @@
         "devDependencies": {
           "@npmcli/docs": "^1.0.0",
           "@npmcli/eslint-config": "^4.0.2",
      -    "@npmcli/git": "^4.1.0",
      +    "@npmcli/git": "^5.0.3",
           "@npmcli/mock-globals": "^1.0.0",
           "@npmcli/mock-registry": "^1.0.0",
           "@npmcli/template-oss": "4.18.0",
      -    "@tufjs/repo-mock": "^1.3.1",
      +    "@tufjs/repo-mock": "^2.0.0",
           "diff": "^5.1.0",
           "licensee": "^10.0.0",
      -    "nock": "^13.3.0",
      -    "npm-packlist": "^7.0.4",
      +    "nock": "^13.3.3",
      +    "npm-packlist": "^8.0.0",
           "remark": "^14.0.2",
           "remark-gfm": "^3.0.1",
           "remark-github": "^11.2.4",
           "spawk": "^1.7.1",
      -    "tap": "^16.3.4"
      +    "tap": "^16.3.8"
         },
         "scripts": {
           "dependencies": "node scripts/bundle-and-gitignore-deps.js && node scripts/dependency-graph.js",
      @@ -254,6 +254,6 @@
         },
         "license": "Artistic-2.0",
         "engines": {
      -    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
      +    "node": "^18.17.0 || >=20.5.0"
         }
       }
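
The engines range tightens from "^14.17.0 || ^16.13.0 || >=18.0.0" to "^18.17.0 || >=20.5.0", matching npm 10's support policy. One quick way to check a running Node.js version against that range, using the semver package npm already bundles (the snippet is illustrative, not code from this patch):

    // illustrative check of the new engines range with the bundled semver dependency
    const semver = require('semver')

    const supported = semver.satisfies(process.version, '^18.17.0 || >=20.5.0')
    console.log(`node ${process.version} supported by npm@10: ${supported}`)
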
      diff --git a/deps/npm/tap-snapshots/test/lib/commands/config.js.test.cjs b/deps/npm/tap-snapshots/test/lib/commands/config.js.test.cjs
      index af600062c980e7..8346e8d9131fd7 100644
      --- a/deps/npm/tap-snapshots/test/lib/commands/config.js.test.cjs
      +++ b/deps/npm/tap-snapshots/test/lib/commands/config.js.test.cjs
      @@ -30,7 +30,6 @@ exports[`test/lib/commands/config.js TAP config list --json > output matches sna
         "cafile": null,
         "call": "",
         "cert": null,
      -  "ci-name": null,
         "cidr": null,
         "color": true,
         "commit-hooks": true,
      @@ -147,7 +146,6 @@ exports[`test/lib/commands/config.js TAP config list --json > output matches sna
         "tag": "latest",
         "tag-version-prefix": "v",
         "timing": false,
      -  "tmp": "{TMP}",
         "umask": 0,
         "unicode": false,
         "update-notifier": true,
      @@ -161,8 +159,7 @@ exports[`test/lib/commands/config.js TAP config list --json > output matches sna
         "workspaces": null,
         "workspaces-update": true,
         "yes": null,
      -  "npm-version": "{NPM-VERSION}",
      -  "metrics-registry": "https://registry.npmjs.org/"
      +  "npm-version": "{NPM-VERSION}"
       }
       `
       
      @@ -187,7 +184,6 @@ cache-min = 0
       cafile = null
       call = ""
       cert = null
      -ci-name = null
       cidr = null
       color = true
       commit-hooks = true
      @@ -254,7 +250,6 @@ logs-max = 10
       ; long = false ; overridden by cli
       maxsockets = 15
       message = "%s"
      -metrics-registry = "https://registry.npmjs.org/"
       node-options = null
       noproxy = [""]
       npm-version = "{NPM-VERSION}"
      @@ -306,7 +301,6 @@ strict-ssl = true
       tag = "latest"
       tag-version-prefix = "v"
       timing = false
      -tmp = "{TMP}"
       umask = 0
       unicode = false
       update-notifier = true
      diff --git a/deps/npm/tap-snapshots/test/lib/commands/doctor.js.test.cjs b/deps/npm/tap-snapshots/test/lib/commands/doctor.js.test.cjs
      index b7ea39ac4de0ef..98d10c2bb5d4bb 100644
      --- a/deps/npm/tap-snapshots/test/lib/commands/doctor.js.test.cjs
      +++ b/deps/npm/tap-snapshots/test/lib/commands/doctor.js.test.cjs
      @@ -180,9 +180,9 @@ Object {
       
       exports[`test/lib/commands/doctor.js TAP bad proxy > output 1`] = `
       Check                               Value   Recommendation/Notes
      -npm ping                            not ok  unsupported proxy protocol: 'ssh:'
      -npm -v                              not ok  Error: unsupported proxy protocol: 'ssh:'
      -node -v                             not ok  Error: unsupported proxy protocol: 'ssh:'
      +npm ping                            not ok  Invalid protocol \`ssh:\` connecting to proxy \`npmjs.org\`
      +npm -v                              not ok  Error: Invalid protocol \`ssh:\` connecting to proxy \`npmjs.org\`
      +node -v                             not ok  Error: Invalid protocol \`ssh:\` connecting to proxy \`npmjs.org\`
       npm config get registry             ok      using default registry (https://registry.npmjs.org/)
       git executable in PATH              ok      /path/to/git
       global bin folder in PATH           ok      {CWD}/global/bin
      diff --git a/deps/npm/tap-snapshots/test/lib/commands/search.js.test.cjs b/deps/npm/tap-snapshots/test/lib/commands/search.js.test.cjs
      index bfa4b42182e1ea..a47cdba22003fe 100644
      --- a/deps/npm/tap-snapshots/test/lib/commands/search.js.test.cjs
      +++ b/deps/npm/tap-snapshots/test/lib/commands/search.js.test.cjs
      @@ -24,6 +24,7 @@ NAME                      | DESCRIPTION          | AUTHOR          | DATE
       @npmcli/map-workspaces    | Retrieves a…         | =nlf…           | 2020-09-30 | 1.0.1    | npm npmcli libnpm cli workspaces map-workspaces
       libnpmversion             | library to do the…   | =nlf…           | 2020-11-04 | 1.0.7    |
       @types/libnpmsearch       | TypeScript…          | =types          | 2019-09-26 | 2.0.1    |
      +pkg-no-desc               |                      | =lukekarrys     | 2019-09-26 | 1.0.0    |
       `
       
       exports[`test/lib/commands/search.js TAP search  --color > should have expected search results with color 1`] = `
      @@ -41,6 +42,7 @@ NAME                      | DESCRIPTION          | AUTHOR          | DATE
       @npmcli/map-workspaces    | Retrieves a…         | =nlf…           | 2020-09-30 | 1.0.1    | npm npmcli libnpm cli workspaces map-workspaces
       libnpmversion             | library to do the…   | =nlf…           | 2020-11-04 | 1.0.7    | 
       @types/libnpmsearch       | TypeScript…          | =types          | 2019-09-26 | 2.0.1    | 
      +pkg-no-desc               |                      | =lukekarrys     | 2019-09-26 | 1.0.0    | 
       `
       
       exports[`test/lib/commands/search.js TAP search  --parseable > should have expected search results as parseable 1`] = `
      @@ -57,6 +59,7 @@ libnpmfund	Programmatic API for npm fund	=nlf =ruyadorno =darcyclarke =isaacs	20
       @npmcli/map-workspaces	Retrieves a name:pathname Map for a given workspaces config	=nlf =ruyadorno =darcyclarke =isaacs	2020-09-30 	1.0.1	npm npmcli libnpm cli workspaces map-workspaces
       libnpmversion	library to do the things that 'npm version' does	=nlf =ruyadorno =darcyclarke =isaacs	2020-11-04 	1.0.7
       @types/libnpmsearch	TypeScript definitions for libnpmsearch	=types	2019-09-26 	2.0.1
      +pkg-no-desc		=lukekarrys	2019-09-26 	1.0.0
       `
       
       exports[`test/lib/commands/search.js TAP search  > should have filtered expected search results 1`] = `
      @@ -80,6 +83,7 @@ libnpmfund                | Programmatic API…    | =nlf…           | 2020-12
       @npmcli/map-workspaces    | Retrieves a…         | =nlf…           | 2020-09-30 | 1.0.1    | npm npmcli libnpm cli workspaces map-workspaces
       libnpmversion             | library to do the…   | =nlf…           | 2020-11-04 | 1.0.7    |
       @types/libnpmsearch       | TypeScript…          | =types          | 2019-09-26 | 2.0.1    |
      +pkg-no-desc               |                      | =lukekarrys     | 2019-09-26 | 1.0.0    |
       `
       
       exports[`test/lib/commands/search.js TAP search exclude forward slash > results should not have libnpmversion 1`] = `
      @@ -96,6 +100,7 @@ libnpmpublish             | Programmatic API…    | =nlf…           | 2020-11
       libnpmfund                | Programmatic API…    | =nlf…           | 2020-12-08 | 1.0.2    | npm npmcli libnpm cli git fund gitfund
       @npmcli/map-workspaces    | Retrieves a…         | =nlf…           | 2020-09-30 | 1.0.1    | npm npmcli libnpm cli workspaces map-workspaces
       @types/libnpmsearch       | TypeScript…          | =types          | 2019-09-26 | 2.0.1    |
      +pkg-no-desc               |                      | =lukekarrys     | 2019-09-26 | 1.0.0    |
       `
       
       exports[`test/lib/commands/search.js TAP search exclude regex > results should not have libnpmversion 1`] = `
      @@ -112,6 +117,7 @@ libnpmpublish             | Programmatic API…    | =nlf…           | 2020-11
       libnpmfund                | Programmatic API…    | =nlf…           | 2020-12-08 | 1.0.2    | npm npmcli libnpm cli git fund gitfund
       @npmcli/map-workspaces    | Retrieves a…         | =nlf…           | 2020-09-30 | 1.0.1    | npm npmcli libnpm cli workspaces map-workspaces
       @types/libnpmsearch       | TypeScript…          | =types          | 2019-09-26 | 2.0.1    |
      +pkg-no-desc               |                      | =lukekarrys     | 2019-09-26 | 1.0.0    |
       `
       
       exports[`test/lib/commands/search.js TAP search exclude string > results should not have libnpmversion 1`] = `
      @@ -128,6 +134,7 @@ libnpmpublish             | Programmatic API…    | =nlf…           | 2020-11
       libnpmfund                | Programmatic API…    | =nlf…           | 2020-12-08 | 1.0.2    | npm npmcli libnpm cli git fund gitfund
       @npmcli/map-workspaces    | Retrieves a…         | =nlf…           | 2020-09-30 | 1.0.1    | npm npmcli libnpm cli workspaces map-workspaces
       @types/libnpmsearch       | TypeScript…          | =types          | 2019-09-26 | 2.0.1    |
      +pkg-no-desc               |                      | =lukekarrys     | 2019-09-26 | 1.0.0    |
       `
       
       exports[`test/lib/commands/search.js TAP search exclude username with upper case letters > results should not have nlf 1`] = `
      @@ -135,4 +142,5 @@ NAME                      | DESCRIPTION          | AUTHOR          | DATE
       @evocateur/libnpmaccess   | programmatic…        | =evocateur      | 2019-07-16 | 3.1.2    |
       @evocateur/libnpmpublish  | Programmatic API…    | =evocateur      | 2019-07-16 | 1.2.2    |
       @types/libnpmsearch       | TypeScript…          | =types          | 2019-09-26 | 2.0.1    |
      +pkg-no-desc               |                      | =lukekarrys     | 2019-09-26 | 1.0.0    |
       `
      diff --git a/deps/npm/tap-snapshots/test/lib/docs.js.test.cjs b/deps/npm/tap-snapshots/test/lib/docs.js.test.cjs
      index 4875ebae6952b2..463b0862d2be8d 100644
      --- a/deps/npm/tap-snapshots/test/lib/docs.js.test.cjs
      +++ b/deps/npm/tap-snapshots/test/lib/docs.js.test.cjs
      @@ -1822,20 +1822,6 @@ registry-scoped "certfile" path like
       
       
       
      -#### \`ci-name\`
      -
      -* Default: The name of the current CI system, or \`null\` when not on a known CI
      -  platform.
      -* Type: null or String
      -* DEPRECATED: This config is deprecated and will not be changeable in future
      -  version of npm.
      -
      -The name of a continuous integration system. If not set explicitly, npm will
      -detect the current CI environment using the
      -[\`ci-info\`](http://npm.im/ci-info) module.
      -
      -
      -
       #### \`dev\`
       
       * Default: false
      @@ -1995,20 +1981,6 @@ Alias for \`--omit=dev\`
       Alias for --package-lock
       
       
      -
      -#### \`tmp\`
      -
      -* Default: The value returned by the Node.js \`os.tmpdir()\` method
      -  
      -* Type: Path
      -* DEPRECATED: This setting is no longer used. npm stores temporary files in a
      -  special location in the cache, and they are managed by
      -  [\`cacache\`](http://npm.im/cacache).
      -
      -Historically, the location where temporary files were stored. No longer
      -relevant.
      -
      -
       `
       
       exports[`test/lib/docs.js TAP config > all keys 1`] = `
      @@ -2031,7 +2003,6 @@ Array [
         "cafile",
         "call",
         "cert",
      -  "ci-name",
         "cidr",
         "color",
         "commit-hooks",
      @@ -2148,7 +2119,6 @@ Array [
         "tag",
         "tag-version-prefix",
         "timing",
      -  "tmp",
         "umask",
         "unicode",
         "update-notifier",
      @@ -2186,7 +2156,6 @@ Array [
         "cafile",
         "call",
         "cert",
      -  "ci-name",
         "cidr",
         "color",
         "commit-hooks",
      @@ -2314,7 +2283,6 @@ Array [
         "node-options",
         "prefix",
         "timing",
      -  "tmp",
         "update-notifier",
         "usage",
         "userconfig",
      @@ -2343,7 +2311,6 @@ Object {
         "call": "",
         "cert": null,
         "cidr": null,
      -  "ciName": "{ci}",
         "color": false,
         "commitHooks": true,
         "defaultTag": "latest",
      @@ -2367,7 +2334,6 @@ Object {
         "gitTagVersion": true,
         "global": false,
         "globalconfig": "{CWD}/global/etc/npmrc",
      -  "hashAlgorithm": "sha1",
         "heading": "npm",
         "httpsProxy": null,
         "ifPresent": false,
      diff --git a/deps/npm/tap-snapshots/test/lib/utils/exit-handler.js.test.cjs b/deps/npm/tap-snapshots/test/lib/utils/exit-handler.js.test.cjs
      index 93711275392339..3e7bc4570dd4ad 100644
      --- a/deps/npm/tap-snapshots/test/lib/utils/exit-handler.js.test.cjs
      +++ b/deps/npm/tap-snapshots/test/lib/utils/exit-handler.js.test.cjs
      @@ -63,4 +63,5 @@ verbose exit 1
       timing npm Completed in {TIME}ms
       verbose code 1
       error  A complete log of this run can be found in: {CWD}/cache/_logs/{DATE}-debug-0.log
      +silly logfile done cleaning log files
       `
      diff --git a/deps/npm/test/fixtures/libnpmsearch-stream-result.js b/deps/npm/test/fixtures/libnpmsearch-stream-result.js
      index 1ec8b7b113d6b8..ac792b1c087c8f 100644
      --- a/deps/npm/test/fixtures/libnpmsearch-stream-result.js
      +++ b/deps/npm/test/fixtures/libnpmsearch-stream-result.js
      @@ -275,4 +275,11 @@ module.exports = [
           publisher: { username: 'types', email: 'ts-npm-types@microsoft.com' },
           maintainers: [{ username: 'types', email: 'ts-npm-types@microsoft.com' }],
         },
      +  {
      +    name: 'pkg-no-desc',
      +    scope: 'unscoped',
      +    version: '1.0.0',
      +    date: '2019-09-26T22:24:28.713Z',
      +    maintainers: [{ username: 'lukekarrys', email: 'lukekarrys' }],
      +  },
       ]
      diff --git a/deps/npm/test/fixtures/sandbox.js b/deps/npm/test/fixtures/sandbox.js
      index 2c4e5c2968a38c..5be02fcf80c1eb 100644
      --- a/deps/npm/test/fixtures/sandbox.js
      +++ b/deps/npm/test/fixtures/sandbox.js
      @@ -42,11 +42,6 @@ const _get = Symbol('sandbox.proxy.get')
       const _set = Symbol('sandbox.proxy.set')
       const _logs = Symbol('sandbox.logs')
       
      -// these config keys can be redacted widely
      -const redactedDefaults = [
      -  'tmp',
      -]
      -
       // we can't just replace these values everywhere because they're known to be
       // very short strings that could be present all over the place, so we only
       // replace them if they're located within quotes for now
      @@ -161,12 +156,6 @@ class Sandbox extends EventEmitter {
           // and we replaced the node version first, the real execPath we're trying
           // to replace would no longer be represented, and be missed.
           if (this[_npm]) {
      -      // replace default config values with placeholders
      -      for (const name of redactedDefaults) {
      -        const value = this[_npm].config.defaults[name]
      -        clean = clean.split(normalize(value)).join(`{${name.toUpperCase()}}`)
      -      }
      -
             // replace vague default config values that are present within quotes
             // with placeholders
             for (const name of vagueRedactedDefaults) {
      diff --git a/deps/npm/test/lib/commands/audit.js b/deps/npm/test/lib/commands/audit.js
      index 4014e733873519..4a776e89bd9e9c 100644
      --- a/deps/npm/test/lib/commands/audit.js
      +++ b/deps/npm/test/lib/commands/audit.js
      @@ -1699,16 +1699,12 @@ t.test('audit signatures', async t => {
           const { npm } = await loadMockNpm(t, {
             prefixDir: installWithMultipleDeps,
             mocks: {
      -        sigstore: {
      -          sigstore: {
      -            tuf: {
      -              client: async () => ({
      -                getTarget: async () => {
      -                  throw new Error('error refreshing TUF metadata')
      -                },
      -              }),
      +        '@sigstore/tuf': {
      +          initTUF: async () => ({
      +            getTarget: async () => {
      +              throw new Error('error refreshing TUF metadata')
                   },
      -          },
      +          }),
               },
             },
           })
      @@ -1877,9 +1873,7 @@ t.test('audit signatures', async t => {
             prefixDir: installWithValidAttestations,
             mocks: {
               pacote: t.mock('pacote', {
      -          sigstore: {
      -            sigstore: { verify: async () => true },
      -          },
      +          sigstore: { verify: async () => true },
               }),
             },
           })
      @@ -1904,9 +1898,7 @@ t.test('audit signatures', async t => {
             prefixDir: installWithMultipleValidAttestations,
             mocks: {
               pacote: t.mock('pacote', {
      -          sigstore: {
      -            sigstore: { verify: async () => true },
      -          },
      +          sigstore: { verify: async () => true },
               }),
             },
           })
      @@ -1937,10 +1929,8 @@ t.test('audit signatures', async t => {
             mocks: {
               pacote: t.mock('pacote', {
                 sigstore: {
      -            sigstore: {
      -              verify: async () => {
      -                throw new Error(`artifact signature verification failed`)
      -              },
      +            verify: async () => {
      +              throw new Error(`artifact signature verification failed`)
                   },
                 },
               }),
      @@ -1974,10 +1964,8 @@ t.test('audit signatures', async t => {
             mocks: {
               pacote: t.mock('pacote', {
                 sigstore: {
      -            sigstore: {
      -              verify: async () => {
      -                throw new Error(`artifact signature verification failed`)
      -              },
      +            verify: async () => {
      +              throw new Error(`artifact signature verification failed`)
                   },
                 },
               }),
      @@ -2005,10 +1993,8 @@ t.test('audit signatures', async t => {
             mocks: {
               pacote: t.mock('pacote', {
                 sigstore: {
      -            sigstore: {
      -              verify: async () => {
      -                throw new Error(`artifact signature verification failed`)
      -              },
      +            verify: async () => {
      +              throw new Error(`artifact signature verification failed`)
                   },
                 },
               }),
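
The reworked mocks above track the move from the monolithic sigstore package (sigstore.tuf.client()) to @sigstore/tuf and its initTUF() entry point. Based purely on the mock shape, the code under test is exercised through a call pattern roughly like the sketch below; the function name and target name are illustrative assumptions, not lines from npm's sources:

    // sketch of the call shape the @sigstore/tuf mock above stands in for
    const { initTUF } = require('@sigstore/tuf')

    async function fetchRegistryKeys () {
      const tuf = await initTUF()
      // getTarget fetches a named target from the TUF repository; the mock above
      // makes this call throw to exercise the error path in `npm audit signatures`
      return tuf.getTarget('registry.npmjs.org/keys.json')
    }
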
      diff --git a/deps/npm/test/lib/commands/run-script.js b/deps/npm/test/lib/commands/run-script.js
      index cb54a7f51e9002..24f51400e8dfc3 100644
      --- a/deps/npm/test/lib/commands/run-script.js
      +++ b/deps/npm/test/lib/commands/run-script.js
      @@ -781,12 +781,7 @@ t.test('workspaces', async t => {
         t.test('missing scripts in all workspaces', async t => {
           const { runScript, RUN_SCRIPTS, cleanLogs } = await mockWorkspaces(t, { exec: null })
       
      -    await t.rejects(
      -      runScript.exec(['missing-script']),
      -      /Missing script: missing-script/,
      -      'should throw missing script error'
      -    )
      -
      +    await runScript.exec(['missing-script'])
           t.match(RUN_SCRIPTS(), [])
           t.strictSame(
             cleanLogs(),
      diff --git a/deps/npm/test/lib/utils/exit-handler.js b/deps/npm/test/lib/utils/exit-handler.js
      index f553e1a2ea518d..3eb5840985b8f5 100644
      --- a/deps/npm/test/lib/utils/exit-handler.js
      +++ b/deps/npm/test/lib/utils/exit-handler.js
      @@ -132,6 +132,8 @@ t.test('handles unknown error with logs and debug file', async (t) => {
         const { exitHandler, debugFile, logs } = await mockExitHandler(t)
       
         await exitHandler(err('Unknown error', 'ECODE'))
      +  // force logfile cleaning logs to happen since those are purposefully not awaited
      +  await require('timers/promises').setTimeout(200)
       
         const fileLogs = await debugFile()
         const fileLines = fileLogs.split('\n')
      @@ -141,14 +143,19 @@ t.test('handles unknown error with logs and debug file', async (t) => {
       
         t.equal(process.exitCode, 1)
       
      +  let skippedLogs = 0
         logs.forEach((logItem, i) => {
           const logLines = format(i, ...logItem).trim().split(os.EOL)
      -    logLines.forEach((line) => {
      +    for (const line of logLines) {
      +      if (line.includes('logfile') && line.includes('cleaning')) {
      +        skippedLogs++
      +        continue
      +      }
             t.match(fileLogs.trim(), line, 'log appears in debug file')
      -    })
      +    }
         })
       
      -  t.equal(logs.length, parseInt(lastLog) + 1)
      +  t.equal(logs.length - skippedLogs, parseInt(lastLog) + 1)
         t.match(logs.error, [
           ['code', 'ECODE'],
           ['ERR SUMMARY', 'Unknown error'],