diff --git a/deps/npm/bin/npm.ps1 b/deps/npm/bin/npm.ps1
index 04a1fd478ef9dd..5993adaf556621 100644
--- a/deps/npm/bin/npm.ps1
+++ b/deps/npm/bin/npm.ps1
@@ -22,11 +22,27 @@ if (Test-Path $NPM_PREFIX_NPM_CLI_JS) {
$NPM_CLI_JS=$NPM_PREFIX_NPM_CLI_JS
}
-# Support pipeline input
-if ($MyInvocation.ExpectingInput) {
+if ($MyInvocation.ExpectingInput) { # takes pipeline input
$input | & $NODE_EXE $NPM_CLI_JS $args
-} else {
+} elseif (-not $MyInvocation.Line) { # used "-File" argument
& $NODE_EXE $NPM_CLI_JS $args
+} else { # used "-Command" argument
+ if ($MyInvocation.Statement) {
+ $NPM_ORIGINAL_COMMAND = $MyInvocation.Statement
+ } else {
+ $NPM_ORIGINAL_COMMAND = (
+ [Management.Automation.InvocationInfo].GetProperty('ScriptPosition', [Reflection.BindingFlags] 'Instance, NonPublic')
+ ).GetValue($MyInvocation).Text
+ }
+
+ $NODE_EXE = $NODE_EXE.Replace("``", "````")
+ $NPM_CLI_JS = $NPM_CLI_JS.Replace("``", "````")
+
+ $NPM_NO_REDIRECTS_COMMAND = [Management.Automation.Language.Parser]::ParseInput($NPM_ORIGINAL_COMMAND, [ref] $null, [ref] $null).
+ EndBlock.Statements.PipelineElements.CommandElements.Extent.Text -join ' '
+ $NPM_ARGS = $NPM_NO_REDIRECTS_COMMAND.Substring($MyInvocation.InvocationName.Length).Trim()
+
+ Invoke-Expression "& `"$NODE_EXE`" `"$NPM_CLI_JS`" $NPM_ARGS"
}
exit $LASTEXITCODE
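
Note (not part of the patch): a minimal sketch of the AST technique the new npm.ps1/npx.ps1 "-Command" branch relies on. [Management.Automation.Language.Parser]::ParseInput parses the original command line; redirections land in a separate Redirections property of the CommandAst, so rejoining CommandElements reconstructs the invocation without "> file" or "2>&1". The literal command text below is a hypothetical example, not taken from the patch:

    # Assumes Windows PowerShell 3+ / PowerShell 7; member enumeration walks
    # Statements -> PipelineElements -> CommandElements.
    $line = 'npm install --save-dev "left pad" > install.log 2>&1'
    $ast  = [System.Management.Automation.Language.Parser]::ParseInput($line, [ref] $null, [ref] $null)
    $ast.EndBlock.Statements.PipelineElements.CommandElements.Extent.Text -join ' '
    # Output: npm install --save-dev "left pad"   (redirections stripped)
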
diff --git a/deps/npm/bin/npx.ps1 b/deps/npm/bin/npx.ps1
index 28dae51b22ca93..cc1aa047bdc217 100644
--- a/deps/npm/bin/npx.ps1
+++ b/deps/npm/bin/npx.ps1
@@ -22,11 +22,27 @@ if (Test-Path $NPM_PREFIX_NPX_CLI_JS) {
$NPX_CLI_JS=$NPM_PREFIX_NPX_CLI_JS
}
-# Support pipeline input
-if ($MyInvocation.ExpectingInput) {
+if ($MyInvocation.ExpectingInput) { # takes pipeline input
$input | & $NODE_EXE $NPX_CLI_JS $args
-} else {
+} elseif (-not $MyInvocation.Line) { # used "-File" argument
& $NODE_EXE $NPX_CLI_JS $args
+} else { # used "-Command" argument
+ if ($MyInvocation.Statement) {
+ $NPX_ORIGINAL_COMMAND = $MyInvocation.Statement
+ } else {
+ $NPX_ORIGINAL_COMMAND = (
+ [Management.Automation.InvocationInfo].GetProperty('ScriptPosition', [Reflection.BindingFlags] 'Instance, NonPublic')
+ ).GetValue($MyInvocation).Text
+ }
+
+ $NODE_EXE = $NODE_EXE.Replace("``", "````")
+ $NPX_CLI_JS = $NPX_CLI_JS.Replace("``", "````")
+
+ $NPX_NO_REDIRECTS_COMMAND = [Management.Automation.Language.Parser]::ParseInput($NPX_ORIGINAL_COMMAND, [ref] $null, [ref] $null).
+ EndBlock.Statements.PipelineElements.CommandElements.Extent.Text -join ' '
+ $NPX_ARGS = $NPX_NO_REDIRECTS_COMMAND.Substring($MyInvocation.InvocationName.Length).Trim()
+
+ Invoke-Expression "& `"$NODE_EXE`" `"$NPX_CLI_JS`" $NPX_ARGS"
}
exit $LASTEXITCODE
diff --git a/deps/npm/docs/content/commands/npm-ls.md b/deps/npm/docs/content/commands/npm-ls.md
index 69bc86f85f3508..76046cf66c4fa8 100644
--- a/deps/npm/docs/content/commands/npm-ls.md
+++ b/deps/npm/docs/content/commands/npm-ls.md
@@ -27,7 +27,7 @@ packages will *also* show the paths to the specified packages. For
example, running `npm ls promzard` in npm's source tree will show:
```bash
-npm@10.9.2 /path/to/npm
+npm@10.9.3 /path/to/npm
└─┬ init-package-json@0.0.4
└── promzard@0.1.5
```
diff --git a/deps/npm/docs/content/commands/npm.md b/deps/npm/docs/content/commands/npm.md
index 029b9fa7631544..da1cfef4f9f8db 100644
--- a/deps/npm/docs/content/commands/npm.md
+++ b/deps/npm/docs/content/commands/npm.md
@@ -14,7 +14,7 @@ Note: This command is unaware of workspaces.
### Version
-10.9.2
+10.9.3
### Description
diff --git a/deps/npm/docs/content/configuring-npm/package-json.md b/deps/npm/docs/content/configuring-npm/package-json.md
index 7e9daf1317717f..4663aa2b78244e 100644
--- a/deps/npm/docs/content/configuring-npm/package-json.md
+++ b/deps/npm/docs/content/configuring-npm/package-json.md
@@ -621,7 +621,7 @@ See [semver](https://github.com/npm/node-semver#versions) for more details about
* `tag` A specific version tagged and published as `tag` See [`npm
dist-tag`](/commands/npm-dist-tag)
* `path/path/path` See [Local Paths](#local-paths) below
-* `npm:@scope/pkg@version` Custom alias for a pacakge See [`package-spec`](/using-npm/package-spec#aliases)
+* `npm:@scope/pkg@version` Custom alias for a package See [`package-spec`](/using-npm/package-spec#aliases)
For example, these are all valid:
diff --git a/deps/npm/docs/output/commands/npm-access.html b/deps/npm/docs/output/commands/npm-access.html
index 9ce3c00e3525d8..70b6eadb4ce10c 100644
--- a/deps/npm/docs/output/commands/npm-access.html
+++ b/deps/npm/docs/output/commands/npm-access.html
@@ -141,9 +141,9 @@
-
+
npm-access
- @10.9.2
+ @10.9.3
Set access level on published packages
diff --git a/deps/npm/docs/output/commands/npm-adduser.html b/deps/npm/docs/output/commands/npm-adduser.html
index 5f6627229d2c8b..2f39972874ceed 100644
--- a/deps/npm/docs/output/commands/npm-adduser.html
+++ b/deps/npm/docs/output/commands/npm-adduser.html
@@ -141,9 +141,9 @@
-
+
npm-adduser
- @10.9.2
+ @10.9.3
Add a registry user account
diff --git a/deps/npm/docs/output/commands/npm-audit.html b/deps/npm/docs/output/commands/npm-audit.html
index a8934d172e3602..37ba1dbafd4a67 100644
--- a/deps/npm/docs/output/commands/npm-audit.html
+++ b/deps/npm/docs/output/commands/npm-audit.html
@@ -141,9 +141,9 @@
-
+
npm-audit
- @10.9.2
+ @10.9.3
Run a security audit
diff --git a/deps/npm/docs/output/commands/npm-bugs.html b/deps/npm/docs/output/commands/npm-bugs.html
index 3ec74a05673259..fd8333a8c0d82d 100644
--- a/deps/npm/docs/output/commands/npm-bugs.html
+++ b/deps/npm/docs/output/commands/npm-bugs.html
@@ -141,9 +141,9 @@
-
+
npm-bugs
- @10.9.2
+ @10.9.3
Report bugs for a package in a web browser
diff --git a/deps/npm/docs/output/commands/npm-cache.html b/deps/npm/docs/output/commands/npm-cache.html
index 0a4fd00a276a47..9c2ab2b626bb99 100644
--- a/deps/npm/docs/output/commands/npm-cache.html
+++ b/deps/npm/docs/output/commands/npm-cache.html
@@ -141,9 +141,9 @@
-
+
npm-cache
- @10.9.2
+ @10.9.3
Manipulates packages cache
diff --git a/deps/npm/docs/output/commands/npm-ci.html b/deps/npm/docs/output/commands/npm-ci.html
index 3af67dc55c81db..4bb718d95c4d59 100644
--- a/deps/npm/docs/output/commands/npm-ci.html
+++ b/deps/npm/docs/output/commands/npm-ci.html
@@ -141,9 +141,9 @@
-
+
npm-ci
- @10.9.2
+ @10.9.3
Clean install a project
diff --git a/deps/npm/docs/output/commands/npm-completion.html b/deps/npm/docs/output/commands/npm-completion.html
index 3f8b509e4ec8d6..0dfba0ac106c0d 100644
--- a/deps/npm/docs/output/commands/npm-completion.html
+++ b/deps/npm/docs/output/commands/npm-completion.html
@@ -141,9 +141,9 @@
-
+
npm-completion
- @10.9.2
+ @10.9.3
Tab Completion for npm
diff --git a/deps/npm/docs/output/commands/npm-config.html b/deps/npm/docs/output/commands/npm-config.html
index c9b9a2c7550bcc..e949f98b2673af 100644
--- a/deps/npm/docs/output/commands/npm-config.html
+++ b/deps/npm/docs/output/commands/npm-config.html
@@ -141,9 +141,9 @@
-
+
npm-config
- @10.9.2
+ @10.9.3
Manage the npm configuration files
diff --git a/deps/npm/docs/output/commands/npm-dedupe.html b/deps/npm/docs/output/commands/npm-dedupe.html
index 0aa8bf5a5bde77..4013eb9e666812 100644
--- a/deps/npm/docs/output/commands/npm-dedupe.html
+++ b/deps/npm/docs/output/commands/npm-dedupe.html
@@ -141,9 +141,9 @@
-
+
npm-dedupe
- @10.9.2
+ @10.9.3
Reduce duplication in the package tree
diff --git a/deps/npm/docs/output/commands/npm-deprecate.html b/deps/npm/docs/output/commands/npm-deprecate.html
index 0019583ee2135a..bcfd0680555466 100644
--- a/deps/npm/docs/output/commands/npm-deprecate.html
+++ b/deps/npm/docs/output/commands/npm-deprecate.html
@@ -141,9 +141,9 @@
-
+
npm-deprecate
- @10.9.2
+ @10.9.3
Deprecate a version of a package
diff --git a/deps/npm/docs/output/commands/npm-diff.html b/deps/npm/docs/output/commands/npm-diff.html
index fe2123ee60fdc8..167390fcd70237 100644
--- a/deps/npm/docs/output/commands/npm-diff.html
+++ b/deps/npm/docs/output/commands/npm-diff.html
@@ -141,9 +141,9 @@
-
+
npm-diff
- @10.9.2
+ @10.9.3
The registry diff command
diff --git a/deps/npm/docs/output/commands/npm-dist-tag.html b/deps/npm/docs/output/commands/npm-dist-tag.html
index dce3f752ba4eaa..484575ce7d3622 100644
--- a/deps/npm/docs/output/commands/npm-dist-tag.html
+++ b/deps/npm/docs/output/commands/npm-dist-tag.html
@@ -141,9 +141,9 @@
-
+
npm-dist-tag
- @10.9.2
+ @10.9.3
Modify package distribution tags
diff --git a/deps/npm/docs/output/commands/npm-docs.html b/deps/npm/docs/output/commands/npm-docs.html
index caef5afe3b1bd4..f1ec5477a13375 100644
--- a/deps/npm/docs/output/commands/npm-docs.html
+++ b/deps/npm/docs/output/commands/npm-docs.html
@@ -141,9 +141,9 @@
-
+
npm-docs
- @10.9.2
+ @10.9.3
Open documentation for a package in a web browser
diff --git a/deps/npm/docs/output/commands/npm-doctor.html b/deps/npm/docs/output/commands/npm-doctor.html
index d9f7a71450ab75..68e1127b50df92 100644
--- a/deps/npm/docs/output/commands/npm-doctor.html
+++ b/deps/npm/docs/output/commands/npm-doctor.html
@@ -141,9 +141,9 @@
-
+
npm-doctor
- @10.9.2
+ @10.9.3
Check the health of your npm environment
diff --git a/deps/npm/docs/output/commands/npm-edit.html b/deps/npm/docs/output/commands/npm-edit.html
index ab835ddcd352f9..bbb83bd907e61d 100644
--- a/deps/npm/docs/output/commands/npm-edit.html
+++ b/deps/npm/docs/output/commands/npm-edit.html
@@ -141,9 +141,9 @@
-
+
npm-edit
- @10.9.2
+ @10.9.3
Edit an installed package
diff --git a/deps/npm/docs/output/commands/npm-exec.html b/deps/npm/docs/output/commands/npm-exec.html
index b5b8f66bdbb54f..1397bc5048ab30 100644
--- a/deps/npm/docs/output/commands/npm-exec.html
+++ b/deps/npm/docs/output/commands/npm-exec.html
@@ -141,9 +141,9 @@
-
+
npm-exec
- @10.9.2
+ @10.9.3
Run a command from a local or remote npm package
diff --git a/deps/npm/docs/output/commands/npm-explain.html b/deps/npm/docs/output/commands/npm-explain.html
index 812ce85f73a130..c0d12ef1a3e381 100644
--- a/deps/npm/docs/output/commands/npm-explain.html
+++ b/deps/npm/docs/output/commands/npm-explain.html
@@ -141,9 +141,9 @@
-
+
npm-explain
- @10.9.2
+ @10.9.3
Explain installed packages
diff --git a/deps/npm/docs/output/commands/npm-explore.html b/deps/npm/docs/output/commands/npm-explore.html
index 20d761c0db149c..31778889549a93 100644
--- a/deps/npm/docs/output/commands/npm-explore.html
+++ b/deps/npm/docs/output/commands/npm-explore.html
@@ -141,9 +141,9 @@
-
+
npm-explore
- @10.9.2
+ @10.9.3
Browse an installed package
diff --git a/deps/npm/docs/output/commands/npm-find-dupes.html b/deps/npm/docs/output/commands/npm-find-dupes.html
index 9b2810cf1b17ac..fb41969e5629f2 100644
--- a/deps/npm/docs/output/commands/npm-find-dupes.html
+++ b/deps/npm/docs/output/commands/npm-find-dupes.html
@@ -141,9 +141,9 @@
-
+
npm-find-dupes
- @10.9.2
+ @10.9.3
Find duplication in the package tree
diff --git a/deps/npm/docs/output/commands/npm-fund.html b/deps/npm/docs/output/commands/npm-fund.html
index 91e0a71c3b32af..2060175c577477 100644
--- a/deps/npm/docs/output/commands/npm-fund.html
+++ b/deps/npm/docs/output/commands/npm-fund.html
@@ -141,9 +141,9 @@
-
+
npm-fund
- @10.9.2
+ @10.9.3
Retrieve funding information
diff --git a/deps/npm/docs/output/commands/npm-help-search.html b/deps/npm/docs/output/commands/npm-help-search.html
index 35e2ec587c48e8..379ea248e49f2b 100644
--- a/deps/npm/docs/output/commands/npm-help-search.html
+++ b/deps/npm/docs/output/commands/npm-help-search.html
@@ -141,9 +141,9 @@
-
+
npm-help-search
- @10.9.2
+ @10.9.3
Search npm help documentation
diff --git a/deps/npm/docs/output/commands/npm-help.html b/deps/npm/docs/output/commands/npm-help.html
index 17403f14c89427..b7bb989a67165a 100644
--- a/deps/npm/docs/output/commands/npm-help.html
+++ b/deps/npm/docs/output/commands/npm-help.html
@@ -141,9 +141,9 @@
-
+
npm-help
- @10.9.2
+ @10.9.3
Get help on npm
diff --git a/deps/npm/docs/output/commands/npm-hook.html b/deps/npm/docs/output/commands/npm-hook.html
index d06784faf03624..9e55028bc6bae4 100644
--- a/deps/npm/docs/output/commands/npm-hook.html
+++ b/deps/npm/docs/output/commands/npm-hook.html
@@ -141,9 +141,9 @@
-
+
npm-hook
- @10.9.2
+ @10.9.3
Manage registry hooks
diff --git a/deps/npm/docs/output/commands/npm-init.html b/deps/npm/docs/output/commands/npm-init.html
index 430763db6ba6af..125a6ab5b7c272 100644
--- a/deps/npm/docs/output/commands/npm-init.html
+++ b/deps/npm/docs/output/commands/npm-init.html
@@ -141,9 +141,9 @@
-
+
npm-init
- @10.9.2
+ @10.9.3
Create a package.json file
diff --git a/deps/npm/docs/output/commands/npm-install-ci-test.html b/deps/npm/docs/output/commands/npm-install-ci-test.html
index 6a29d2d54f679e..c7f5aeec64c2c6 100644
--- a/deps/npm/docs/output/commands/npm-install-ci-test.html
+++ b/deps/npm/docs/output/commands/npm-install-ci-test.html
@@ -141,9 +141,9 @@
-
+
npm-install-ci-test
- @10.9.2
+ @10.9.3
Install a project with a clean slate and run tests
diff --git a/deps/npm/docs/output/commands/npm-install-test.html b/deps/npm/docs/output/commands/npm-install-test.html
index 32bd2271074fb6..d5679304f8ce35 100644
--- a/deps/npm/docs/output/commands/npm-install-test.html
+++ b/deps/npm/docs/output/commands/npm-install-test.html
@@ -141,9 +141,9 @@
-
+
npm-install-test
- @10.9.2
+ @10.9.3
Install package(s) and run tests
diff --git a/deps/npm/docs/output/commands/npm-install.html b/deps/npm/docs/output/commands/npm-install.html
index db7d717d18160b..586d01806dc802 100644
--- a/deps/npm/docs/output/commands/npm-install.html
+++ b/deps/npm/docs/output/commands/npm-install.html
@@ -141,9 +141,9 @@
-
+
npm-install
- @10.9.2
+ @10.9.3
Install a package
diff --git a/deps/npm/docs/output/commands/npm-link.html b/deps/npm/docs/output/commands/npm-link.html
index 5778cc2a6268d2..dc48efc55adec5 100644
--- a/deps/npm/docs/output/commands/npm-link.html
+++ b/deps/npm/docs/output/commands/npm-link.html
@@ -141,9 +141,9 @@
-
+
npm-link
- @10.9.2
+ @10.9.3
Symlink a package folder
diff --git a/deps/npm/docs/output/commands/npm-login.html b/deps/npm/docs/output/commands/npm-login.html
index 81555fcecefd3e..e0f65c54f1ae54 100644
--- a/deps/npm/docs/output/commands/npm-login.html
+++ b/deps/npm/docs/output/commands/npm-login.html
@@ -141,9 +141,9 @@
-
+
npm-login
- @10.9.2
+ @10.9.3
Login to a registry user account
diff --git a/deps/npm/docs/output/commands/npm-logout.html b/deps/npm/docs/output/commands/npm-logout.html
index 1b6cdf8b923034..762db67a14a92d 100644
--- a/deps/npm/docs/output/commands/npm-logout.html
+++ b/deps/npm/docs/output/commands/npm-logout.html
@@ -141,9 +141,9 @@
-
+
npm-logout
- @10.9.2
+ @10.9.3
Log out of the registry
diff --git a/deps/npm/docs/output/commands/npm-ls.html b/deps/npm/docs/output/commands/npm-ls.html
index c9aa847abf1dd4..e78f94f486fa24 100644
--- a/deps/npm/docs/output/commands/npm-ls.html
+++ b/deps/npm/docs/output/commands/npm-ls.html
@@ -141,9 +141,9 @@
-
+
npm-ls
- @10.9.2
+ @10.9.3
List installed packages
@@ -168,7 +168,7 @@ Description
the results to only the paths to the packages named. Note that nested
packages will also show the paths to the specified packages. For
example, running npm ls promzard in npm's source tree will show:
-npm@10.9.2 /path/to/npm
+npm@10.9.3 /path/to/npm
└─┬ init-package-json@0.0.4
└── promzard@0.1.5
diff --git a/deps/npm/docs/output/commands/npm-org.html b/deps/npm/docs/output/commands/npm-org.html
index 6c45111f034994..a036d164bde65f 100644
--- a/deps/npm/docs/output/commands/npm-org.html
+++ b/deps/npm/docs/output/commands/npm-org.html
@@ -141,9 +141,9 @@
-
+
npm-org
- @10.9.2
+ @10.9.3
Manage orgs
diff --git a/deps/npm/docs/output/commands/npm-outdated.html b/deps/npm/docs/output/commands/npm-outdated.html
index 9be28fdfd239e1..04f8e5d7e00839 100644
--- a/deps/npm/docs/output/commands/npm-outdated.html
+++ b/deps/npm/docs/output/commands/npm-outdated.html
@@ -141,9 +141,9 @@
-
+
npm-outdated
- @10.9.2
+ @10.9.3
Check for outdated packages
diff --git a/deps/npm/docs/output/commands/npm-owner.html b/deps/npm/docs/output/commands/npm-owner.html
index fc0899e5e5d66b..16f7d512d1ca6c 100644
--- a/deps/npm/docs/output/commands/npm-owner.html
+++ b/deps/npm/docs/output/commands/npm-owner.html
@@ -141,9 +141,9 @@
-
+
npm-owner
- @10.9.2
+ @10.9.3
Manage package owners
diff --git a/deps/npm/docs/output/commands/npm-pack.html b/deps/npm/docs/output/commands/npm-pack.html
index 359d463780e510..a2d935991505f1 100644
--- a/deps/npm/docs/output/commands/npm-pack.html
+++ b/deps/npm/docs/output/commands/npm-pack.html
@@ -141,9 +141,9 @@
-
+
npm-pack
- @10.9.2
+ @10.9.3
Create a tarball from a package
diff --git a/deps/npm/docs/output/commands/npm-ping.html b/deps/npm/docs/output/commands/npm-ping.html
index 4b3fbbc698cbb7..16a587591db07a 100644
--- a/deps/npm/docs/output/commands/npm-ping.html
+++ b/deps/npm/docs/output/commands/npm-ping.html
@@ -141,9 +141,9 @@
-
+
npm-ping
- @10.9.2
+ @10.9.3
Ping npm registry
diff --git a/deps/npm/docs/output/commands/npm-pkg.html b/deps/npm/docs/output/commands/npm-pkg.html
index f1fe76e8073150..ea15ff20c96b97 100644
--- a/deps/npm/docs/output/commands/npm-pkg.html
+++ b/deps/npm/docs/output/commands/npm-pkg.html
@@ -141,9 +141,9 @@
-
+
npm-pkg
- @10.9.2
+ @10.9.3
Manages your package.json
diff --git a/deps/npm/docs/output/commands/npm-prefix.html b/deps/npm/docs/output/commands/npm-prefix.html
index 9708e55234ab4c..040fead6ce89f7 100644
--- a/deps/npm/docs/output/commands/npm-prefix.html
+++ b/deps/npm/docs/output/commands/npm-prefix.html
@@ -141,9 +141,9 @@
-
+
npm-prefix
- @10.9.2
+ @10.9.3
Display prefix
diff --git a/deps/npm/docs/output/commands/npm-profile.html b/deps/npm/docs/output/commands/npm-profile.html
index 6bb08b1fda63a6..cae32b74743e5b 100644
--- a/deps/npm/docs/output/commands/npm-profile.html
+++ b/deps/npm/docs/output/commands/npm-profile.html
@@ -141,9 +141,9 @@
-
+
npm-profile
- @10.9.2
+ @10.9.3
Change settings on your registry profile
diff --git a/deps/npm/docs/output/commands/npm-prune.html b/deps/npm/docs/output/commands/npm-prune.html
index 818522d63754f9..6c8a2bedc2f5f9 100644
--- a/deps/npm/docs/output/commands/npm-prune.html
+++ b/deps/npm/docs/output/commands/npm-prune.html
@@ -141,9 +141,9 @@
-
+
npm-prune
- @10.9.2
+ @10.9.3
Remove extraneous packages
diff --git a/deps/npm/docs/output/commands/npm-publish.html b/deps/npm/docs/output/commands/npm-publish.html
index 37c19e0b8f357c..e8c44e9249258e 100644
--- a/deps/npm/docs/output/commands/npm-publish.html
+++ b/deps/npm/docs/output/commands/npm-publish.html
@@ -141,9 +141,9 @@
-
+
npm-publish
- @10.9.2
+ @10.9.3
Publish a package
diff --git a/deps/npm/docs/output/commands/npm-query.html b/deps/npm/docs/output/commands/npm-query.html
index 4a0210e62f58ea..71c6807298594a 100644
--- a/deps/npm/docs/output/commands/npm-query.html
+++ b/deps/npm/docs/output/commands/npm-query.html
@@ -141,9 +141,9 @@
-
+
npm-query
- @10.9.2
+ @10.9.3
Dependency selector query
diff --git a/deps/npm/docs/output/commands/npm-rebuild.html b/deps/npm/docs/output/commands/npm-rebuild.html
index 0a356397c4a95b..3936a927c4724c 100644
--- a/deps/npm/docs/output/commands/npm-rebuild.html
+++ b/deps/npm/docs/output/commands/npm-rebuild.html
@@ -141,9 +141,9 @@
-
+
npm-rebuild
- @10.9.2
+ @10.9.3
Rebuild a package
diff --git a/deps/npm/docs/output/commands/npm-repo.html b/deps/npm/docs/output/commands/npm-repo.html
index ad86f81682375d..47a39d7fc2735d 100644
--- a/deps/npm/docs/output/commands/npm-repo.html
+++ b/deps/npm/docs/output/commands/npm-repo.html
@@ -141,9 +141,9 @@
-
+
npm-repo
- @10.9.2
+ @10.9.3
Open package repository page in the browser
diff --git a/deps/npm/docs/output/commands/npm-restart.html b/deps/npm/docs/output/commands/npm-restart.html
index 4a2244b048ecbc..a7db0633e39829 100644
--- a/deps/npm/docs/output/commands/npm-restart.html
+++ b/deps/npm/docs/output/commands/npm-restart.html
@@ -141,9 +141,9 @@
-
+
npm-restart
- @10.9.2
+ @10.9.3
Restart a package
diff --git a/deps/npm/docs/output/commands/npm-root.html b/deps/npm/docs/output/commands/npm-root.html
index 499bc84358b3e5..973afad93ecc9b 100644
--- a/deps/npm/docs/output/commands/npm-root.html
+++ b/deps/npm/docs/output/commands/npm-root.html
@@ -141,9 +141,9 @@
-
+
npm-root
- @10.9.2
+ @10.9.3
Display npm root
diff --git a/deps/npm/docs/output/commands/npm-run-script.html b/deps/npm/docs/output/commands/npm-run-script.html
index 9c0ef4fedbc16e..398c154096478c 100644
--- a/deps/npm/docs/output/commands/npm-run-script.html
+++ b/deps/npm/docs/output/commands/npm-run-script.html
@@ -141,9 +141,9 @@
-
+
npm-run-script
- @10.9.2
+ @10.9.3
Run arbitrary package scripts
diff --git a/deps/npm/docs/output/commands/npm-sbom.html b/deps/npm/docs/output/commands/npm-sbom.html
index b648df1654e8a6..1a5127e028f826 100644
--- a/deps/npm/docs/output/commands/npm-sbom.html
+++ b/deps/npm/docs/output/commands/npm-sbom.html
@@ -141,9 +141,9 @@
-
+
npm-sbom
- @10.9.2
+ @10.9.3
Generate a Software Bill of Materials (SBOM)
diff --git a/deps/npm/docs/output/commands/npm-search.html b/deps/npm/docs/output/commands/npm-search.html
index bfd70c2a8abe92..dd7ee35f3c8cda 100644
--- a/deps/npm/docs/output/commands/npm-search.html
+++ b/deps/npm/docs/output/commands/npm-search.html
@@ -141,9 +141,9 @@
-
+
npm-search
- @10.9.2
+ @10.9.3
Search for packages
diff --git a/deps/npm/docs/output/commands/npm-shrinkwrap.html b/deps/npm/docs/output/commands/npm-shrinkwrap.html
index 60d198f85ce67e..f2e9a00fc60271 100644
--- a/deps/npm/docs/output/commands/npm-shrinkwrap.html
+++ b/deps/npm/docs/output/commands/npm-shrinkwrap.html
@@ -141,9 +141,9 @@
-
+
npm-shrinkwrap
- @10.9.2
+ @10.9.3
Lock down dependency versions for publication
diff --git a/deps/npm/docs/output/commands/npm-star.html b/deps/npm/docs/output/commands/npm-star.html
index ccda8bb3297d70..e68322f6acd4c4 100644
--- a/deps/npm/docs/output/commands/npm-star.html
+++ b/deps/npm/docs/output/commands/npm-star.html
@@ -141,9 +141,9 @@
-
+
npm-star
- @10.9.2
+ @10.9.3
Mark your favorite packages
diff --git a/deps/npm/docs/output/commands/npm-stars.html b/deps/npm/docs/output/commands/npm-stars.html
index 2f9619190f0122..c465029671edd8 100644
--- a/deps/npm/docs/output/commands/npm-stars.html
+++ b/deps/npm/docs/output/commands/npm-stars.html
@@ -141,9 +141,9 @@
-
+
npm-stars
- @10.9.2
+ @10.9.3
View packages marked as favorites
diff --git a/deps/npm/docs/output/commands/npm-start.html b/deps/npm/docs/output/commands/npm-start.html
index fad3ce05c3a2c9..93452be11a1e23 100644
--- a/deps/npm/docs/output/commands/npm-start.html
+++ b/deps/npm/docs/output/commands/npm-start.html
@@ -141,9 +141,9 @@
-
+
npm-start
- @10.9.2
+ @10.9.3
Start a package
diff --git a/deps/npm/docs/output/commands/npm-stop.html b/deps/npm/docs/output/commands/npm-stop.html
index bc70086d9991d5..fe8894b78730b4 100644
--- a/deps/npm/docs/output/commands/npm-stop.html
+++ b/deps/npm/docs/output/commands/npm-stop.html
@@ -141,9 +141,9 @@
-
+
npm-stop
- @10.9.2
+ @10.9.3
Stop a package
diff --git a/deps/npm/docs/output/commands/npm-team.html b/deps/npm/docs/output/commands/npm-team.html
index e03442dc68bf9c..9ce5c3ce2a2803 100644
--- a/deps/npm/docs/output/commands/npm-team.html
+++ b/deps/npm/docs/output/commands/npm-team.html
@@ -141,9 +141,9 @@
-
+
npm-team
- @10.9.2
+ @10.9.3
Manage organization teams and team memberships
diff --git a/deps/npm/docs/output/commands/npm-test.html b/deps/npm/docs/output/commands/npm-test.html
index b30af4ed9b3b30..0c43d8f32c68bc 100644
--- a/deps/npm/docs/output/commands/npm-test.html
+++ b/deps/npm/docs/output/commands/npm-test.html
@@ -141,9 +141,9 @@
-
+
npm-test
- @10.9.2
+ @10.9.3
Test a package
diff --git a/deps/npm/docs/output/commands/npm-token.html b/deps/npm/docs/output/commands/npm-token.html
index 353709607b4bd4..8f3a5a18e573ec 100644
--- a/deps/npm/docs/output/commands/npm-token.html
+++ b/deps/npm/docs/output/commands/npm-token.html
@@ -141,9 +141,9 @@
-
+
npm-token
- @10.9.2
+ @10.9.3
Manage your authentication tokens
diff --git a/deps/npm/docs/output/commands/npm-uninstall.html b/deps/npm/docs/output/commands/npm-uninstall.html
index 633dbbe58f933e..fe8b4d2049439e 100644
--- a/deps/npm/docs/output/commands/npm-uninstall.html
+++ b/deps/npm/docs/output/commands/npm-uninstall.html
@@ -141,9 +141,9 @@
-
+
npm-uninstall
- @10.9.2
+ @10.9.3
Remove a package
diff --git a/deps/npm/docs/output/commands/npm-unpublish.html b/deps/npm/docs/output/commands/npm-unpublish.html
index e4f4090936d4ff..abe90f92af34ac 100644
--- a/deps/npm/docs/output/commands/npm-unpublish.html
+++ b/deps/npm/docs/output/commands/npm-unpublish.html
@@ -141,9 +141,9 @@
-
+
npm-unpublish
- @10.9.2
+ @10.9.3
Remove a package from the registry
diff --git a/deps/npm/docs/output/commands/npm-unstar.html b/deps/npm/docs/output/commands/npm-unstar.html
index 741ed8e707a4f9..71b98a1777d074 100644
--- a/deps/npm/docs/output/commands/npm-unstar.html
+++ b/deps/npm/docs/output/commands/npm-unstar.html
@@ -141,9 +141,9 @@
-
+
npm-unstar
- @10.9.2
+ @10.9.3
Remove an item from your favorite packages
diff --git a/deps/npm/docs/output/commands/npm-update.html b/deps/npm/docs/output/commands/npm-update.html
index 287ed19fe9f5f6..e69df00bca2963 100644
--- a/deps/npm/docs/output/commands/npm-update.html
+++ b/deps/npm/docs/output/commands/npm-update.html
@@ -141,9 +141,9 @@
-
+
npm-update
- @10.9.2
+ @10.9.3
Update packages
diff --git a/deps/npm/docs/output/commands/npm-version.html b/deps/npm/docs/output/commands/npm-version.html
index 43b978ffa94c80..d2ab3b88f85ab9 100644
--- a/deps/npm/docs/output/commands/npm-version.html
+++ b/deps/npm/docs/output/commands/npm-version.html
@@ -141,9 +141,9 @@
-
+
npm-version
- @10.9.2
+ @10.9.3
Bump a package version
diff --git a/deps/npm/docs/output/commands/npm-view.html b/deps/npm/docs/output/commands/npm-view.html
index 1e388cfb922462..4311e6b8c5d9f2 100644
--- a/deps/npm/docs/output/commands/npm-view.html
+++ b/deps/npm/docs/output/commands/npm-view.html
@@ -141,9 +141,9 @@
-
+
npm-view
- @10.9.2
+ @10.9.3
View registry info
diff --git a/deps/npm/docs/output/commands/npm-whoami.html b/deps/npm/docs/output/commands/npm-whoami.html
index 944a762ad4aea1..c24440bd1e59d5 100644
--- a/deps/npm/docs/output/commands/npm-whoami.html
+++ b/deps/npm/docs/output/commands/npm-whoami.html
@@ -141,9 +141,9 @@
-
+
npm-whoami
- @10.9.2
+ @10.9.3
Display npm username
diff --git a/deps/npm/docs/output/commands/npm.html b/deps/npm/docs/output/commands/npm.html
index eae0fdc7b20659..6e27691a0b2a56 100644
--- a/deps/npm/docs/output/commands/npm.html
+++ b/deps/npm/docs/output/commands/npm.html
@@ -141,9 +141,9 @@
-
+
npm
- @10.9.2
+ @10.9.3
javascript package manager
@@ -158,7 +158,7 @@ Table of contents
Note: This command is unaware of workspaces.
Version
-10.9.2
+10.9.3
Description
npm is the package manager for the Node JavaScript platform. It puts
modules in place so that node can find them, and manages dependency
diff --git a/deps/npm/docs/output/commands/npx.html b/deps/npm/docs/output/commands/npx.html
index ec5547306359f7..83a4ce10c5d91b 100644
--- a/deps/npm/docs/output/commands/npx.html
+++ b/deps/npm/docs/output/commands/npx.html
@@ -141,9 +141,9 @@
-
+
npx
- @10.9.2
+ @10.9.3
Run a command from a local or remote npm package
diff --git a/deps/npm/docs/output/configuring-npm/folders.html b/deps/npm/docs/output/configuring-npm/folders.html
index daca0c019a942b..74970ccf0445c4 100644
--- a/deps/npm/docs/output/configuring-npm/folders.html
+++ b/deps/npm/docs/output/configuring-npm/folders.html
@@ -141,9 +141,9 @@
-
+
folders
- @10.9.2
+ @10.9.3
Folder Structures Used by npm
diff --git a/deps/npm/docs/output/configuring-npm/install.html b/deps/npm/docs/output/configuring-npm/install.html
index abda3bcc06ab62..815253042195a2 100644
--- a/deps/npm/docs/output/configuring-npm/install.html
+++ b/deps/npm/docs/output/configuring-npm/install.html
@@ -141,9 +141,9 @@
-
+
install
- @10.9.2
+ @10.9.3
Download and install node and npm
diff --git a/deps/npm/docs/output/configuring-npm/npm-global.html b/deps/npm/docs/output/configuring-npm/npm-global.html
index daca0c019a942b..74970ccf0445c4 100644
--- a/deps/npm/docs/output/configuring-npm/npm-global.html
+++ b/deps/npm/docs/output/configuring-npm/npm-global.html
@@ -141,9 +141,9 @@
-
+
folders
- @10.9.2
+ @10.9.3
Folder Structures Used by npm
diff --git a/deps/npm/docs/output/configuring-npm/npm-json.html b/deps/npm/docs/output/configuring-npm/npm-json.html
index 1645e91a762b8f..dc128687164d57 100644
--- a/deps/npm/docs/output/configuring-npm/npm-json.html
+++ b/deps/npm/docs/output/configuring-npm/npm-json.html
@@ -141,9 +141,9 @@
-
+
package.json
- @10.9.2
+ @10.9.3
Specifics of npm's package.json handling
@@ -622,7 +622,7 @@ dependencies
user/repo See 'GitHub URLs' below
tag A specific version tagged and published as tag See npm dist-tag
path/path/path See Local Paths below
-npm:@scope/pkg@version Custom alias for a pacakge See package-spec
+npm:@scope/pkg@version Custom alias for a package See package-spec
For example, these are all valid:
{
diff --git a/deps/npm/docs/output/configuring-npm/npm-shrinkwrap-json.html b/deps/npm/docs/output/configuring-npm/npm-shrinkwrap-json.html
index 65f0b0c184bef9..bc586d135c0ac0 100644
--- a/deps/npm/docs/output/configuring-npm/npm-shrinkwrap-json.html
+++ b/deps/npm/docs/output/configuring-npm/npm-shrinkwrap-json.html
@@ -141,9 +141,9 @@
-
+
npm-shrinkwrap.json
- @10.9.2
+ @10.9.3
A publishable lockfile
diff --git a/deps/npm/docs/output/configuring-npm/npmrc.html b/deps/npm/docs/output/configuring-npm/npmrc.html
index e5e94afe63a94c..6f961dad20b536 100644
--- a/deps/npm/docs/output/configuring-npm/npmrc.html
+++ b/deps/npm/docs/output/configuring-npm/npmrc.html
@@ -141,9 +141,9 @@
-
+
npmrc
- @10.9.2
+ @10.9.3
The npm config files
diff --git a/deps/npm/docs/output/configuring-npm/package-json.html b/deps/npm/docs/output/configuring-npm/package-json.html
index 1645e91a762b8f..dc128687164d57 100644
--- a/deps/npm/docs/output/configuring-npm/package-json.html
+++ b/deps/npm/docs/output/configuring-npm/package-json.html
@@ -141,9 +141,9 @@
-
+
package.json
- @10.9.2
+ @10.9.3
Specifics of npm's package.json handling
@@ -622,7 +622,7 @@ dependencies
user/repo See 'GitHub URLs' below
tag A specific version tagged and published as tag See npm dist-tag
path/path/path See Local Paths below
-npm:@scope/pkg@version Custom alias for a pacakge See package-spec
+npm:@scope/pkg@version Custom alias for a package See package-spec
For example, these are all valid:
{
diff --git a/deps/npm/docs/output/configuring-npm/package-lock-json.html b/deps/npm/docs/output/configuring-npm/package-lock-json.html
index 80fa8bf8bd4ac8..6c302f9228c5e4 100644
--- a/deps/npm/docs/output/configuring-npm/package-lock-json.html
+++ b/deps/npm/docs/output/configuring-npm/package-lock-json.html
@@ -141,9 +141,9 @@
-
+
package-lock.json
- @10.9.2
+ @10.9.3
A manifestation of the manifest
diff --git a/deps/npm/docs/output/using-npm/config.html b/deps/npm/docs/output/using-npm/config.html
index 487cc56439e9ed..3348147f3ea63f 100644
--- a/deps/npm/docs/output/using-npm/config.html
+++ b/deps/npm/docs/output/using-npm/config.html
@@ -141,9 +141,9 @@
-
+
config
- @10.9.2
+ @10.9.3
More than you probably want to know about npm configuration
diff --git a/deps/npm/docs/output/using-npm/dependency-selectors.html b/deps/npm/docs/output/using-npm/dependency-selectors.html
index 24e34408de9c52..ed512664d165e1 100644
--- a/deps/npm/docs/output/using-npm/dependency-selectors.html
+++ b/deps/npm/docs/output/using-npm/dependency-selectors.html
@@ -141,9 +141,9 @@
-
+
Dependency Selector Syntax & Querying
- @10.9.2
+ @10.9.3
Dependency Selector Syntax & Querying
diff --git a/deps/npm/docs/output/using-npm/developers.html b/deps/npm/docs/output/using-npm/developers.html
index d7848c6e6647f3..e1a4ae53c7269a 100644
--- a/deps/npm/docs/output/using-npm/developers.html
+++ b/deps/npm/docs/output/using-npm/developers.html
@@ -141,9 +141,9 @@
-
+
developers
- @10.9.2
+ @10.9.3
Developer Guide
diff --git a/deps/npm/docs/output/using-npm/logging.html b/deps/npm/docs/output/using-npm/logging.html
index d1e6b0a7532253..af069e4cc3df2f 100644
--- a/deps/npm/docs/output/using-npm/logging.html
+++ b/deps/npm/docs/output/using-npm/logging.html
@@ -141,9 +141,9 @@
-
+
Logging
- @10.9.2
+ @10.9.3
Why, What & How We Log
diff --git a/deps/npm/docs/output/using-npm/orgs.html b/deps/npm/docs/output/using-npm/orgs.html
index 5b7007b3618406..4f106df86b5d97 100644
--- a/deps/npm/docs/output/using-npm/orgs.html
+++ b/deps/npm/docs/output/using-npm/orgs.html
@@ -141,9 +141,9 @@
-
+
orgs
- @10.9.2
+ @10.9.3
Working with Teams & Orgs
diff --git a/deps/npm/docs/output/using-npm/package-spec.html b/deps/npm/docs/output/using-npm/package-spec.html
index eee81153269653..3cca23ec1b425e 100644
--- a/deps/npm/docs/output/using-npm/package-spec.html
+++ b/deps/npm/docs/output/using-npm/package-spec.html
@@ -141,9 +141,9 @@
-
+
package-spec
- @10.9.2
+ @10.9.3
Package name specifier
diff --git a/deps/npm/docs/output/using-npm/registry.html b/deps/npm/docs/output/using-npm/registry.html
index bd464be3fd67c9..42ae54b5209a17 100644
--- a/deps/npm/docs/output/using-npm/registry.html
+++ b/deps/npm/docs/output/using-npm/registry.html
@@ -141,9 +141,9 @@
-
+
registry
- @10.9.2
+ @10.9.3
The JavaScript Package Registry
diff --git a/deps/npm/docs/output/using-npm/removal.html b/deps/npm/docs/output/using-npm/removal.html
index f0ccb74eb50811..459186018ecaec 100644
--- a/deps/npm/docs/output/using-npm/removal.html
+++ b/deps/npm/docs/output/using-npm/removal.html
@@ -141,9 +141,9 @@
-
+
removal
- @10.9.2
+ @10.9.3
Cleaning the Slate
diff --git a/deps/npm/docs/output/using-npm/scope.html b/deps/npm/docs/output/using-npm/scope.html
index 35ed91f147f107..8882c3c02de5f3 100644
--- a/deps/npm/docs/output/using-npm/scope.html
+++ b/deps/npm/docs/output/using-npm/scope.html
@@ -141,9 +141,9 @@
-
+
scope
- @10.9.2
+ @10.9.3
Scoped packages
diff --git a/deps/npm/docs/output/using-npm/scripts.html b/deps/npm/docs/output/using-npm/scripts.html
index 9982862814e73a..81b76cc15e4d9d 100644
--- a/deps/npm/docs/output/using-npm/scripts.html
+++ b/deps/npm/docs/output/using-npm/scripts.html
@@ -141,9 +141,9 @@
-
+
scripts
- @10.9.2
+ @10.9.3
How npm handles the "scripts" field
diff --git a/deps/npm/docs/output/using-npm/workspaces.html b/deps/npm/docs/output/using-npm/workspaces.html
index 5e2756479c7636..6d2344b8b392e0 100644
--- a/deps/npm/docs/output/using-npm/workspaces.html
+++ b/deps/npm/docs/output/using-npm/workspaces.html
@@ -141,9 +141,9 @@
-
+
workspaces
- @10.9.2
+ @10.9.3
Working with workspaces
diff --git a/deps/npm/lib/commands/diff.js b/deps/npm/lib/commands/diff.js
index 3fa8090a350468..6e9160cf623cd2 100644
--- a/deps/npm/lib/commands/diff.js
+++ b/deps/npm/lib/commands/diff.js
@@ -106,7 +106,7 @@ class Diff extends BaseCommand {
const pkgName = await this.packageName()
return [
`${pkgName}@${this.npm.config.get('tag')}`,
- `file:${this.prefix.replace(/#/g, '%23')}`,
+ `file:${this.prefix}`,
]
}
@@ -134,7 +134,7 @@ class Diff extends BaseCommand {
}
return [
`${pkgName}@${a}`,
- `file:${this.prefix.replace(/#/g, '%23')}`,
+ `file:${this.prefix}`,
]
}
@@ -166,7 +166,7 @@ class Diff extends BaseCommand {
}
return [
`${spec.name}@${spec.fetchSpec}`,
- `file:${this.prefix.replace(/#/g, '%23')}`,
+ `file:${this.prefix}`,
]
}
@@ -179,7 +179,7 @@ class Diff extends BaseCommand {
}
}
- const aSpec = `file:${node.realpath.replace(/#/g, '%23')}`
+ const aSpec = `file:${node.realpath}`
// finds what version of the package to compare against, if a exact
// version or tag was passed than it should use that, otherwise
@@ -212,8 +212,8 @@ class Diff extends BaseCommand {
]
} else if (spec.type === 'directory') {
return [
- `file:${spec.fetchSpec.replace(/#/g, '%23')}`,
- `file:${this.prefix.replace(/#/g, '%23')}`,
+ `file:${spec.fetchSpec}`,
+ `file:${this.prefix}`,
]
} else {
throw this.usageError(`Spec type ${spec.type} not supported.`)
@@ -281,7 +281,7 @@ class Diff extends BaseCommand {
const res = !node || !node.package || !node.package.version
? spec.fetchSpec
- : `file:${node.realpath.replace(/#/g, '%23')}`
+ : `file:${node.realpath}`
return `${spec.name}@${res}`
})
diff --git a/deps/npm/lib/commands/link.js b/deps/npm/lib/commands/link.js
index 8a41548d7f108b..4955a5b77d338b 100644
--- a/deps/npm/lib/commands/link.js
+++ b/deps/npm/lib/commands/link.js
@@ -124,7 +124,7 @@ class Link extends ArboristWorkspaceCmd {
...this.npm.flatOptions,
prune: false,
path: this.npm.prefix,
- add: names.map(l => `file:${resolve(globalTop, 'node_modules', l).replace(/#/g, '%23')}`),
+ add: names.map(l => `file:${resolve(globalTop, 'node_modules', l)}`),
save,
workspaces: this.workspaceNames,
})
@@ -135,7 +135,7 @@ class Link extends ArboristWorkspaceCmd {
async linkPkg () {
const wsp = this.workspacePaths
const paths = wsp && wsp.length ? wsp : [this.npm.prefix]
- const add = paths.map(path => `file:${path.replace(/#/g, '%23')}`)
+ const add = paths.map(path => `file:${path}`)
const globalTop = resolve(this.npm.globalDir, '..')
const Arborist = require('@npmcli/arborist')
const arb = new Arborist({
diff --git a/deps/npm/lib/utils/verify-signatures.js b/deps/npm/lib/utils/verify-signatures.js
index 09711581d11ddd..604f3741ce6dcc 100644
--- a/deps/npm/lib/utils/verify-signatures.js
+++ b/deps/npm/lib/utils/verify-signatures.js
@@ -2,7 +2,6 @@ const fetch = require('npm-registry-fetch')
const localeCompare = require('@isaacs/string-locale-compare')('en')
const npa = require('npm-package-arg')
const pacote = require('pacote')
-const pMap = require('p-map')
const tufClient = require('@sigstore/tuf')
const { log, output } = require('proc-log')
@@ -25,6 +24,7 @@ class VerifySignatures {
}
async run () {
+ const { default: pMap } = await import('p-map')
const start = process.hrtime.bigint()
// Find all deps in tree
diff --git a/deps/npm/man/man1/npm-access.1 b/deps/npm/man/man1/npm-access.1
index 96beab28fc365d..bbbbf89379a118 100644
--- a/deps/npm/man/man1/npm-access.1
+++ b/deps/npm/man/man1/npm-access.1
@@ -1,4 +1,4 @@
-.TH "NPM-ACCESS" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-ACCESS" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-access\fR - Set access level on published packages
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-adduser.1 b/deps/npm/man/man1/npm-adduser.1
index 0096f28229122d..dc99f5862a6224 100644
--- a/deps/npm/man/man1/npm-adduser.1
+++ b/deps/npm/man/man1/npm-adduser.1
@@ -1,4 +1,4 @@
-.TH "NPM-ADDUSER" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-ADDUSER" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-adduser\fR - Add a registry user account
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-audit.1 b/deps/npm/man/man1/npm-audit.1
index 38b977d3871d59..fd5031ea4a819b 100644
--- a/deps/npm/man/man1/npm-audit.1
+++ b/deps/npm/man/man1/npm-audit.1
@@ -1,4 +1,4 @@
-.TH "NPM-AUDIT" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-AUDIT" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-audit\fR - Run a security audit
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-bugs.1 b/deps/npm/man/man1/npm-bugs.1
index 92c57c2c3c4146..ead9e3c7e4c87f 100644
--- a/deps/npm/man/man1/npm-bugs.1
+++ b/deps/npm/man/man1/npm-bugs.1
@@ -1,4 +1,4 @@
-.TH "NPM-BUGS" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-BUGS" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-bugs\fR - Report bugs for a package in a web browser
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-cache.1 b/deps/npm/man/man1/npm-cache.1
index da188a0914e016..fb6c1be15e3ea1 100644
--- a/deps/npm/man/man1/npm-cache.1
+++ b/deps/npm/man/man1/npm-cache.1
@@ -1,4 +1,4 @@
-.TH "NPM-CACHE" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-CACHE" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-cache\fR - Manipulates packages cache
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-ci.1 b/deps/npm/man/man1/npm-ci.1
index 1ae5e934e2d2b7..49b3f4bc8bf791 100644
--- a/deps/npm/man/man1/npm-ci.1
+++ b/deps/npm/man/man1/npm-ci.1
@@ -1,4 +1,4 @@
-.TH "NPM-CI" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-CI" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-ci\fR - Clean install a project
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-completion.1 b/deps/npm/man/man1/npm-completion.1
index ea44a0c9c56a51..6416d406ecd92a 100644
--- a/deps/npm/man/man1/npm-completion.1
+++ b/deps/npm/man/man1/npm-completion.1
@@ -1,4 +1,4 @@
-.TH "NPM-COMPLETION" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-COMPLETION" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-completion\fR - Tab Completion for npm
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-config.1 b/deps/npm/man/man1/npm-config.1
index dbe609ba3b727f..033fb38d993690 100644
--- a/deps/npm/man/man1/npm-config.1
+++ b/deps/npm/man/man1/npm-config.1
@@ -1,4 +1,4 @@
-.TH "NPM-CONFIG" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-CONFIG" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-config\fR - Manage the npm configuration files
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-dedupe.1 b/deps/npm/man/man1/npm-dedupe.1
index 530de6344d5f4e..bd91a82c58aa3d 100644
--- a/deps/npm/man/man1/npm-dedupe.1
+++ b/deps/npm/man/man1/npm-dedupe.1
@@ -1,4 +1,4 @@
-.TH "NPM-DEDUPE" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-DEDUPE" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-dedupe\fR - Reduce duplication in the package tree
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-deprecate.1 b/deps/npm/man/man1/npm-deprecate.1
index 0333772b97c690..3923544b3c7f03 100644
--- a/deps/npm/man/man1/npm-deprecate.1
+++ b/deps/npm/man/man1/npm-deprecate.1
@@ -1,4 +1,4 @@
-.TH "NPM-DEPRECATE" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-DEPRECATE" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-deprecate\fR - Deprecate a version of a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-diff.1 b/deps/npm/man/man1/npm-diff.1
index 168f4397430854..5e7d8bb4b2a91a 100644
--- a/deps/npm/man/man1/npm-diff.1
+++ b/deps/npm/man/man1/npm-diff.1
@@ -1,4 +1,4 @@
-.TH "NPM-DIFF" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-DIFF" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-diff\fR - The registry diff command
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-dist-tag.1 b/deps/npm/man/man1/npm-dist-tag.1
index 97e2927ac1e2fa..a29037b85b5596 100644
--- a/deps/npm/man/man1/npm-dist-tag.1
+++ b/deps/npm/man/man1/npm-dist-tag.1
@@ -1,4 +1,4 @@
-.TH "NPM-DIST-TAG" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-DIST-TAG" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-dist-tag\fR - Modify package distribution tags
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-docs.1 b/deps/npm/man/man1/npm-docs.1
index 6b09a835bbf318..cd96969eef56e3 100644
--- a/deps/npm/man/man1/npm-docs.1
+++ b/deps/npm/man/man1/npm-docs.1
@@ -1,4 +1,4 @@
-.TH "NPM-DOCS" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-DOCS" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-docs\fR - Open documentation for a package in a web browser
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-doctor.1 b/deps/npm/man/man1/npm-doctor.1
index 9a0374b0896b52..f29b59c40365e4 100644
--- a/deps/npm/man/man1/npm-doctor.1
+++ b/deps/npm/man/man1/npm-doctor.1
@@ -1,4 +1,4 @@
-.TH "NPM-DOCTOR" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-DOCTOR" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-doctor\fR - Check the health of your npm environment
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-edit.1 b/deps/npm/man/man1/npm-edit.1
index 779feeede13656..aba315c84af6be 100644
--- a/deps/npm/man/man1/npm-edit.1
+++ b/deps/npm/man/man1/npm-edit.1
@@ -1,4 +1,4 @@
-.TH "NPM-EDIT" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-EDIT" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-edit\fR - Edit an installed package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-exec.1 b/deps/npm/man/man1/npm-exec.1
index 3b5fe4c425ee3e..d8f6e9a0d7c207 100644
--- a/deps/npm/man/man1/npm-exec.1
+++ b/deps/npm/man/man1/npm-exec.1
@@ -1,4 +1,4 @@
-.TH "NPM-EXEC" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-EXEC" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-exec\fR - Run a command from a local or remote npm package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-explain.1 b/deps/npm/man/man1/npm-explain.1
index 5f86d19236fdb0..a63bc7448170a5 100644
--- a/deps/npm/man/man1/npm-explain.1
+++ b/deps/npm/man/man1/npm-explain.1
@@ -1,4 +1,4 @@
-.TH "NPM-EXPLAIN" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-EXPLAIN" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-explain\fR - Explain installed packages
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-explore.1 b/deps/npm/man/man1/npm-explore.1
index eb71089b62446f..b1feb37f97e65a 100644
--- a/deps/npm/man/man1/npm-explore.1
+++ b/deps/npm/man/man1/npm-explore.1
@@ -1,4 +1,4 @@
-.TH "NPM-EXPLORE" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-EXPLORE" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-explore\fR - Browse an installed package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-find-dupes.1 b/deps/npm/man/man1/npm-find-dupes.1
index 88a42adcec1f20..cb1988b6c87353 100644
--- a/deps/npm/man/man1/npm-find-dupes.1
+++ b/deps/npm/man/man1/npm-find-dupes.1
@@ -1,4 +1,4 @@
-.TH "NPM-FIND-DUPES" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-FIND-DUPES" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-find-dupes\fR - Find duplication in the package tree
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-fund.1 b/deps/npm/man/man1/npm-fund.1
index 31c5ceb7ff8c1f..e56db03a460f49 100644
--- a/deps/npm/man/man1/npm-fund.1
+++ b/deps/npm/man/man1/npm-fund.1
@@ -1,4 +1,4 @@
-.TH "NPM-FUND" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-FUND" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-fund\fR - Retrieve funding information
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-help-search.1 b/deps/npm/man/man1/npm-help-search.1
index c954795c2778c7..71ee13917ab43e 100644
--- a/deps/npm/man/man1/npm-help-search.1
+++ b/deps/npm/man/man1/npm-help-search.1
@@ -1,4 +1,4 @@
-.TH "NPM-HELP-SEARCH" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-HELP-SEARCH" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-help-search\fR - Search npm help documentation
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-help.1 b/deps/npm/man/man1/npm-help.1
index 2d0fc6e3a2c0e7..5da01d0591d8b9 100644
--- a/deps/npm/man/man1/npm-help.1
+++ b/deps/npm/man/man1/npm-help.1
@@ -1,4 +1,4 @@
-.TH "NPM-HELP" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-HELP" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-help\fR - Get help on npm
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-hook.1 b/deps/npm/man/man1/npm-hook.1
index 8d5d9334692b9e..af8c6446d92ae9 100644
--- a/deps/npm/man/man1/npm-hook.1
+++ b/deps/npm/man/man1/npm-hook.1
@@ -1,4 +1,4 @@
-.TH "NPM-HOOK" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-HOOK" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-hook\fR - Manage registry hooks
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-init.1 b/deps/npm/man/man1/npm-init.1
index 1c1b008baa1deb..c9388088caefc5 100644
--- a/deps/npm/man/man1/npm-init.1
+++ b/deps/npm/man/man1/npm-init.1
@@ -1,4 +1,4 @@
-.TH "NPM-INIT" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-INIT" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-init\fR - Create a package.json file
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-install-ci-test.1 b/deps/npm/man/man1/npm-install-ci-test.1
index 4bbbb51140521b..be84290240e8d2 100644
--- a/deps/npm/man/man1/npm-install-ci-test.1
+++ b/deps/npm/man/man1/npm-install-ci-test.1
@@ -1,4 +1,4 @@
-.TH "NPM-INSTALL-CI-TEST" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-INSTALL-CI-TEST" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-install-ci-test\fR - Install a project with a clean slate and run tests
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-install-test.1 b/deps/npm/man/man1/npm-install-test.1
index eef940c066c709..ce0efcd2f4b849 100644
--- a/deps/npm/man/man1/npm-install-test.1
+++ b/deps/npm/man/man1/npm-install-test.1
@@ -1,4 +1,4 @@
-.TH "NPM-INSTALL-TEST" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-INSTALL-TEST" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-install-test\fR - Install package(s) and run tests
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-install.1 b/deps/npm/man/man1/npm-install.1
index bbe4a43c10863e..fdebccd4e245b6 100644
--- a/deps/npm/man/man1/npm-install.1
+++ b/deps/npm/man/man1/npm-install.1
@@ -1,4 +1,4 @@
-.TH "NPM-INSTALL" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-INSTALL" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-install\fR - Install a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-link.1 b/deps/npm/man/man1/npm-link.1
index 9aca1f4a7fb80d..312c8db5ac892d 100644
--- a/deps/npm/man/man1/npm-link.1
+++ b/deps/npm/man/man1/npm-link.1
@@ -1,4 +1,4 @@
-.TH "NPM-LINK" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-LINK" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-link\fR - Symlink a package folder
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-login.1 b/deps/npm/man/man1/npm-login.1
index a5e0cb71bb0d98..101d734340e2db 100644
--- a/deps/npm/man/man1/npm-login.1
+++ b/deps/npm/man/man1/npm-login.1
@@ -1,4 +1,4 @@
-.TH "NPM-LOGIN" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-LOGIN" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-login\fR - Login to a registry user account
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-logout.1 b/deps/npm/man/man1/npm-logout.1
index 8ac69429ea320d..bb3862c28f171e 100644
--- a/deps/npm/man/man1/npm-logout.1
+++ b/deps/npm/man/man1/npm-logout.1
@@ -1,4 +1,4 @@
-.TH "NPM-LOGOUT" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-LOGOUT" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-logout\fR - Log out of the registry
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-ls.1 b/deps/npm/man/man1/npm-ls.1
index 47d20120898b59..74df5f005fc1e4 100644
--- a/deps/npm/man/man1/npm-ls.1
+++ b/deps/npm/man/man1/npm-ls.1
@@ -1,4 +1,4 @@
-.TH "NPM-LS" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-LS" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-ls\fR - List installed packages
.SS "Synopsis"
@@ -20,7 +20,7 @@ Positional arguments are \fBname@version-range\fR identifiers, which will limit
.P
.RS 2
.nf
-npm@10.9.2 /path/to/npm
+npm@10.9.3 /path/to/npm
└─┬ init-package-json@0.0.4
└── promzard@0.1.5
.fi
diff --git a/deps/npm/man/man1/npm-org.1 b/deps/npm/man/man1/npm-org.1
index 88df3325a344fd..126e3b5df7741b 100644
--- a/deps/npm/man/man1/npm-org.1
+++ b/deps/npm/man/man1/npm-org.1
@@ -1,4 +1,4 @@
-.TH "NPM-ORG" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-ORG" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-org\fR - Manage orgs
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-outdated.1 b/deps/npm/man/man1/npm-outdated.1
index 17aabbd0857183..b1b0721e887ca4 100644
--- a/deps/npm/man/man1/npm-outdated.1
+++ b/deps/npm/man/man1/npm-outdated.1
@@ -1,4 +1,4 @@
-.TH "NPM-OUTDATED" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-OUTDATED" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-outdated\fR - Check for outdated packages
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-owner.1 b/deps/npm/man/man1/npm-owner.1
index f01e67e6464e7f..63baf946d3ecdf 100644
--- a/deps/npm/man/man1/npm-owner.1
+++ b/deps/npm/man/man1/npm-owner.1
@@ -1,4 +1,4 @@
-.TH "NPM-OWNER" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-OWNER" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-owner\fR - Manage package owners
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-pack.1 b/deps/npm/man/man1/npm-pack.1
index b9b56a31e30eef..e66633d4d6d9d0 100644
--- a/deps/npm/man/man1/npm-pack.1
+++ b/deps/npm/man/man1/npm-pack.1
@@ -1,4 +1,4 @@
-.TH "NPM-PACK" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-PACK" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-pack\fR - Create a tarball from a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-ping.1 b/deps/npm/man/man1/npm-ping.1
index 2d16431b030ea8..a3e21d4e3423d8 100644
--- a/deps/npm/man/man1/npm-ping.1
+++ b/deps/npm/man/man1/npm-ping.1
@@ -1,4 +1,4 @@
-.TH "NPM-PING" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-PING" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-ping\fR - Ping npm registry
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-pkg.1 b/deps/npm/man/man1/npm-pkg.1
index e55a0280d1211b..5c10939e657b83 100644
--- a/deps/npm/man/man1/npm-pkg.1
+++ b/deps/npm/man/man1/npm-pkg.1
@@ -1,4 +1,4 @@
-.TH "NPM-PKG" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-PKG" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-pkg\fR - Manages your package.json
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-prefix.1 b/deps/npm/man/man1/npm-prefix.1
index dca8d0c47497b4..1e830ee1fbc9dd 100644
--- a/deps/npm/man/man1/npm-prefix.1
+++ b/deps/npm/man/man1/npm-prefix.1
@@ -1,4 +1,4 @@
-.TH "NPM-PREFIX" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-PREFIX" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-prefix\fR - Display prefix
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-profile.1 b/deps/npm/man/man1/npm-profile.1
index 89837b408f5a67..b59bd0d47c342d 100644
--- a/deps/npm/man/man1/npm-profile.1
+++ b/deps/npm/man/man1/npm-profile.1
@@ -1,4 +1,4 @@
-.TH "NPM-PROFILE" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-PROFILE" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-profile\fR - Change settings on your registry profile
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-prune.1 b/deps/npm/man/man1/npm-prune.1
index 243426dffaf20f..97967d37fd50df 100644
--- a/deps/npm/man/man1/npm-prune.1
+++ b/deps/npm/man/man1/npm-prune.1
@@ -1,4 +1,4 @@
-.TH "NPM-PRUNE" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-PRUNE" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-prune\fR - Remove extraneous packages
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-publish.1 b/deps/npm/man/man1/npm-publish.1
index 1110b097182988..92268682bf9ba3 100644
--- a/deps/npm/man/man1/npm-publish.1
+++ b/deps/npm/man/man1/npm-publish.1
@@ -1,4 +1,4 @@
-.TH "NPM-PUBLISH" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-PUBLISH" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-publish\fR - Publish a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-query.1 b/deps/npm/man/man1/npm-query.1
index 983f2cad388940..6733ebaf1024f2 100644
--- a/deps/npm/man/man1/npm-query.1
+++ b/deps/npm/man/man1/npm-query.1
@@ -1,4 +1,4 @@
-.TH "NPM-QUERY" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-QUERY" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-query\fR - Dependency selector query
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-rebuild.1 b/deps/npm/man/man1/npm-rebuild.1
index 396d9adf779cb1..73ccb5381b8cba 100644
--- a/deps/npm/man/man1/npm-rebuild.1
+++ b/deps/npm/man/man1/npm-rebuild.1
@@ -1,4 +1,4 @@
-.TH "NPM-REBUILD" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-REBUILD" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-rebuild\fR - Rebuild a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-repo.1 b/deps/npm/man/man1/npm-repo.1
index b9c7bbdfbc233c..1b1ddce412c714 100644
--- a/deps/npm/man/man1/npm-repo.1
+++ b/deps/npm/man/man1/npm-repo.1
@@ -1,4 +1,4 @@
-.TH "NPM-REPO" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-REPO" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-repo\fR - Open package repository page in the browser
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-restart.1 b/deps/npm/man/man1/npm-restart.1
index 1112bf9180cece..b47dde96e7999e 100644
--- a/deps/npm/man/man1/npm-restart.1
+++ b/deps/npm/man/man1/npm-restart.1
@@ -1,4 +1,4 @@
-.TH "NPM-RESTART" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-RESTART" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-restart\fR - Restart a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-root.1 b/deps/npm/man/man1/npm-root.1
index 2456bdba6d1816..5f9f4ca6484242 100644
--- a/deps/npm/man/man1/npm-root.1
+++ b/deps/npm/man/man1/npm-root.1
@@ -1,4 +1,4 @@
-.TH "NPM-ROOT" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-ROOT" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-root\fR - Display npm root
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-run-script.1 b/deps/npm/man/man1/npm-run-script.1
index a05362361848d2..34d6986b082560 100644
--- a/deps/npm/man/man1/npm-run-script.1
+++ b/deps/npm/man/man1/npm-run-script.1
@@ -1,4 +1,4 @@
-.TH "NPM-RUN-SCRIPT" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-RUN-SCRIPT" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-run-script\fR - Run arbitrary package scripts
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-sbom.1 b/deps/npm/man/man1/npm-sbom.1
index f784a53edd4c08..5f218c62a4f80c 100644
--- a/deps/npm/man/man1/npm-sbom.1
+++ b/deps/npm/man/man1/npm-sbom.1
@@ -1,4 +1,4 @@
-.TH "NPM-SBOM" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-SBOM" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-sbom\fR - Generate a Software Bill of Materials (SBOM)
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-search.1 b/deps/npm/man/man1/npm-search.1
index 3ee7bcb7cb4e92..6dca287b328cc8 100644
--- a/deps/npm/man/man1/npm-search.1
+++ b/deps/npm/man/man1/npm-search.1
@@ -1,4 +1,4 @@
-.TH "NPM-SEARCH" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-SEARCH" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-search\fR - Search for packages
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-shrinkwrap.1 b/deps/npm/man/man1/npm-shrinkwrap.1
index d7e64493a90754..0d000c2cd96204 100644
--- a/deps/npm/man/man1/npm-shrinkwrap.1
+++ b/deps/npm/man/man1/npm-shrinkwrap.1
@@ -1,4 +1,4 @@
-.TH "NPM-SHRINKWRAP" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-SHRINKWRAP" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-shrinkwrap\fR - Lock down dependency versions for publication
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-star.1 b/deps/npm/man/man1/npm-star.1
index 7c3a528ec3a04f..780817c02045b1 100644
--- a/deps/npm/man/man1/npm-star.1
+++ b/deps/npm/man/man1/npm-star.1
@@ -1,4 +1,4 @@
-.TH "NPM-STAR" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-STAR" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-star\fR - Mark your favorite packages
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-stars.1 b/deps/npm/man/man1/npm-stars.1
index 61d60d9534da40..b42f0b103e6b42 100644
--- a/deps/npm/man/man1/npm-stars.1
+++ b/deps/npm/man/man1/npm-stars.1
@@ -1,4 +1,4 @@
-.TH "NPM-STARS" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-STARS" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-stars\fR - View packages marked as favorites
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-start.1 b/deps/npm/man/man1/npm-start.1
index 4c687a1ecba49e..2db8bca1089ea5 100644
--- a/deps/npm/man/man1/npm-start.1
+++ b/deps/npm/man/man1/npm-start.1
@@ -1,4 +1,4 @@
-.TH "NPM-START" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-START" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-start\fR - Start a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-stop.1 b/deps/npm/man/man1/npm-stop.1
index dadebe98c52834..f32d59c82db753 100644
--- a/deps/npm/man/man1/npm-stop.1
+++ b/deps/npm/man/man1/npm-stop.1
@@ -1,4 +1,4 @@
-.TH "NPM-STOP" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-STOP" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-stop\fR - Stop a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-team.1 b/deps/npm/man/man1/npm-team.1
index 42ee63f4743b6e..fd034c0705e28a 100644
--- a/deps/npm/man/man1/npm-team.1
+++ b/deps/npm/man/man1/npm-team.1
@@ -1,4 +1,4 @@
-.TH "NPM-TEAM" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-TEAM" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-team\fR - Manage organization teams and team memberships
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-test.1 b/deps/npm/man/man1/npm-test.1
index 80eb6a1d7bb9e7..56959e9995d4b1 100644
--- a/deps/npm/man/man1/npm-test.1
+++ b/deps/npm/man/man1/npm-test.1
@@ -1,4 +1,4 @@
-.TH "NPM-TEST" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-TEST" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-test\fR - Test a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-token.1 b/deps/npm/man/man1/npm-token.1
index 1d14f81553b5c9..5c45170b49472a 100644
--- a/deps/npm/man/man1/npm-token.1
+++ b/deps/npm/man/man1/npm-token.1
@@ -1,4 +1,4 @@
-.TH "NPM-TOKEN" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-TOKEN" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-token\fR - Manage your authentication tokens
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-uninstall.1 b/deps/npm/man/man1/npm-uninstall.1
index f5e25641fc57c5..e59cc5a422a7fb 100644
--- a/deps/npm/man/man1/npm-uninstall.1
+++ b/deps/npm/man/man1/npm-uninstall.1
@@ -1,4 +1,4 @@
-.TH "NPM-UNINSTALL" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-UNINSTALL" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-uninstall\fR - Remove a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-unpublish.1 b/deps/npm/man/man1/npm-unpublish.1
index fcf62bbd7da263..98cc5faa23f7ff 100644
--- a/deps/npm/man/man1/npm-unpublish.1
+++ b/deps/npm/man/man1/npm-unpublish.1
@@ -1,4 +1,4 @@
-.TH "NPM-UNPUBLISH" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-UNPUBLISH" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-unpublish\fR - Remove a package from the registry
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-unstar.1 b/deps/npm/man/man1/npm-unstar.1
index e9edf3ab6634dd..c43c249b090a54 100644
--- a/deps/npm/man/man1/npm-unstar.1
+++ b/deps/npm/man/man1/npm-unstar.1
@@ -1,4 +1,4 @@
-.TH "NPM-UNSTAR" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-UNSTAR" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-unstar\fR - Remove an item from your favorite packages
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-update.1 b/deps/npm/man/man1/npm-update.1
index 052008d73485c9..2f671212598076 100644
--- a/deps/npm/man/man1/npm-update.1
+++ b/deps/npm/man/man1/npm-update.1
@@ -1,4 +1,4 @@
-.TH "NPM-UPDATE" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-UPDATE" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-update\fR - Update packages
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-version.1 b/deps/npm/man/man1/npm-version.1
index f79a0c84c3f8fd..036031994e1313 100644
--- a/deps/npm/man/man1/npm-version.1
+++ b/deps/npm/man/man1/npm-version.1
@@ -1,4 +1,4 @@
-.TH "NPM-VERSION" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-VERSION" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-version\fR - Bump a package version
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-view.1 b/deps/npm/man/man1/npm-view.1
index e862ddc4b8780d..2a7d646379ec4e 100644
--- a/deps/npm/man/man1/npm-view.1
+++ b/deps/npm/man/man1/npm-view.1
@@ -1,4 +1,4 @@
-.TH "NPM-VIEW" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-VIEW" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-view\fR - View registry info
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-whoami.1 b/deps/npm/man/man1/npm-whoami.1
index a003f634a2b293..d3f9596e60684a 100644
--- a/deps/npm/man/man1/npm-whoami.1
+++ b/deps/npm/man/man1/npm-whoami.1
@@ -1,4 +1,4 @@
-.TH "NPM-WHOAMI" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-WHOAMI" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-whoami\fR - Display npm username
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm.1 b/deps/npm/man/man1/npm.1
index 28e8774ad1b4d0..7f36fc2e7530f2 100644
--- a/deps/npm/man/man1/npm.1
+++ b/deps/npm/man/man1/npm.1
@@ -1,4 +1,4 @@
-.TH "NPM" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPM" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm\fR - javascript package manager
.SS "Synopsis"
@@ -12,7 +12,7 @@ npm
Note: This command is unaware of workspaces.
.SS "Version"
.P
-10.9.2
+10.9.3
.SS "Description"
.P
npm is the package manager for the Node JavaScript platform. It puts modules in place so that node can find them, and manages dependency conflicts intelligently.
diff --git a/deps/npm/man/man1/npx.1 b/deps/npm/man/man1/npx.1
index 9b4ba1af1dc365..294bff4b69c5e5 100644
--- a/deps/npm/man/man1/npx.1
+++ b/deps/npm/man/man1/npx.1
@@ -1,4 +1,4 @@
-.TH "NPX" "1" "December 2024" "NPM@10.9.2" ""
+.TH "NPX" "1" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpx\fR - Run a command from a local or remote npm package
.SS "Synopsis"
diff --git a/deps/npm/man/man5/folders.5 b/deps/npm/man/man5/folders.5
index 8eb8a8230333f9..1ab9ef3e750ebb 100644
--- a/deps/npm/man/man5/folders.5
+++ b/deps/npm/man/man5/folders.5
@@ -1,4 +1,4 @@
-.TH "FOLDERS" "5" "December 2024" "NPM@10.9.2" ""
+.TH "FOLDERS" "5" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBfolders\fR - Folder Structures Used by npm
.SS "Description"
diff --git a/deps/npm/man/man5/install.5 b/deps/npm/man/man5/install.5
index 0b75adf308685a..d93b5141304c1f 100644
--- a/deps/npm/man/man5/install.5
+++ b/deps/npm/man/man5/install.5
@@ -1,4 +1,4 @@
-.TH "INSTALL" "5" "December 2024" "NPM@10.9.2" ""
+.TH "INSTALL" "5" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBinstall\fR - Download and install node and npm
.SS "Description"
diff --git a/deps/npm/man/man5/npm-global.5 b/deps/npm/man/man5/npm-global.5
index 8eb8a8230333f9..1ab9ef3e750ebb 100644
--- a/deps/npm/man/man5/npm-global.5
+++ b/deps/npm/man/man5/npm-global.5
@@ -1,4 +1,4 @@
-.TH "FOLDERS" "5" "December 2024" "NPM@10.9.2" ""
+.TH "FOLDERS" "5" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBfolders\fR - Folder Structures Used by npm
.SS "Description"
diff --git a/deps/npm/man/man5/npm-json.5 b/deps/npm/man/man5/npm-json.5
index 81fa9a8c95b8dc..b46a1ddb4ad57f 100644
--- a/deps/npm/man/man5/npm-json.5
+++ b/deps/npm/man/man5/npm-json.5
@@ -1,4 +1,4 @@
-.TH "PACKAGE.JSON" "5" "December 2024" "NPM@10.9.2" ""
+.TH "PACKAGE.JSON" "5" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBpackage.json\fR - Specifics of npm's package.json handling
.SS "Description"
@@ -589,7 +589,7 @@ See \fBsemver\fR \fI\(lahttps://github.com/npm/node-semver#versions\(ra\fR for m
.IP \(bu 4
\fBpath/path/path\fR See \fBLocal Paths\fR \fI(Local Paths)\fR below
.IP \(bu 4
-\fBnpm:@scope/pkg@version\fR Custom alias for a pacakge See \fB\fBpackage-spec\fR\fR \fI\(la/using-npm/package-spec#aliases\(ra\fR
+\fBnpm:@scope/pkg@version\fR Custom alias for a package See \fB\fBpackage-spec\fR\fR \fI\(la/using-npm/package-spec#aliases\(ra\fR
.RE 0
.P
diff --git a/deps/npm/man/man5/npm-shrinkwrap-json.5 b/deps/npm/man/man5/npm-shrinkwrap-json.5
index 7e8efb285e959e..67a9d094c397ae 100644
--- a/deps/npm/man/man5/npm-shrinkwrap-json.5
+++ b/deps/npm/man/man5/npm-shrinkwrap-json.5
@@ -1,4 +1,4 @@
-.TH "NPM-SHRINKWRAP.JSON" "5" "December 2024" "NPM@10.9.2" ""
+.TH "NPM-SHRINKWRAP.JSON" "5" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpm-shrinkwrap.json\fR - A publishable lockfile
.SS "Description"
diff --git a/deps/npm/man/man5/npmrc.5 b/deps/npm/man/man5/npmrc.5
index 947a0fe433faa3..d8e39ef4b547aa 100644
--- a/deps/npm/man/man5/npmrc.5
+++ b/deps/npm/man/man5/npmrc.5
@@ -1,4 +1,4 @@
-.TH "NPMRC" "5" "December 2024" "NPM@10.9.2" ""
+.TH "NPMRC" "5" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBnpmrc\fR - The npm config files
.SS "Description"
diff --git a/deps/npm/man/man5/package-json.5 b/deps/npm/man/man5/package-json.5
index 81fa9a8c95b8dc..b46a1ddb4ad57f 100644
--- a/deps/npm/man/man5/package-json.5
+++ b/deps/npm/man/man5/package-json.5
@@ -1,4 +1,4 @@
-.TH "PACKAGE.JSON" "5" "December 2024" "NPM@10.9.2" ""
+.TH "PACKAGE.JSON" "5" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBpackage.json\fR - Specifics of npm's package.json handling
.SS "Description"
@@ -589,7 +589,7 @@ See \fBsemver\fR \fI\(lahttps://github.com/npm/node-semver#versions\(ra\fR for m
.IP \(bu 4
\fBpath/path/path\fR See \fBLocal Paths\fR \fI(Local Paths)\fR below
.IP \(bu 4
-\fBnpm:@scope/pkg@version\fR Custom alias for a pacakge See \fB\fBpackage-spec\fR\fR \fI\(la/using-npm/package-spec#aliases\(ra\fR
+\fBnpm:@scope/pkg@version\fR Custom alias for a package See \fB\fBpackage-spec\fR\fR \fI\(la/using-npm/package-spec#aliases\(ra\fR
.RE 0
.P
diff --git a/deps/npm/man/man5/package-lock-json.5 b/deps/npm/man/man5/package-lock-json.5
index 78deecab3757c1..b9cfdf5263f3c6 100644
--- a/deps/npm/man/man5/package-lock-json.5
+++ b/deps/npm/man/man5/package-lock-json.5
@@ -1,4 +1,4 @@
-.TH "PACKAGE-LOCK.JSON" "5" "December 2024" "NPM@10.9.2" ""
+.TH "PACKAGE-LOCK.JSON" "5" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBpackage-lock.json\fR - A manifestation of the manifest
.SS "Description"
diff --git a/deps/npm/man/man7/config.7 b/deps/npm/man/man7/config.7
index 0ce1f25f38d5d3..42bfab4fea453b 100644
--- a/deps/npm/man/man7/config.7
+++ b/deps/npm/man/man7/config.7
@@ -1,4 +1,4 @@
-.TH "CONFIG" "7" "December 2024" "NPM@10.9.2" ""
+.TH "CONFIG" "7" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBconfig\fR - More than you probably want to know about npm configuration
.SS "Description"
diff --git a/deps/npm/man/man7/dependency-selectors.7 b/deps/npm/man/man7/dependency-selectors.7
index e15648cb400af4..ef646e12ec8f3f 100644
--- a/deps/npm/man/man7/dependency-selectors.7
+++ b/deps/npm/man/man7/dependency-selectors.7
@@ -1,4 +1,4 @@
-.TH "QUERYING" "7" "December 2024" "NPM@10.9.2" ""
+.TH "QUERYING" "7" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBQuerying\fR - Dependency Selector Syntax & Querying
.SS "Description"
diff --git a/deps/npm/man/man7/developers.7 b/deps/npm/man/man7/developers.7
index a5d429c60a5cef..f7874d9f36fdb1 100644
--- a/deps/npm/man/man7/developers.7
+++ b/deps/npm/man/man7/developers.7
@@ -1,4 +1,4 @@
-.TH "DEVELOPERS" "7" "December 2024" "NPM@10.9.2" ""
+.TH "DEVELOPERS" "7" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBdevelopers\fR - Developer Guide
.SS "Description"
diff --git a/deps/npm/man/man7/logging.7 b/deps/npm/man/man7/logging.7
index 200ad578584521..b0692f6df78c4c 100644
--- a/deps/npm/man/man7/logging.7
+++ b/deps/npm/man/man7/logging.7
@@ -1,4 +1,4 @@
-.TH "LOGGING" "7" "December 2024" "NPM@10.9.2" ""
+.TH "LOGGING" "7" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBLogging\fR - Why, What & How We Log
.SS "Description"
diff --git a/deps/npm/man/man7/orgs.7 b/deps/npm/man/man7/orgs.7
index 8f12f6e0434a19..5340af005eca40 100644
--- a/deps/npm/man/man7/orgs.7
+++ b/deps/npm/man/man7/orgs.7
@@ -1,4 +1,4 @@
-.TH "ORGS" "7" "December 2024" "NPM@10.9.2" ""
+.TH "ORGS" "7" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBorgs\fR - Working with Teams & Orgs
.SS "Description"
diff --git a/deps/npm/man/man7/package-spec.7 b/deps/npm/man/man7/package-spec.7
index f8a0ac73903272..5ec5febb66f839 100644
--- a/deps/npm/man/man7/package-spec.7
+++ b/deps/npm/man/man7/package-spec.7
@@ -1,4 +1,4 @@
-.TH "PACKAGE-SPEC" "7" "December 2024" "NPM@10.9.2" ""
+.TH "PACKAGE-SPEC" "7" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBpackage-spec\fR - Package name specifier
.SS "Description"
diff --git a/deps/npm/man/man7/registry.7 b/deps/npm/man/man7/registry.7
index 1e8134daaf751d..c97d3f5576749b 100644
--- a/deps/npm/man/man7/registry.7
+++ b/deps/npm/man/man7/registry.7
@@ -1,4 +1,4 @@
-.TH "REGISTRY" "7" "December 2024" "NPM@10.9.2" ""
+.TH "REGISTRY" "7" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBregistry\fR - The JavaScript Package Registry
.SS "Description"
diff --git a/deps/npm/man/man7/removal.7 b/deps/npm/man/man7/removal.7
index 74a7703f177e47..18905b74db6e16 100644
--- a/deps/npm/man/man7/removal.7
+++ b/deps/npm/man/man7/removal.7
@@ -1,4 +1,4 @@
-.TH "REMOVAL" "7" "December 2024" "NPM@10.9.2" ""
+.TH "REMOVAL" "7" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBremoval\fR - Cleaning the Slate
.SS "Synopsis"
diff --git a/deps/npm/man/man7/scope.7 b/deps/npm/man/man7/scope.7
index ccf0d9e7ee99c4..352732b7e1264c 100644
--- a/deps/npm/man/man7/scope.7
+++ b/deps/npm/man/man7/scope.7
@@ -1,4 +1,4 @@
-.TH "SCOPE" "7" "December 2024" "NPM@10.9.2" ""
+.TH "SCOPE" "7" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBscope\fR - Scoped packages
.SS "Description"
diff --git a/deps/npm/man/man7/scripts.7 b/deps/npm/man/man7/scripts.7
index 1758473fc89dbb..330c8ce424b140 100644
--- a/deps/npm/man/man7/scripts.7
+++ b/deps/npm/man/man7/scripts.7
@@ -1,4 +1,4 @@
-.TH "SCRIPTS" "7" "December 2024" "NPM@10.9.2" ""
+.TH "SCRIPTS" "7" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBscripts\fR - How npm handles the "scripts" field
.SS "Description"
diff --git a/deps/npm/man/man7/workspaces.7 b/deps/npm/man/man7/workspaces.7
index cd7df95b81cf3e..9e71ab90ee4ea0 100644
--- a/deps/npm/man/man7/workspaces.7
+++ b/deps/npm/man/man7/workspaces.7
@@ -1,4 +1,4 @@
-.TH "WORKSPACES" "7" "December 2024" "NPM@10.9.2" ""
+.TH "WORKSPACES" "7" "June 2025" "NPM@10.9.3" ""
.SH "NAME"
\fBworkspaces\fR - Working with workspaces
.SS "Description"
diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js
index 6bd4e9407e72d6..54f86dea0f65c0 100644
--- a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js
+++ b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js
@@ -447,7 +447,7 @@ module.exports = cls => class IdealTreeBuilder extends cls {
.catch(/* istanbul ignore next */ () => null)
if (st && st.isSymbolicLink()) {
const target = await readlink(dir)
- const real = resolve(dirname(dir), target).replace(/#/g, '%23')
+ const real = resolve(dirname(dir), target)
tree.package.dependencies[name] = `file:${real}`
} else {
tree.package.dependencies[name] = '*'
@@ -522,12 +522,12 @@ module.exports = cls => class IdealTreeBuilder extends cls {
const { name } = spec
if (spec.type === 'file') {
- spec = npa(`file:${relpath(path, spec.fetchSpec).replace(/#/g, '%23')}`, path)
+ spec = npa(`file:${relpath(path, spec.fetchSpec)}`, path)
spec.name = name
} else if (spec.type === 'directory') {
try {
const real = await realpath(spec.fetchSpec, this[_rpcache], this[_stcache])
- spec = npa(`file:${relpath(path, real).replace(/#/g, '%23')}`, path)
+ spec = npa(`file:${relpath(path, real)}`, path)
spec.name = name
} catch {
// TODO: create synthetic test case to simulate realpath failure
diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-actual.js b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-actual.js
index 22c1c2875f1b17..2add9553688a42 100644
--- a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-actual.js
+++ b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-actual.js
@@ -216,7 +216,7 @@ module.exports = cls => class ActualLoader extends cls {
const actualRoot = tree.isLink ? tree.target : tree
const { dependencies = {} } = actualRoot.package
for (const [name, kid] of actualRoot.children.entries()) {
- const def = kid.isLink ? `file:${kid.realpath.replace(/#/g, '%23')}` : '*'
+ const def = kid.isLink ? `file:${kid.realpath}` : '*'
dependencies[name] = dependencies[name] || def
}
actualRoot.package = { ...actualRoot.package, dependencies }
diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js
index 7c51f8b9bef795..07c986853913ee 100644
--- a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js
+++ b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js
@@ -149,7 +149,7 @@ module.exports = cls => class VirtualLoader extends cls {
})
for (const [name, path] of workspaces.entries()) {
- lockWS[name] = `file:${path.replace(/#/g, '%23')}`
+ lockWS[name] = `file:${path}`
}
// Should rootNames exclude optional?
diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js
index be920272d48f00..4083d79f4fa255 100644
--- a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js
+++ b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js
@@ -1364,7 +1364,7 @@ module.exports = cls => class Reifier extends cls {
// path initially, in which case we can end up with the wrong
// thing, so just get the ultimate fetchSpec and relativize it.
const p = req.fetchSpec.replace(/^file:/, '')
- const rel = relpath(addTree.realpath, p).replace(/#/g, '%23')
+ const rel = relpath(addTree.realpath, p)
newSpec = `file:${rel}`
}
} else {
diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/consistent-resolve.js b/deps/npm/node_modules/@npmcli/arborist/lib/consistent-resolve.js
index 7c988048057c74..cbd3392a9300c7 100644
--- a/deps/npm/node_modules/@npmcli/arborist/lib/consistent-resolve.js
+++ b/deps/npm/node_modules/@npmcli/arborist/lib/consistent-resolve.js
@@ -20,7 +20,7 @@ const consistentResolve = (resolved, fromPath, toPath, relPaths = false) => {
raw,
} = npa(resolved, fromPath)
if (type === 'file' || type === 'directory') {
- const cleanFetchSpec = fetchSpec.replace(/#/g, '%23')
+ const cleanFetchSpec = fetchSpec
if (relPaths && toPath) {
return `file:${relpath(toPath, cleanFetchSpec)}`
}
diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/link.js b/deps/npm/node_modules/@npmcli/arborist/lib/link.js
index 266ec45168839b..42bc1faf488609 100644
--- a/deps/npm/node_modules/@npmcli/arborist/lib/link.js
+++ b/deps/npm/node_modules/@npmcli/arborist/lib/link.js
@@ -99,7 +99,7 @@ class Link extends Node {
// the path/realpath guard is there for the benefit of setting
// these things in the "wrong" order
return this.path && this.realpath
- ? `file:${relpath(dirname(this.path), this.realpath).replace(/#/g, '%23')}`
+ ? `file:${relpath(dirname(this.path), this.realpath)}`
: null
}
diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/node.js b/deps/npm/node_modules/@npmcli/arborist/lib/node.js
index c519a7b543d4db..dccbf99bf60804 100644
--- a/deps/npm/node_modules/@npmcli/arborist/lib/node.js
+++ b/deps/npm/node_modules/@npmcli/arborist/lib/node.js
@@ -842,7 +842,7 @@ class Node {
}
for (const [name, path] of this.#workspaces.entries()) {
- new Edge({ from: this, name, spec: `file:${path.replace(/#/g, '%23')}`, type: 'workspace' })
+ new Edge({ from: this, name, spec: `file:${path}`, type: 'workspace' })
}
}
diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/shrinkwrap.js b/deps/npm/node_modules/@npmcli/arborist/lib/shrinkwrap.js
index 5f720ed9bd4404..11703fad4b9254 100644
--- a/deps/npm/node_modules/@npmcli/arborist/lib/shrinkwrap.js
+++ b/deps/npm/node_modules/@npmcli/arborist/lib/shrinkwrap.js
@@ -817,7 +817,7 @@ class Shrinkwrap {
if (!/^file:/.test(resolved)) {
pathFixed = resolved
} else {
- pathFixed = `file:${resolve(this.path, resolved.slice(5)).replace(/#/g, '%23')}`
+ pathFixed = `file:${resolve(this.path, resolved.slice(5))}`
}
}
@@ -1011,7 +1011,7 @@ class Shrinkwrap {
}
if (node.isLink) {
- lock.version = `file:${relpath(this.path, node.realpath).replace(/#/g, '%23')}`
+ lock.version = `file:${relpath(this.path, node.realpath)}`
} else if (spec && (spec.type === 'file' || spec.type === 'remote')) {
lock.version = spec.saveSpec
} else if (spec && spec.type === 'git' || rSpec.type === 'git') {
@@ -1089,7 +1089,7 @@ class Shrinkwrap {
// this especially shows up with workspace edges when the root
// node is also a workspace in the set.
const p = resolve(node.realpath, spec.slice('file:'.length))
- set[k] = `file:${relpath(node.realpath, p).replace(/#/g, '%23')}`
+ set[k] = `file:${relpath(node.realpath, p)}`
} else {
set[k] = spec
}
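Taken together, the arborist hunks above stop percent-encoding `#` in `file:` specs for linked and workspace dependencies. A minimal sketch of the behavioral difference, assuming a hypothetical relative path that contains a `#`:

```js
// Illustrative only, not part of the patch.
const relPath = 'packages/lib#utils'                    // hypothetical path containing '#'
const oldSpec = `file:${relPath.replace(/#/g, '%23')}`  // 'file:packages/lib%23utils' (previous behavior)
const newSpec = `file:${relPath}`                       // 'file:packages/lib#utils'   (new behavior)
console.log(oldSpec, newSpec)
```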
diff --git a/deps/npm/node_modules/@npmcli/arborist/package.json b/deps/npm/node_modules/@npmcli/arborist/package.json
index b1e2b21a254635..9153139d2a8a87 100644
--- a/deps/npm/node_modules/@npmcli/arborist/package.json
+++ b/deps/npm/node_modules/@npmcli/arborist/package.json
@@ -1,6 +1,6 @@
{
"name": "@npmcli/arborist",
- "version": "8.0.0",
+ "version": "8.0.1",
"description": "Manage node_modules trees",
"dependencies": {
"@isaacs/string-locale-compare": "^1.1.0",
@@ -41,7 +41,7 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^5.0.1",
- "@npmcli/template-oss": "4.23.3",
+ "@npmcli/template-oss": "4.24.4",
"benchmark": "^2.1.4",
"minify-registry-metadata": "^4.0.0",
"nock": "^13.3.3",
@@ -93,7 +93,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.23.3",
+ "version": "4.24.4",
"content": "../../scripts/template-oss/index.js"
}
}
diff --git a/deps/npm/node_modules/@npmcli/config/package.json b/deps/npm/node_modules/@npmcli/config/package.json
index 18c677393b5ff3..9e30f3cde6bca0 100644
--- a/deps/npm/node_modules/@npmcli/config/package.json
+++ b/deps/npm/node_modules/@npmcli/config/package.json
@@ -33,7 +33,7 @@
"devDependencies": {
"@npmcli/eslint-config": "^5.0.1",
"@npmcli/mock-globals": "^1.0.0",
- "@npmcli/template-oss": "4.23.3",
+ "@npmcli/template-oss": "4.24.4",
"tap": "^16.3.8"
},
"dependencies": {
@@ -51,7 +51,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.23.3",
+ "version": "4.24.4",
"content": "../../scripts/template-oss/index.js"
}
}
diff --git a/deps/npm/node_modules/@npmcli/git/lib/revs.js b/deps/npm/node_modules/@npmcli/git/lib/revs.js
index ca14837de1b876..ebcc848fa34584 100644
--- a/deps/npm/node_modules/@npmcli/git/lib/revs.js
+++ b/deps/npm/node_modules/@npmcli/git/lib/revs.js
@@ -1,14 +1,12 @@
-const pinflight = require('promise-inflight')
const spawn = require('./spawn.js')
const { LRUCache } = require('lru-cache')
+const linesToRevs = require('./lines-to-revs.js')
const revsCache = new LRUCache({
max: 100,
ttl: 5 * 60 * 1000,
})
-const linesToRevs = require('./lines-to-revs.js')
-
module.exports = async (repo, opts = {}) => {
if (!opts.noGitRevCache) {
const cached = revsCache.get(repo)
@@ -17,12 +15,8 @@ module.exports = async (repo, opts = {}) => {
}
}
- return pinflight(`ls-remote:${repo}`, () =>
- spawn(['ls-remote', repo], opts)
- .then(({ stdout }) => linesToRevs(stdout.trim().split('\n')))
- .then(revs => {
- revsCache.set(repo, revs)
- return revs
- })
- )
+ const { stdout } = await spawn(['ls-remote', repo], opts)
+ const revs = linesToRevs(stdout.trim().split('\n'))
+ revsCache.set(repo, revs)
+ return revs
}
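The rewritten revs.js keeps the five-minute LRU cache but drops the promise-inflight dependency, so concurrent `ls-remote` calls for the same repo are no longer coalesced in flight; each uncached call simply awaits its own spawn. A hedged usage sketch (the repo URL is illustrative):

```js
// Sketch, assuming the module shape shown above.
const revs = require('@npmcli/git/lib/revs.js')

async function demo () {
  const first = await revs('https://github.com/npm/cli.git')  // spawns `git ls-remote`, fills the cache
  const second = await revs('https://github.com/npm/cli.git') // served from the 5-minute LRU cache
  const fresh = await revs('https://github.com/npm/cli.git', { noGitRevCache: true }) // bypasses the cache
  return { first, second, fresh }
}
```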
diff --git a/deps/npm/node_modules/@npmcli/git/package.json b/deps/npm/node_modules/@npmcli/git/package.json
index 2bc6730ba2151b..0880b2443d9fde 100644
--- a/deps/npm/node_modules/@npmcli/git/package.json
+++ b/deps/npm/node_modules/@npmcli/git/package.json
@@ -1,6 +1,6 @@
{
"name": "@npmcli/git",
- "version": "6.0.1",
+ "version": "6.0.3",
"main": "lib/index.js",
"files": [
"bin/",
@@ -32,8 +32,8 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^5.0.0",
- "@npmcli/template-oss": "4.23.3",
- "npm-package-arg": "^11.0.0",
+ "@npmcli/template-oss": "4.24.1",
+ "npm-package-arg": "^12.0.1",
"slash": "^3.0.0",
"tap": "^16.0.1"
},
@@ -43,7 +43,6 @@
"lru-cache": "^10.0.1",
"npm-pick-manifest": "^10.0.0",
"proc-log": "^5.0.0",
- "promise-inflight": "^1.0.1",
"promise-retry": "^2.0.1",
"semver": "^7.3.5",
"which": "^5.0.0"
@@ -53,7 +52,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.23.3",
+ "version": "4.24.1",
"publish": true
}
}
diff --git a/deps/npm/node_modules/@npmcli/package-json/lib/index.js b/deps/npm/node_modules/@npmcli/package-json/lib/index.js
index 23f326dd59359f..7eff602d73a3f5 100644
--- a/deps/npm/node_modules/@npmcli/package-json/lib/index.js
+++ b/deps/npm/node_modules/@npmcli/package-json/lib/index.js
@@ -41,6 +41,7 @@ class PackageJson {
'binRefs',
'bundleDependencies',
'bundleDependenciesFalse',
+ 'fixName',
'fixNameField',
'fixVersionField',
'fixRepositoryField',
@@ -252,7 +253,9 @@ class PackageJson {
.replace(/\n/g, eol)
if (fileContent.trim() !== this.#readFileContent.trim()) {
- return await writeFile(this.filename, fileContent)
+ const written = await writeFile(this.filename, fileContent)
+ this.#readFileContent = fileContent
+ return written
}
}
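Because `#readFileContent` is now refreshed after a successful write, calling `save()` a second time without further edits compares equal and becomes a no-op rather than rewriting the file. A small usage sketch, assuming the package's public `load`/`update`/`save` API:

```js
// Sketch only; the directory path is illustrative.
const PackageJson = require('@npmcli/package-json')

async function touchDescription (dir) {
  const pkg = await PackageJson.load(dir)
  pkg.update({ description: 'updated' })
  await pkg.save() // writes package.json and refreshes the cached file content
  await pkg.save() // no-op: the in-memory content now matches what was just written
}
```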
diff --git a/deps/npm/node_modules/@npmcli/package-json/lib/normalize-data.js b/deps/npm/node_modules/@npmcli/package-json/lib/normalize-data.js
new file mode 100644
index 00000000000000..79b0bafbcd3a4d
--- /dev/null
+++ b/deps/npm/node_modules/@npmcli/package-json/lib/normalize-data.js
@@ -0,0 +1,257 @@
+// Originally normalize-package-data
+
+const url = require('node:url')
+const hostedGitInfo = require('hosted-git-info')
+const validateLicense = require('validate-npm-package-license')
+
+const typos = {
+ dependancies: 'dependencies',
+ dependecies: 'dependencies',
+ depdenencies: 'dependencies',
+ devEependencies: 'devDependencies',
+ depends: 'dependencies',
+ 'dev-dependencies': 'devDependencies',
+ devDependences: 'devDependencies',
+ devDepenencies: 'devDependencies',
+ devdependencies: 'devDependencies',
+ repostitory: 'repository',
+ repo: 'repository',
+ prefereGlobal: 'preferGlobal',
+ hompage: 'homepage',
+ hampage: 'homepage',
+ autohr: 'author',
+ autor: 'author',
+ contributers: 'contributors',
+ publicationConfig: 'publishConfig',
+ script: 'scripts',
+}
+
+const isEmail = str => str.includes('@') && (str.indexOf('@') < str.lastIndexOf('.'))
+
+// Extracts description from contents of a readme file in markdown format
+function extractDescription (description) {
+  // grab the first block of plain text, skipping any leading blank lines and headings
+ const lines = description.trim().split('\n')
+ let start = 0
+ // skip initial empty lines and lines that start with #
+ while (lines[start]?.trim().match(/^(#|$)/)) {
+ start++
+ }
+ let end = start + 1
+ // keep going till we get to the end or an empty line
+ while (end < lines.length && lines[end].trim()) {
+ end++
+ }
+ return lines.slice(start, end).join(' ').trim()
+}
+
+function stringifyPerson (person) {
+ if (typeof person !== 'string') {
+ const name = person.name || ''
+ const u = person.url || person.web
+ const wrappedUrl = u ? (' (' + u + ')') : ''
+ const e = person.email || person.mail
+ const wrappedEmail = e ? (' <' + e + '>') : ''
+ person = name + wrappedEmail + wrappedUrl
+ }
+ const matchedName = person.match(/^([^(<]+)/)
+ const matchedUrl = person.match(/\(([^()]+)\)/)
+ const matchedEmail = person.match(/<([^<>]+)>/)
+ const parsed = {}
+ if (matchedName?.[0].trim()) {
+ parsed.name = matchedName[0].trim()
+ }
+ if (matchedEmail) {
+ parsed.email = matchedEmail[1]
+ }
+ if (matchedUrl) {
+ parsed.url = matchedUrl[1]
+ }
+ return parsed
+}
+
+function normalizeData (data, changes) {
+ // fixDescriptionField
+ if (data.description && typeof data.description !== 'string') {
+ changes?.push(`'description' field should be a string`)
+ delete data.description
+ }
+ if (data.readme && !data.description && data.readme !== 'ERROR: No README data found!') {
+ data.description = extractDescription(data.readme)
+ }
+ if (data.description === undefined) {
+ delete data.description
+ }
+ if (!data.description) {
+ changes?.push('No description')
+ }
+
+ // fixModulesField
+ if (data.modules) {
+ changes?.push(`modules field is deprecated`)
+ delete data.modules
+ }
+
+ // fixFilesField
+ const files = data.files
+ if (files && !Array.isArray(files)) {
+ changes?.push(`Invalid 'files' member`)
+ delete data.files
+ } else if (data.files) {
+ data.files = data.files.filter(function (file) {
+ if (!file || typeof file !== 'string') {
+ changes?.push(`Invalid filename in 'files' list: ${file}`)
+ return false
+ } else {
+ return true
+ }
+ })
+ }
+
+ // fixManField
+ if (data.man && typeof data.man === 'string') {
+ data.man = [data.man]
+ }
+
+ // fixBugsField
+ if (!data.bugs && data.repository?.url) {
+ const hosted = hostedGitInfo.fromUrl(data.repository.url)
+ if (hosted && hosted.bugs()) {
+ data.bugs = { url: hosted.bugs() }
+ }
+ } else if (data.bugs) {
+ if (typeof data.bugs === 'string') {
+ if (isEmail(data.bugs)) {
+ data.bugs = { email: data.bugs }
+ /* eslint-disable-next-line node/no-deprecated-api */
+ } else if (url.parse(data.bugs).protocol) {
+ data.bugs = { url: data.bugs }
+ } else {
+ changes?.push(`Bug string field must be url, email, or {email,url}`)
+ }
+ } else {
+ for (const k in data.bugs) {
+ if (['web', 'name'].includes(k)) {
+ changes?.push(`bugs['${k}'] should probably be bugs['url'].`)
+ data.bugs.url = data.bugs[k]
+ delete data.bugs[k]
+ }
+ }
+ const oldBugs = data.bugs
+ data.bugs = {}
+ if (oldBugs.url) {
+ /* eslint-disable-next-line node/no-deprecated-api */
+ if (typeof (oldBugs.url) === 'string' && url.parse(oldBugs.url).protocol) {
+ data.bugs.url = oldBugs.url
+ } else {
+ changes?.push('bugs.url field must be a string url. Deleted.')
+ }
+ }
+ if (oldBugs.email) {
+ if (typeof (oldBugs.email) === 'string' && isEmail(oldBugs.email)) {
+ data.bugs.email = oldBugs.email
+ } else {
+ changes?.push('bugs.email field must be a string email. Deleted.')
+ }
+ }
+ }
+ if (!data.bugs.email && !data.bugs.url) {
+ delete data.bugs
+ changes?.push('Normalized value of bugs field is an empty object. Deleted.')
+ }
+ }
+ // fixKeywordsField
+ if (typeof data.keywords === 'string') {
+ data.keywords = data.keywords.split(/,\s+/)
+ }
+ if (data.keywords && !Array.isArray(data.keywords)) {
+ delete data.keywords
+ changes?.push(`keywords should be an array of strings`)
+ } else if (data.keywords) {
+ data.keywords = data.keywords.filter(function (kw) {
+ if (typeof kw !== 'string' || !kw) {
+ changes?.push(`keywords should be an array of strings`)
+ return false
+ } else {
+ return true
+ }
+ })
+ }
+ // fixBundleDependenciesField
+ const bdd = 'bundledDependencies'
+ const bd = 'bundleDependencies'
+ if (data[bdd] && !data[bd]) {
+ data[bd] = data[bdd]
+ delete data[bdd]
+ }
+ if (data[bd] && !Array.isArray(data[bd])) {
+ changes?.push(`Invalid 'bundleDependencies' list. Must be array of package names`)
+ delete data[bd]
+ } else if (data[bd]) {
+ data[bd] = data[bd].filter(function (filtered) {
+ if (!filtered || typeof filtered !== 'string') {
+ changes?.push(`Invalid bundleDependencies member: ${filtered}`)
+ return false
+ } else {
+ if (!data.dependencies) {
+ data.dependencies = {}
+ }
+ if (!Object.prototype.hasOwnProperty.call(data.dependencies, filtered)) {
+ changes?.push(`Non-dependency in bundleDependencies: ${filtered}`)
+ data.dependencies[filtered] = '*'
+ }
+ return true
+ }
+ })
+ }
+ // fixHomepageField
+ if (!data.homepage && data.repository && data.repository.url) {
+ const hosted = hostedGitInfo.fromUrl(data.repository.url)
+ if (hosted) {
+ data.homepage = hosted.docs()
+ }
+ }
+ if (data.homepage) {
+ if (typeof data.homepage !== 'string') {
+ changes?.push('homepage field must be a string url. Deleted.')
+ delete data.homepage
+ } else {
+ /* eslint-disable-next-line node/no-deprecated-api */
+ if (!url.parse(data.homepage).protocol) {
+ data.homepage = 'http://' + data.homepage
+ }
+ }
+ }
+ // fixReadmeField
+ if (!data.readme) {
+ changes?.push('No README data')
+ data.readme = 'ERROR: No README data found!'
+ }
+ // fixLicenseField
+ const license = data.license || data.licence
+ if (!license) {
+ changes?.push('No license field.')
+ } else if (typeof (license) !== 'string' || license.length < 1 || license.trim() === '') {
+ changes?.push('license should be a valid SPDX license expression')
+ } else if (!validateLicense(license).validForNewPackages) {
+ changes?.push('license should be a valid SPDX license expression')
+ }
+ // fixPeople
+ if (data.author) {
+ data.author = stringifyPerson(data.author)
+ }
+ ['maintainers', 'contributors'].forEach(function (set) {
+ if (!Array.isArray(data[set])) {
+ return
+ }
+ data[set] = data[set].map(stringifyPerson)
+ })
+ // fixTypos
+ for (const d in typos) {
+ if (Object.prototype.hasOwnProperty.call(data, d)) {
+ changes?.push(`${d} should probably be ${typos[d]}.`)
+ }
+ }
+}
+
+module.exports = { normalizeData }
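The new normalize-data.js inlines the pieces of normalize-package-data that npm still needs: `normalizeData` mutates the manifest in place and, when a `changes` array is passed, records a human-readable note for each fix-up. A minimal sketch of the calling convention:

```js
// Sketch of the helper added above; the require path assumes npm's bundled layout.
const { normalizeData } = require('@npmcli/package-json/lib/normalize-data.js')

const data = {
  bugs: 'support@example.com',      // string email  -> { email: ... }
  keywords: 'cli, package manager', // comma string  -> array of strings
  repo: 'npm/cli',                  // typo for "repository" -> warning only
}
const changes = []
normalizeData(data, changes)

console.log(data.bugs)     // { email: 'support@example.com' }
console.log(data.keywords) // [ 'cli', 'package manager' ]
console.log(changes)       // includes 'repo should probably be repository.'
```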
diff --git a/deps/npm/node_modules/@npmcli/package-json/lib/normalize.js b/deps/npm/node_modules/@npmcli/package-json/lib/normalize.js
index 3adec0143f445a..845f6753a9a00a 100644
--- a/deps/npm/node_modules/@npmcli/package-json/lib/normalize.js
+++ b/deps/npm/node_modules/@npmcli/package-json/lib/normalize.js
@@ -3,6 +3,7 @@ const clean = require('semver/functions/clean')
const fs = require('node:fs/promises')
const path = require('node:path')
const { log } = require('proc-log')
+const moduleBuiltin = require('node:module')
/**
* @type {import('hosted-git-info')}
@@ -144,7 +145,7 @@ const normalize = async (pkg, { strict, steps, root, changes, allowLegacyCase })
const pkgId = `${data.name ?? ''}@${data.version ?? ''}`
// name and version are load bearing so we have to clean them up first
- if (steps.includes('fixNameField') || steps.includes('normalizeData')) {
+ if (steps.includes('fixName') || steps.includes('fixNameField') || steps.includes('normalizeData')) {
if (!data.name && !strict) {
changes?.push('Missing "name" field was set to an empty string')
data.name = ''
@@ -170,6 +171,13 @@ const normalize = async (pkg, { strict, steps, root, changes, allowLegacyCase })
}
}
+ if (steps.includes('fixName')) {
+ // Check for conflicts with builtin modules
+ if (moduleBuiltin.builtinModules.includes(data.name)) {
+ log.warn('package-json', pkgId, `Package name "${data.name}" conflicts with a Node.js built-in module name`)
+ }
+ }
+
if (steps.includes('fixVersionField') || steps.includes('normalizeData')) {
// allow "loose" semver 1.0 versions in non-strict mode
// enforce strict semver 2.0 compliance in strict mode
@@ -348,7 +356,6 @@ const normalize = async (pkg, { strict, steps, root, changes, allowLegacyCase })
changes?.push(`"readmeFilename" was set to ${readmeFile}`)
}
if (!data.readme) {
- // this.warn('missingReadme')
data.readme = 'ERROR: No README data found!'
}
}
@@ -488,7 +495,6 @@ const normalize = async (pkg, { strict, steps, root, changes, allowLegacyCase })
// Some steps are isolated so we can do a limited subset of these in `fix`
if (steps.includes('fixRepositoryField') || steps.includes('normalizeData')) {
if (data.repositories) {
- /* eslint-disable-next-line max-len */
changes?.push(`"repository" was set to the first entry in "repositories" (${data.repository})`)
data.repository = data.repositories[0]
}
@@ -572,30 +578,10 @@ const normalize = async (pkg, { strict, steps, root, changes, allowLegacyCase })
}
}
+  // TODO: some of this is duplicated in other steps here; a future breaking change may be able to remove the duplication in this step
if (steps.includes('normalizeData')) {
- const legacyFixer = require('normalize-package-data/lib/fixer.js')
- const legacyMakeWarning = require('normalize-package-data/lib/make_warning.js')
- legacyFixer.warn = function () {
- changes?.push(legacyMakeWarning.apply(null, arguments))
- }
-
- const legacySteps = [
- 'fixDescriptionField',
- 'fixModulesField',
- 'fixFilesField',
- 'fixManField',
- 'fixBugsField',
- 'fixKeywordsField',
- 'fixBundleDependenciesField',
- 'fixHomepageField',
- 'fixReadmeField',
- 'fixLicenseField',
- 'fixPeople',
- 'fixTypos',
- ]
- for (const legacyStep of legacySteps) {
- legacyFixer[legacyStep](data)
- }
+ const { normalizeData } = require('./normalize-data.js')
+ normalizeData(data, changes)
}
// Warn if the bin references don't point to anything. This might be better
diff --git a/deps/npm/node_modules/@npmcli/package-json/package.json b/deps/npm/node_modules/@npmcli/package-json/package.json
index 97070e27d0d22e..263d67ff3bc5bf 100644
--- a/deps/npm/node_modules/@npmcli/package-json/package.json
+++ b/deps/npm/node_modules/@npmcli/package-json/package.json
@@ -1,6 +1,6 @@
{
"name": "@npmcli/package-json",
- "version": "6.1.0",
+ "version": "6.2.0",
"description": "Programmatic API to update package.json",
"keywords": [
"npm",
@@ -33,13 +33,13 @@
"glob": "^10.2.2",
"hosted-git-info": "^8.0.0",
"json-parse-even-better-errors": "^4.0.0",
- "normalize-package-data": "^7.0.0",
"proc-log": "^5.0.0",
- "semver": "^7.5.3"
+ "semver": "^7.5.3",
+ "validate-npm-package-license": "^3.0.4"
},
"devDependencies": {
- "@npmcli/eslint-config": "^5.0.0",
- "@npmcli/template-oss": "4.23.5",
+ "@npmcli/eslint-config": "^5.1.0",
+ "@npmcli/template-oss": "4.23.6",
"read-package-json": "^7.0.0",
"read-package-json-fast": "^4.0.0",
"tap": "^16.0.1"
@@ -49,7 +49,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.23.5",
+ "version": "4.23.6",
"publish": "true"
},
"tap": {
diff --git a/deps/npm/node_modules/@npmcli/query/package.json b/deps/npm/node_modules/@npmcli/query/package.json
index 14f7fbfaac0168..20660b227834d9 100644
--- a/deps/npm/node_modules/@npmcli/query/package.json
+++ b/deps/npm/node_modules/@npmcli/query/package.json
@@ -1,6 +1,6 @@
{
"name": "@npmcli/query",
- "version": "4.0.0",
+ "version": "4.0.1",
"description": "npm query parser and tools",
"main": "lib/index.js",
"scripts": {
@@ -49,7 +49,7 @@
"tap": "^16.2.0"
},
"dependencies": {
- "postcss-selector-parser": "^6.1.2"
+ "postcss-selector-parser": "^7.0.0"
},
"repository": {
"type": "git",
diff --git a/deps/npm/node_modules/@npmcli/redact/lib/deep-map.js b/deps/npm/node_modules/@npmcli/redact/lib/deep-map.js
index b555cf9fc4c8b8..c14857c2c01b17 100644
--- a/deps/npm/node_modules/@npmcli/redact/lib/deep-map.js
+++ b/deps/npm/node_modules/@npmcli/redact/lib/deep-map.js
@@ -1,22 +1,15 @@
-function filterError (input) {
- return {
- errorType: input.name,
- message: input.message,
- stack: input.stack,
- ...(input.code ? { code: input.code } : {}),
- ...(input.statusCode ? { statusCode: input.statusCode } : {}),
- }
-}
+const { serializeError } = require('./error')
const deepMap = (input, handler = v => v, path = ['$'], seen = new Set([input])) => {
// this is in an effort to maintain bole's error logging behavior
if (path.join('.') === '$' && input instanceof Error) {
- return deepMap({ err: filterError(input) }, handler, path, seen)
+ return deepMap({ err: serializeError(input) }, handler, path, seen)
}
if (input instanceof Error) {
- return deepMap(filterError(input), handler, path, seen)
+ return deepMap(serializeError(input), handler, path, seen)
}
- if (input instanceof Buffer) {
+  // allows for non-Node.js environments, such as workers
+ if (typeof Buffer !== 'undefined' && input instanceof Buffer) {
return `[unable to log instanceof buffer]`
}
if (input instanceof Uint8Array) {
diff --git a/deps/npm/node_modules/@npmcli/redact/lib/error.js b/deps/npm/node_modules/@npmcli/redact/lib/error.js
new file mode 100644
index 00000000000000..e374b3902a2853
--- /dev/null
+++ b/deps/npm/node_modules/@npmcli/redact/lib/error.js
@@ -0,0 +1,28 @@
+/** takes an error object and serializes it to a plain object */
+function serializeError (input) {
+ if (!(input instanceof Error)) {
+ if (typeof input === 'string') {
+ const error = new Error(`attempted to serialize a non-error, string String, "${input}"`)
+ return serializeError(error)
+ }
+ const error = new Error(`attempted to serialize a non-error, ${typeof input} ${input?.constructor?.name}`)
+ return serializeError(error)
+ }
+ // different error objects store status code differently
+ // AxiosError uses `status`, other services use `statusCode`
+ const statusCode = input.statusCode ?? input.status
+  // CAUTION: what we serialize here gets added to the size of the logs
+ return {
+ errorType: input.errorType ?? input.constructor.name,
+ ...(input.message ? { message: input.message } : {}),
+ ...(input.stack ? { stack: input.stack } : {}),
+ // think of this as error code
+ ...(input.code ? { code: input.code } : {}),
+ // think of this as http status code
+ ...(statusCode ? { statusCode } : {}),
+ }
+}
+
+module.exports = {
+ serializeError,
+}
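serializeError turns anything thrown into a plain, log-friendly object, reading the HTTP status from either `statusCode` or an Axios-style `status`, and wrapping non-Error values in a synthetic Error first. A sketch of the shapes it produces:

```js
// Sketch, assuming the module added above.
const { serializeError } = require('@npmcli/redact/lib/error.js')

const httpErr = Object.assign(new Error('404 Not Found'), { code: 'E404', status: 404 })
console.log(serializeError(httpErr))
// { errorType: 'Error', message: '404 Not Found', stack: '...', code: 'E404', statusCode: 404 }

console.log(serializeError('oops').message)
// 'attempted to serialize a non-error, string String, "oops"'
```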
diff --git a/deps/npm/node_modules/@npmcli/redact/lib/matchers.js b/deps/npm/node_modules/@npmcli/redact/lib/matchers.js
index fe9b9071de8a16..854ba8e1cbda14 100644
--- a/deps/npm/node_modules/@npmcli/redact/lib/matchers.js
+++ b/deps/npm/node_modules/@npmcli/redact/lib/matchers.js
@@ -44,6 +44,12 @@ const DEEP_HEADER_SET_COOKIE = {
replacement: '[REDACTED_HEADER_SET_COOKIE]',
}
+const DEEP_HEADER_COOKIE = {
+ type: TYPE_PATH,
+ predicate: ({ path }) => path.endsWith('.headers.cookie'),
+ replacement: '[REDACTED_HEADER_COOKIE]',
+}
+
const REWRITE_REQUEST = {
type: TYPE_PATH,
predicate: ({ path }) => path.endsWith('.request'),
@@ -76,6 +82,7 @@ module.exports = {
URL_MATCHER,
DEEP_HEADER_AUTHORIZATION,
DEEP_HEADER_SET_COOKIE,
+ DEEP_HEADER_COOKIE,
REWRITE_REQUEST,
REWRITE_RESPONSE,
}
diff --git a/deps/npm/node_modules/@npmcli/redact/lib/server.js b/deps/npm/node_modules/@npmcli/redact/lib/server.js
index 669e834da6131d..555e37dcc1f54c 100644
--- a/deps/npm/node_modules/@npmcli/redact/lib/server.js
+++ b/deps/npm/node_modules/@npmcli/redact/lib/server.js
@@ -6,6 +6,7 @@ const {
DEEP_HEADER_SET_COOKIE,
REWRITE_REQUEST,
REWRITE_RESPONSE,
+ DEEP_HEADER_COOKIE,
} = require('./matchers')
const {
@@ -14,6 +15,8 @@ const {
redactMatchers,
} = require('./utils')
+const { serializeError } = require('./error')
+
const { deepMap } = require('./deep-map')
const _redact = redactMatchers(
@@ -22,6 +25,7 @@ const _redact = redactMatchers(
JSON_WEB_TOKEN,
DEEP_HEADER_AUTHORIZATION,
DEEP_HEADER_SET_COOKIE,
+ DEEP_HEADER_COOKIE,
REWRITE_REQUEST,
REWRITE_RESPONSE,
redactUrlMatcher(
@@ -31,4 +35,25 @@ const _redact = redactMatchers(
const redact = (input) => deepMap(input, (value, path) => _redact(value, { path }))
-module.exports = { redact }
+/** takes an error and returns a new error, keeping some custom properties */
+function redactError (input) {
+ const { message, ...data } = serializeError(input)
+ const output = new Error(redact(message))
+ return Object.assign(output, redact(data))
+}
+
+/** runs a function within try / catch and throws any caught error wrapped by redactError */
+function redactThrow (func) {
+ if (typeof func !== 'function') {
+ throw new Error('redactThrow expects a function')
+ }
+ return async (...args) => {
+ try {
+ return await func(...args)
+ } catch (error) {
+ throw redactError(error)
+ }
+ }
+}
+
+module.exports = { redact, redactError, redactThrow }
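redactThrow wraps an async function so that whatever it throws is rebuilt via redactError, with the message and the serialized properties passed through `redact` first. A hedged usage sketch; the wrapped fetch function below is hypothetical:

```js
// Sketch only; `fetchPackument` is a made-up async helper.
const { redactThrow } = require('@npmcli/redact/lib/server.js')

const safeFetch = redactThrow(async function fetchPackument (name) {
  throw Object.assign(
    new Error(`GET https://user:hunter2@registry.example/${name} failed`),
    { statusCode: 401 }
  )
})

safeFetch('my-pkg').catch(err => {
  console.log(err.message)    // sensitive parts of the URL should be scrubbed by the matchers
  console.log(err.statusCode) // 401, preserved from the serialized error
})
```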
diff --git a/deps/npm/node_modules/@npmcli/redact/package.json b/deps/npm/node_modules/@npmcli/redact/package.json
index 649f82ef5ca89b..b5070113b1330c 100644
--- a/deps/npm/node_modules/@npmcli/redact/package.json
+++ b/deps/npm/node_modules/@npmcli/redact/package.json
@@ -1,6 +1,6 @@
{
"name": "@npmcli/redact",
- "version": "3.0.0",
+ "version": "3.2.2",
"description": "Redact sensitive npm information from output",
"main": "lib/index.js",
"exports": {
@@ -31,7 +31,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.23.3",
+ "version": "4.24.3",
"publish": true
},
"tap": {
@@ -43,7 +43,7 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^5.0.0",
- "@npmcli/template-oss": "4.23.3",
+ "@npmcli/template-oss": "4.24.3",
"tap": "^16.3.10"
},
"engines": {
diff --git a/deps/npm/node_modules/@npmcli/run-script/lib/make-spawn-args.js b/deps/npm/node_modules/@npmcli/run-script/lib/make-spawn-args.js
index 8a32d7198cb2e2..1c9f02c062f726 100644
--- a/deps/npm/node_modules/@npmcli/run-script/lib/make-spawn-args.js
+++ b/deps/npm/node_modules/@npmcli/run-script/lib/make-spawn-args.js
@@ -1,21 +1,34 @@
/* eslint camelcase: "off" */
const setPATH = require('./set-path.js')
const { resolve } = require('path')
-const npm_config_node_gyp = require.resolve('node-gyp/bin/node-gyp.js')
+
+let npm_config_node_gyp
const makeSpawnArgs = options => {
const {
+ args,
+ binPaths,
+ cmd,
+ env,
event,
+ nodeGyp,
path,
scriptShell = true,
- binPaths,
- env,
stdio,
- cmd,
- args,
stdioString,
} = options
+ if (nodeGyp) {
+ // npm already pulled this from env and passes it in to options
+ npm_config_node_gyp = nodeGyp
+ } else if (env.npm_config_node_gyp) {
+ // legacy mode for standalone user
+ npm_config_node_gyp = env.npm_config_node_gyp
+ } else {
+ // default
+ npm_config_node_gyp = require.resolve('node-gyp/bin/node-gyp.js')
+ }
+
const spawnEnv = setPATH(path, binPaths, {
// we need to at least save the PATH environment var
...process.env,
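The node-gyp script is now resolved lazily with a clear precedence: an explicit `nodeGyp` option wins, then a pre-existing `npm_config_node_gyp` environment variable, then the bundled node-gyp. A condensed restatement of that decision, outside the module:

```js
// Illustrative restatement of the precedence, not the module itself.
function resolveNodeGyp ({ nodeGyp, env }) {
  if (nodeGyp) return nodeGyp                                 // npm passes this in via options
  if (env.npm_config_node_gyp) return env.npm_config_node_gyp // legacy standalone usage
  return require.resolve('node-gyp/bin/node-gyp.js')          // bundled default
}
```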
diff --git a/deps/npm/node_modules/@npmcli/run-script/lib/run-script-pkg.js b/deps/npm/node_modules/@npmcli/run-script/lib/run-script-pkg.js
index 9900c96315f85f..161caebb98d975 100644
--- a/deps/npm/node_modules/@npmcli/run-script/lib/run-script-pkg.js
+++ b/deps/npm/node_modules/@npmcli/run-script/lib/run-script-pkg.js
@@ -7,18 +7,19 @@ const isServerPackage = require('./is-server-package.js')
const runScriptPkg = async options => {
const {
- event,
- path,
- scriptShell,
+ args = [],
binPaths = false,
env = {},
- stdio = 'pipe',
+ event,
+ nodeGyp,
+ path,
pkg,
- args = [],
- stdioString,
+ scriptShell,
// how long to wait for a process.kill signal
// only exposed here so that we can make the test go a bit faster.
signalTimeout = 500,
+ stdio = 'pipe',
+ stdioString,
} = options
const { scripts = {}, gypfile } = pkg
@@ -63,14 +64,15 @@ const runScriptPkg = async options => {
}
const [spawnShell, spawnArgs, spawnOpts] = makeSpawnArgs({
+ args,
+ binPaths,
+ cmd,
+ env: { ...env, ...packageEnvs(pkg) },
event,
+ nodeGyp,
path,
scriptShell,
- binPaths,
- env: { ...env, ...packageEnvs(pkg) },
stdio,
- cmd,
- args,
stdioString,
})
diff --git a/deps/npm/node_modules/@npmcli/run-script/package.json b/deps/npm/node_modules/@npmcli/run-script/package.json
index 38a2ac9f87772b..6003a73943ecf0 100644
--- a/deps/npm/node_modules/@npmcli/run-script/package.json
+++ b/deps/npm/node_modules/@npmcli/run-script/package.json
@@ -1,6 +1,6 @@
{
"name": "@npmcli/run-script",
- "version": "9.0.2",
+ "version": "9.1.0",
"description": "Run a lifecycle script for a package (descendant of npm-lifecycle)",
"author": "GitHub Inc.",
"license": "ISC",
@@ -16,7 +16,7 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^5.0.0",
- "@npmcli/template-oss": "4.23.4",
+ "@npmcli/template-oss": "4.24.1",
"spawk": "^1.8.1",
"tap": "^16.0.1"
},
@@ -42,7 +42,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.23.4",
+ "version": "4.24.1",
"publish": "true"
},
"tap": {
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
index 0c367a8384454c..3c9abff8899b5b 100644
--- a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
@@ -1,88 +1,58 @@
"use strict";
-/* eslint-disable */
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+// protoc-gen-ts_proto v2.7.0
+// protoc v6.30.2
+// source: envelope.proto
Object.defineProperty(exports, "__esModule", { value: true });
exports.Signature = exports.Envelope = void 0;
-function createBaseEnvelope() {
- return { payload: Buffer.alloc(0), payloadType: "", signatures: [] };
-}
exports.Envelope = {
fromJSON(object) {
return {
payload: isSet(object.payload) ? Buffer.from(bytesFromBase64(object.payload)) : Buffer.alloc(0),
- payloadType: isSet(object.payloadType) ? String(object.payloadType) : "",
- signatures: Array.isArray(object?.signatures) ? object.signatures.map((e) => exports.Signature.fromJSON(e)) : [],
+ payloadType: isSet(object.payloadType) ? globalThis.String(object.payloadType) : "",
+ signatures: globalThis.Array.isArray(object?.signatures)
+ ? object.signatures.map((e) => exports.Signature.fromJSON(e))
+ : [],
};
},
toJSON(message) {
const obj = {};
- message.payload !== undefined &&
- (obj.payload = base64FromBytes(message.payload !== undefined ? message.payload : Buffer.alloc(0)));
- message.payloadType !== undefined && (obj.payloadType = message.payloadType);
- if (message.signatures) {
- obj.signatures = message.signatures.map((e) => e ? exports.Signature.toJSON(e) : undefined);
+ if (message.payload.length !== 0) {
+ obj.payload = base64FromBytes(message.payload);
+ }
+ if (message.payloadType !== "") {
+ obj.payloadType = message.payloadType;
}
- else {
- obj.signatures = [];
+ if (message.signatures?.length) {
+ obj.signatures = message.signatures.map((e) => exports.Signature.toJSON(e));
}
return obj;
},
};
-function createBaseSignature() {
- return { sig: Buffer.alloc(0), keyid: "" };
-}
exports.Signature = {
fromJSON(object) {
return {
sig: isSet(object.sig) ? Buffer.from(bytesFromBase64(object.sig)) : Buffer.alloc(0),
- keyid: isSet(object.keyid) ? String(object.keyid) : "",
+ keyid: isSet(object.keyid) ? globalThis.String(object.keyid) : "",
};
},
toJSON(message) {
const obj = {};
- message.sig !== undefined && (obj.sig = base64FromBytes(message.sig !== undefined ? message.sig : Buffer.alloc(0)));
- message.keyid !== undefined && (obj.keyid = message.keyid);
+ if (message.sig.length !== 0) {
+ obj.sig = base64FromBytes(message.sig);
+ }
+ if (message.keyid !== "") {
+ obj.keyid = message.keyid;
+ }
return obj;
},
};
-var tsProtoGlobalThis = (() => {
- if (typeof globalThis !== "undefined") {
- return globalThis;
- }
- if (typeof self !== "undefined") {
- return self;
- }
- if (typeof window !== "undefined") {
- return window;
- }
- if (typeof global !== "undefined") {
- return global;
- }
- throw "Unable to locate global object";
-})();
function bytesFromBase64(b64) {
- if (tsProtoGlobalThis.Buffer) {
- return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
- }
- else {
- const bin = tsProtoGlobalThis.atob(b64);
- const arr = new Uint8Array(bin.length);
- for (let i = 0; i < bin.length; ++i) {
- arr[i] = bin.charCodeAt(i);
- }
- return arr;
- }
+ return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
}
function base64FromBytes(arr) {
- if (tsProtoGlobalThis.Buffer) {
- return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
- }
- else {
- const bin = [];
- arr.forEach((byte) => {
- bin.push(String.fromCharCode(byte));
- });
- return tsProtoGlobalThis.btoa(bin.join(""));
- }
+ return globalThis.Buffer.from(arr).toString("base64");
}
function isSet(value) {
return value !== null && value !== undefined;
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
index 073093b8371a8f..46904b7ec64d94 100644
--- a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
@@ -1,19 +1,21 @@
"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+// protoc-gen-ts_proto v2.7.0
+// protoc v6.30.2
+// source: events.proto
Object.defineProperty(exports, "__esModule", { value: true });
exports.CloudEventBatch = exports.CloudEvent_CloudEventAttributeValue = exports.CloudEvent_AttributesEntry = exports.CloudEvent = void 0;
/* eslint-disable */
const any_1 = require("./google/protobuf/any");
const timestamp_1 = require("./google/protobuf/timestamp");
-function createBaseCloudEvent() {
- return { id: "", source: "", specVersion: "", type: "", attributes: {}, data: undefined };
-}
exports.CloudEvent = {
fromJSON(object) {
return {
- id: isSet(object.id) ? String(object.id) : "",
- source: isSet(object.source) ? String(object.source) : "",
- specVersion: isSet(object.specVersion) ? String(object.specVersion) : "",
- type: isSet(object.type) ? String(object.type) : "",
+ id: isSet(object.id) ? globalThis.String(object.id) : "",
+ source: isSet(object.source) ? globalThis.String(object.source) : "",
+ specVersion: isSet(object.specVersion) ? globalThis.String(object.specVersion) : "",
+ type: isSet(object.type) ? globalThis.String(object.type) : "",
attributes: isObject(object.attributes)
? Object.entries(object.attributes).reduce((acc, [key, value]) => {
acc[key] = exports.CloudEvent_CloudEventAttributeValue.fromJSON(value);
@@ -23,7 +25,7 @@ exports.CloudEvent = {
data: isSet(object.binaryData)
? { $case: "binaryData", binaryData: Buffer.from(bytesFromBase64(object.binaryData)) }
: isSet(object.textData)
- ? { $case: "textData", textData: String(object.textData) }
+ ? { $case: "textData", textData: globalThis.String(object.textData) }
: isSet(object.protoData)
? { $case: "protoData", protoData: any_1.Any.fromJSON(object.protoData) }
: undefined,
@@ -31,60 +33,72 @@ exports.CloudEvent = {
},
toJSON(message) {
const obj = {};
- message.id !== undefined && (obj.id = message.id);
- message.source !== undefined && (obj.source = message.source);
- message.specVersion !== undefined && (obj.specVersion = message.specVersion);
- message.type !== undefined && (obj.type = message.type);
- obj.attributes = {};
+ if (message.id !== "") {
+ obj.id = message.id;
+ }
+ if (message.source !== "") {
+ obj.source = message.source;
+ }
+ if (message.specVersion !== "") {
+ obj.specVersion = message.specVersion;
+ }
+ if (message.type !== "") {
+ obj.type = message.type;
+ }
if (message.attributes) {
- Object.entries(message.attributes).forEach(([k, v]) => {
- obj.attributes[k] = exports.CloudEvent_CloudEventAttributeValue.toJSON(v);
- });
- }
- message.data?.$case === "binaryData" &&
- (obj.binaryData = message.data?.binaryData !== undefined ? base64FromBytes(message.data?.binaryData) : undefined);
- message.data?.$case === "textData" && (obj.textData = message.data?.textData);
- message.data?.$case === "protoData" &&
- (obj.protoData = message.data?.protoData ? any_1.Any.toJSON(message.data?.protoData) : undefined);
+ const entries = Object.entries(message.attributes);
+ if (entries.length > 0) {
+ obj.attributes = {};
+ entries.forEach(([k, v]) => {
+ obj.attributes[k] = exports.CloudEvent_CloudEventAttributeValue.toJSON(v);
+ });
+ }
+ }
+ if (message.data?.$case === "binaryData") {
+ obj.binaryData = base64FromBytes(message.data.binaryData);
+ }
+ else if (message.data?.$case === "textData") {
+ obj.textData = message.data.textData;
+ }
+ else if (message.data?.$case === "protoData") {
+ obj.protoData = any_1.Any.toJSON(message.data.protoData);
+ }
return obj;
},
};
-function createBaseCloudEvent_AttributesEntry() {
- return { key: "", value: undefined };
-}
exports.CloudEvent_AttributesEntry = {
fromJSON(object) {
return {
- key: isSet(object.key) ? String(object.key) : "",
+ key: isSet(object.key) ? globalThis.String(object.key) : "",
value: isSet(object.value) ? exports.CloudEvent_CloudEventAttributeValue.fromJSON(object.value) : undefined,
};
},
toJSON(message) {
const obj = {};
- message.key !== undefined && (obj.key = message.key);
- message.value !== undefined &&
- (obj.value = message.value ? exports.CloudEvent_CloudEventAttributeValue.toJSON(message.value) : undefined);
+ if (message.key !== "") {
+ obj.key = message.key;
+ }
+ if (message.value !== undefined) {
+ obj.value = exports.CloudEvent_CloudEventAttributeValue.toJSON(message.value);
+ }
return obj;
},
};
-function createBaseCloudEvent_CloudEventAttributeValue() {
- return { attr: undefined };
-}
exports.CloudEvent_CloudEventAttributeValue = {
fromJSON(object) {
return {
attr: isSet(object.ceBoolean)
- ? { $case: "ceBoolean", ceBoolean: Boolean(object.ceBoolean) }
+ ? { $case: "ceBoolean", ceBoolean: globalThis.Boolean(object.ceBoolean) }
: isSet(object.ceInteger)
- ? { $case: "ceInteger", ceInteger: Number(object.ceInteger) }
+ ? { $case: "ceInteger", ceInteger: globalThis.Number(object.ceInteger) }
: isSet(object.ceString)
- ? { $case: "ceString", ceString: String(object.ceString) }
+ ? { $case: "ceString", ceString: globalThis.String(object.ceString) }
: isSet(object.ceBytes)
? { $case: "ceBytes", ceBytes: Buffer.from(bytesFromBase64(object.ceBytes)) }
: isSet(object.ceUri)
- ? { $case: "ceUri", ceUri: String(object.ceUri) }
+ ? { $case: "ceUri", ceUri: globalThis.String(object.ceUri) }
: isSet(object.ceUriRef)
- ? { $case: "ceUriRef", ceUriRef: String(object.ceUriRef) }
+ ? { $case: "ceUriRef", ceUriRef: globalThis.String(object.ceUriRef) }
: isSet(object.ceTimestamp)
? { $case: "ceTimestamp", ceTimestamp: fromJsonTimestamp(object.ceTimestamp) }
: undefined,
@@ -92,86 +106,61 @@ exports.CloudEvent_CloudEventAttributeValue = {
},
toJSON(message) {
const obj = {};
- message.attr?.$case === "ceBoolean" && (obj.ceBoolean = message.attr?.ceBoolean);
- message.attr?.$case === "ceInteger" && (obj.ceInteger = Math.round(message.attr?.ceInteger));
- message.attr?.$case === "ceString" && (obj.ceString = message.attr?.ceString);
- message.attr?.$case === "ceBytes" &&
- (obj.ceBytes = message.attr?.ceBytes !== undefined ? base64FromBytes(message.attr?.ceBytes) : undefined);
- message.attr?.$case === "ceUri" && (obj.ceUri = message.attr?.ceUri);
- message.attr?.$case === "ceUriRef" && (obj.ceUriRef = message.attr?.ceUriRef);
- message.attr?.$case === "ceTimestamp" && (obj.ceTimestamp = message.attr?.ceTimestamp.toISOString());
+ if (message.attr?.$case === "ceBoolean") {
+ obj.ceBoolean = message.attr.ceBoolean;
+ }
+ else if (message.attr?.$case === "ceInteger") {
+ obj.ceInteger = Math.round(message.attr.ceInteger);
+ }
+ else if (message.attr?.$case === "ceString") {
+ obj.ceString = message.attr.ceString;
+ }
+ else if (message.attr?.$case === "ceBytes") {
+ obj.ceBytes = base64FromBytes(message.attr.ceBytes);
+ }
+ else if (message.attr?.$case === "ceUri") {
+ obj.ceUri = message.attr.ceUri;
+ }
+ else if (message.attr?.$case === "ceUriRef") {
+ obj.ceUriRef = message.attr.ceUriRef;
+ }
+ else if (message.attr?.$case === "ceTimestamp") {
+ obj.ceTimestamp = message.attr.ceTimestamp.toISOString();
+ }
return obj;
},
};
-function createBaseCloudEventBatch() {
- return { events: [] };
-}
exports.CloudEventBatch = {
fromJSON(object) {
- return { events: Array.isArray(object?.events) ? object.events.map((e) => exports.CloudEvent.fromJSON(e)) : [] };
+ return {
+ events: globalThis.Array.isArray(object?.events) ? object.events.map((e) => exports.CloudEvent.fromJSON(e)) : [],
+ };
},
toJSON(message) {
const obj = {};
- if (message.events) {
- obj.events = message.events.map((e) => e ? exports.CloudEvent.toJSON(e) : undefined);
- }
- else {
- obj.events = [];
+ if (message.events?.length) {
+ obj.events = message.events.map((e) => exports.CloudEvent.toJSON(e));
}
return obj;
},
};
-var tsProtoGlobalThis = (() => {
- if (typeof globalThis !== "undefined") {
- return globalThis;
- }
- if (typeof self !== "undefined") {
- return self;
- }
- if (typeof window !== "undefined") {
- return window;
- }
- if (typeof global !== "undefined") {
- return global;
- }
- throw "Unable to locate global object";
-})();
function bytesFromBase64(b64) {
- if (tsProtoGlobalThis.Buffer) {
- return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
- }
- else {
- const bin = tsProtoGlobalThis.atob(b64);
- const arr = new Uint8Array(bin.length);
- for (let i = 0; i < bin.length; ++i) {
- arr[i] = bin.charCodeAt(i);
- }
- return arr;
- }
+ return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
}
function base64FromBytes(arr) {
- if (tsProtoGlobalThis.Buffer) {
- return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
- }
- else {
- const bin = [];
- arr.forEach((byte) => {
- bin.push(String.fromCharCode(byte));
- });
- return tsProtoGlobalThis.btoa(bin.join(""));
- }
+ return globalThis.Buffer.from(arr).toString("base64");
}
function fromTimestamp(t) {
- let millis = Number(t.seconds) * 1000;
- millis += t.nanos / 1000000;
- return new Date(millis);
+ let millis = (globalThis.Number(t.seconds) || 0) * 1_000;
+ millis += (t.nanos || 0) / 1_000_000;
+ return new globalThis.Date(millis);
}
function fromJsonTimestamp(o) {
- if (o instanceof Date) {
+ if (o instanceof globalThis.Date) {
return o;
}
else if (typeof o === "string") {
- return new Date(o);
+ return new globalThis.Date(o);
}
else {
return fromTimestamp(timestamp_1.Timestamp.fromJSON(o));
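The regenerated cloudevents module keeps the ts-proto oneof convention: the `data` field is a `$case`-discriminated object, and `toJSON` now emits only the active branch while dropping empty-string scalars and empty attribute maps. A minimal sketch of that behavior follows; the require path and the event literal are placeholders, not part of the patch.

```js
// Hypothetical usage of the regenerated CloudEvent helpers shown above.
// './cloudevents' stands in for the generated module's real path.
const { CloudEvent } = require('./cloudevents');

const event = {
  id: '1',
  source: '/demo',
  specVersion: '1.0',
  type: 'example',
  attributes: {},
  data: { $case: 'textData', textData: 'hello' },
};

const json = CloudEvent.toJSON(event);
// json.textData === 'hello'; binaryData/protoData are absent because only the
// active $case branch is serialized, and the empty attributes map is no longer
// emitted as an empty object.
```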
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
index da627499ad7659..14e559a5e0126c 100644
--- a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
@@ -1,7 +1,14 @@
"use strict";
-/* eslint-disable */
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+// protoc-gen-ts_proto v2.7.0
+// protoc v6.30.2
+// source: google/api/field_behavior.proto
Object.defineProperty(exports, "__esModule", { value: true });
-exports.fieldBehaviorToJSON = exports.fieldBehaviorFromJSON = exports.FieldBehavior = void 0;
+exports.FieldBehavior = void 0;
+exports.fieldBehaviorFromJSON = fieldBehaviorFromJSON;
+exports.fieldBehaviorToJSON = fieldBehaviorToJSON;
+/* eslint-disable */
/**
* An indicator of the behavior of a given field (for example, that a field
* is required in requests, or given as output but ignored as input).
@@ -48,11 +55,33 @@ var FieldBehavior;
/**
* UNORDERED_LIST - Denotes that a (repeated) field is an unordered list.
* This indicates that the service may provide the elements of the list
- * in any arbitrary order, rather than the order the user originally
+ * in any arbitrary order, rather than the order the user originally
* provided. Additionally, the list's order may or may not be stable.
*/
FieldBehavior[FieldBehavior["UNORDERED_LIST"] = 6] = "UNORDERED_LIST";
-})(FieldBehavior = exports.FieldBehavior || (exports.FieldBehavior = {}));
+ /**
+ * NON_EMPTY_DEFAULT - Denotes that this field returns a non-empty default value if not set.
+ * This indicates that if the user provides the empty value in a request,
+ * a non-empty value will be returned. The user will not be aware of what
+ * non-empty value to expect.
+ */
+ FieldBehavior[FieldBehavior["NON_EMPTY_DEFAULT"] = 7] = "NON_EMPTY_DEFAULT";
+ /**
+ * IDENTIFIER - Denotes that the field in a resource (a message annotated with
+ * google.api.resource) is used in the resource name to uniquely identify the
+ * resource. For AIP-compliant APIs, this should only be applied to the
+ * `name` field on the resource.
+ *
+ * This behavior should not be applied to references to other resources within
+ * the message.
+ *
+ * The identifier field of resources often have different field behavior
+ * depending on the request it is embedded in (e.g. for Create methods name
+ * is optional and unused, while for Update methods it is required). Instead
+ * of method-specific annotations, only `IDENTIFIER` is required.
+ */
+ FieldBehavior[FieldBehavior["IDENTIFIER"] = 8] = "IDENTIFIER";
+})(FieldBehavior || (exports.FieldBehavior = FieldBehavior = {}));
function fieldBehaviorFromJSON(object) {
switch (object) {
case 0:
@@ -76,11 +105,16 @@ function fieldBehaviorFromJSON(object) {
case 6:
case "UNORDERED_LIST":
return FieldBehavior.UNORDERED_LIST;
+ case 7:
+ case "NON_EMPTY_DEFAULT":
+ return FieldBehavior.NON_EMPTY_DEFAULT;
+ case 8:
+ case "IDENTIFIER":
+ return FieldBehavior.IDENTIFIER;
default:
- throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
}
}
-exports.fieldBehaviorFromJSON = fieldBehaviorFromJSON;
function fieldBehaviorToJSON(object) {
switch (object) {
case FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED:
@@ -97,23 +131,11 @@ function fieldBehaviorToJSON(object) {
return "IMMUTABLE";
case FieldBehavior.UNORDERED_LIST:
return "UNORDERED_LIST";
+ case FieldBehavior.NON_EMPTY_DEFAULT:
+ return "NON_EMPTY_DEFAULT";
+ case FieldBehavior.IDENTIFIER:
+ return "IDENTIFIER";
default:
- throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
}
}
-exports.fieldBehaviorToJSON = fieldBehaviorToJSON;
-var tsProtoGlobalThis = (() => {
- if (typeof globalThis !== "undefined") {
- return globalThis;
- }
- if (typeof self !== "undefined") {
- return self;
- }
- if (typeof window !== "undefined") {
- return window;
- }
- if (typeof global !== "undefined") {
- return global;
- }
- throw "Unable to locate global object";
-})();
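For the widened FieldBehavior enum, the regenerated converters accept both numeric and string forms and reject anything else with a plain `globalThis.Error`. The sketch below is illustrative only; the require path is a placeholder for the generated module above.

```js
// Hypothetical round-trip through the regenerated enum helpers.
const { FieldBehavior, fieldBehaviorFromJSON, fieldBehaviorToJSON } = require('./field_behavior');

console.log(fieldBehaviorFromJSON('IDENTIFIER') === FieldBehavior.IDENTIFIER); // true
console.log(fieldBehaviorToJSON(FieldBehavior.NON_EMPTY_DEFAULT));             // "NON_EMPTY_DEFAULT"

try {
  fieldBehaviorFromJSON(42); // not a known value
} catch (err) {
  console.log(err.message); // "Unrecognized enum value 42 for enum FieldBehavior"
}
```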
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
index 6b3f3c97a66476..bc461887e318a0 100644
--- a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
@@ -1,64 +1,34 @@
"use strict";
-/* eslint-disable */
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+// protoc-gen-ts_proto v2.7.0
+// protoc v6.30.2
+// source: google/protobuf/any.proto
Object.defineProperty(exports, "__esModule", { value: true });
exports.Any = void 0;
-function createBaseAny() {
- return { typeUrl: "", value: Buffer.alloc(0) };
-}
exports.Any = {
fromJSON(object) {
return {
- typeUrl: isSet(object.typeUrl) ? String(object.typeUrl) : "",
+ typeUrl: isSet(object.typeUrl) ? globalThis.String(object.typeUrl) : "",
value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0),
};
},
toJSON(message) {
const obj = {};
- message.typeUrl !== undefined && (obj.typeUrl = message.typeUrl);
- message.value !== undefined &&
- (obj.value = base64FromBytes(message.value !== undefined ? message.value : Buffer.alloc(0)));
+ if (message.typeUrl !== "") {
+ obj.typeUrl = message.typeUrl;
+ }
+ if (message.value.length !== 0) {
+ obj.value = base64FromBytes(message.value);
+ }
return obj;
},
};
-var tsProtoGlobalThis = (() => {
- if (typeof globalThis !== "undefined") {
- return globalThis;
- }
- if (typeof self !== "undefined") {
- return self;
- }
- if (typeof window !== "undefined") {
- return window;
- }
- if (typeof global !== "undefined") {
- return global;
- }
- throw "Unable to locate global object";
-})();
function bytesFromBase64(b64) {
- if (tsProtoGlobalThis.Buffer) {
- return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
- }
- else {
- const bin = tsProtoGlobalThis.atob(b64);
- const arr = new Uint8Array(bin.length);
- for (let i = 0; i < bin.length; ++i) {
- arr[i] = bin.charCodeAt(i);
- }
- return arr;
- }
+ return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
}
function base64FromBytes(arr) {
- if (tsProtoGlobalThis.Buffer) {
- return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
- }
- else {
- const bin = [];
- arr.forEach((byte) => {
- bin.push(String.fromCharCode(byte));
- });
- return tsProtoGlobalThis.btoa(bin.join(""));
- }
+ return globalThis.Buffer.from(arr).toString("base64");
}
function isSet(value) {
return value !== null && value !== undefined;
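With the tsProtoGlobalThis shim gone, the Any codec relies on `globalThis.Buffer` for its base64 conversions and omits empty fields from JSON output. A small sketch under those assumptions; the require path is a placeholder for the generated module above.

```js
// Hypothetical round-trip through the regenerated Any helpers.
const { Any } = require('./any');

const msg = Any.fromJSON({
  typeUrl: 'type.googleapis.com/Example',
  value: Buffer.from('hi').toString('base64'), // "aGk="
});

const json = Any.toJSON(msg);
// json.typeUrl === 'type.googleapis.com/Example'
// json.value === 'aGk='; a zero-length value buffer would be omitted entirely.
```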
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
index d429aac8460436..a7d7550fc97741 100644
--- a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
@@ -1,7 +1,191 @@
"use strict";
-/* eslint-disable */
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+// protoc-gen-ts_proto v2.7.0
+// protoc v6.30.2
+// source: google/protobuf/descriptor.proto
Object.defineProperty(exports, "__esModule", { value: true });
-exports.GeneratedCodeInfo_Annotation = exports.GeneratedCodeInfo = exports.SourceCodeInfo_Location = exports.SourceCodeInfo = exports.UninterpretedOption_NamePart = exports.UninterpretedOption = exports.MethodOptions = exports.ServiceOptions = exports.EnumValueOptions = exports.EnumOptions = exports.OneofOptions = exports.FieldOptions = exports.MessageOptions = exports.FileOptions = exports.MethodDescriptorProto = exports.ServiceDescriptorProto = exports.EnumValueDescriptorProto = exports.EnumDescriptorProto_EnumReservedRange = exports.EnumDescriptorProto = exports.OneofDescriptorProto = exports.FieldDescriptorProto = exports.ExtensionRangeOptions = exports.DescriptorProto_ReservedRange = exports.DescriptorProto_ExtensionRange = exports.DescriptorProto = exports.FileDescriptorProto = exports.FileDescriptorSet = exports.methodOptions_IdempotencyLevelToJSON = exports.methodOptions_IdempotencyLevelFromJSON = exports.MethodOptions_IdempotencyLevel = exports.fieldOptions_JSTypeToJSON = exports.fieldOptions_JSTypeFromJSON = exports.FieldOptions_JSType = exports.fieldOptions_CTypeToJSON = exports.fieldOptions_CTypeFromJSON = exports.FieldOptions_CType = exports.fileOptions_OptimizeModeToJSON = exports.fileOptions_OptimizeModeFromJSON = exports.FileOptions_OptimizeMode = exports.fieldDescriptorProto_LabelToJSON = exports.fieldDescriptorProto_LabelFromJSON = exports.FieldDescriptorProto_Label = exports.fieldDescriptorProto_TypeToJSON = exports.fieldDescriptorProto_TypeFromJSON = exports.FieldDescriptorProto_Type = void 0;
+exports.GeneratedCodeInfo = exports.SourceCodeInfo_Location = exports.SourceCodeInfo = exports.FeatureSetDefaults_FeatureSetEditionDefault = exports.FeatureSetDefaults = exports.FeatureSet = exports.UninterpretedOption_NamePart = exports.UninterpretedOption = exports.MethodOptions = exports.ServiceOptions = exports.EnumValueOptions = exports.EnumOptions = exports.OneofOptions = exports.FieldOptions_FeatureSupport = exports.FieldOptions_EditionDefault = exports.FieldOptions = exports.MessageOptions = exports.FileOptions = exports.MethodDescriptorProto = exports.ServiceDescriptorProto = exports.EnumValueDescriptorProto = exports.EnumDescriptorProto_EnumReservedRange = exports.EnumDescriptorProto = exports.OneofDescriptorProto = exports.FieldDescriptorProto = exports.ExtensionRangeOptions_Declaration = exports.ExtensionRangeOptions = exports.DescriptorProto_ReservedRange = exports.DescriptorProto_ExtensionRange = exports.DescriptorProto = exports.FileDescriptorProto = exports.FileDescriptorSet = exports.GeneratedCodeInfo_Annotation_Semantic = exports.FeatureSet_EnforceNamingStyle = exports.FeatureSet_JsonFormat = exports.FeatureSet_MessageEncoding = exports.FeatureSet_Utf8Validation = exports.FeatureSet_RepeatedFieldEncoding = exports.FeatureSet_EnumType = exports.FeatureSet_FieldPresence = exports.MethodOptions_IdempotencyLevel = exports.FieldOptions_OptionTargetType = exports.FieldOptions_OptionRetention = exports.FieldOptions_JSType = exports.FieldOptions_CType = exports.FileOptions_OptimizeMode = exports.FieldDescriptorProto_Label = exports.FieldDescriptorProto_Type = exports.ExtensionRangeOptions_VerificationState = exports.Edition = void 0;
+exports.GeneratedCodeInfo_Annotation = void 0;
+exports.editionFromJSON = editionFromJSON;
+exports.editionToJSON = editionToJSON;
+exports.extensionRangeOptions_VerificationStateFromJSON = extensionRangeOptions_VerificationStateFromJSON;
+exports.extensionRangeOptions_VerificationStateToJSON = extensionRangeOptions_VerificationStateToJSON;
+exports.fieldDescriptorProto_TypeFromJSON = fieldDescriptorProto_TypeFromJSON;
+exports.fieldDescriptorProto_TypeToJSON = fieldDescriptorProto_TypeToJSON;
+exports.fieldDescriptorProto_LabelFromJSON = fieldDescriptorProto_LabelFromJSON;
+exports.fieldDescriptorProto_LabelToJSON = fieldDescriptorProto_LabelToJSON;
+exports.fileOptions_OptimizeModeFromJSON = fileOptions_OptimizeModeFromJSON;
+exports.fileOptions_OptimizeModeToJSON = fileOptions_OptimizeModeToJSON;
+exports.fieldOptions_CTypeFromJSON = fieldOptions_CTypeFromJSON;
+exports.fieldOptions_CTypeToJSON = fieldOptions_CTypeToJSON;
+exports.fieldOptions_JSTypeFromJSON = fieldOptions_JSTypeFromJSON;
+exports.fieldOptions_JSTypeToJSON = fieldOptions_JSTypeToJSON;
+exports.fieldOptions_OptionRetentionFromJSON = fieldOptions_OptionRetentionFromJSON;
+exports.fieldOptions_OptionRetentionToJSON = fieldOptions_OptionRetentionToJSON;
+exports.fieldOptions_OptionTargetTypeFromJSON = fieldOptions_OptionTargetTypeFromJSON;
+exports.fieldOptions_OptionTargetTypeToJSON = fieldOptions_OptionTargetTypeToJSON;
+exports.methodOptions_IdempotencyLevelFromJSON = methodOptions_IdempotencyLevelFromJSON;
+exports.methodOptions_IdempotencyLevelToJSON = methodOptions_IdempotencyLevelToJSON;
+exports.featureSet_FieldPresenceFromJSON = featureSet_FieldPresenceFromJSON;
+exports.featureSet_FieldPresenceToJSON = featureSet_FieldPresenceToJSON;
+exports.featureSet_EnumTypeFromJSON = featureSet_EnumTypeFromJSON;
+exports.featureSet_EnumTypeToJSON = featureSet_EnumTypeToJSON;
+exports.featureSet_RepeatedFieldEncodingFromJSON = featureSet_RepeatedFieldEncodingFromJSON;
+exports.featureSet_RepeatedFieldEncodingToJSON = featureSet_RepeatedFieldEncodingToJSON;
+exports.featureSet_Utf8ValidationFromJSON = featureSet_Utf8ValidationFromJSON;
+exports.featureSet_Utf8ValidationToJSON = featureSet_Utf8ValidationToJSON;
+exports.featureSet_MessageEncodingFromJSON = featureSet_MessageEncodingFromJSON;
+exports.featureSet_MessageEncodingToJSON = featureSet_MessageEncodingToJSON;
+exports.featureSet_JsonFormatFromJSON = featureSet_JsonFormatFromJSON;
+exports.featureSet_JsonFormatToJSON = featureSet_JsonFormatToJSON;
+exports.featureSet_EnforceNamingStyleFromJSON = featureSet_EnforceNamingStyleFromJSON;
+exports.featureSet_EnforceNamingStyleToJSON = featureSet_EnforceNamingStyleToJSON;
+exports.generatedCodeInfo_Annotation_SemanticFromJSON = generatedCodeInfo_Annotation_SemanticFromJSON;
+exports.generatedCodeInfo_Annotation_SemanticToJSON = generatedCodeInfo_Annotation_SemanticToJSON;
+/* eslint-disable */
+/** The full set of known editions. */
+var Edition;
+(function (Edition) {
+ /** EDITION_UNKNOWN - A placeholder for an unknown edition value. */
+ Edition[Edition["EDITION_UNKNOWN"] = 0] = "EDITION_UNKNOWN";
+ /**
+ * EDITION_LEGACY - A placeholder edition for specifying default behaviors *before* a feature
+ * was first introduced. This is effectively an "infinite past".
+ */
+ Edition[Edition["EDITION_LEGACY"] = 900] = "EDITION_LEGACY";
+ /**
+ * EDITION_PROTO2 - Legacy syntax "editions". These pre-date editions, but behave much like
+ * distinct editions. These can't be used to specify the edition of proto
+ * files, but feature definitions must supply proto2/proto3 defaults for
+ * backwards compatibility.
+ */
+ Edition[Edition["EDITION_PROTO2"] = 998] = "EDITION_PROTO2";
+ Edition[Edition["EDITION_PROTO3"] = 999] = "EDITION_PROTO3";
+ /**
+ * EDITION_2023 - Editions that have been released. The specific values are arbitrary and
+ * should not be depended on, but they will always be time-ordered for easy
+ * comparison.
+ */
+ Edition[Edition["EDITION_2023"] = 1000] = "EDITION_2023";
+ Edition[Edition["EDITION_2024"] = 1001] = "EDITION_2024";
+ /**
+ * EDITION_1_TEST_ONLY - Placeholder editions for testing feature resolution. These should not be
+ * used or relied on outside of tests.
+ */
+ Edition[Edition["EDITION_1_TEST_ONLY"] = 1] = "EDITION_1_TEST_ONLY";
+ Edition[Edition["EDITION_2_TEST_ONLY"] = 2] = "EDITION_2_TEST_ONLY";
+ Edition[Edition["EDITION_99997_TEST_ONLY"] = 99997] = "EDITION_99997_TEST_ONLY";
+ Edition[Edition["EDITION_99998_TEST_ONLY"] = 99998] = "EDITION_99998_TEST_ONLY";
+ Edition[Edition["EDITION_99999_TEST_ONLY"] = 99999] = "EDITION_99999_TEST_ONLY";
+ /**
+ * EDITION_MAX - Placeholder for specifying unbounded edition support. This should only
+ * ever be used by plugins that can expect to never require any changes to
+ * support a new edition.
+ */
+ Edition[Edition["EDITION_MAX"] = 2147483647] = "EDITION_MAX";
+})(Edition || (exports.Edition = Edition = {}));
+function editionFromJSON(object) {
+ switch (object) {
+ case 0:
+ case "EDITION_UNKNOWN":
+ return Edition.EDITION_UNKNOWN;
+ case 900:
+ case "EDITION_LEGACY":
+ return Edition.EDITION_LEGACY;
+ case 998:
+ case "EDITION_PROTO2":
+ return Edition.EDITION_PROTO2;
+ case 999:
+ case "EDITION_PROTO3":
+ return Edition.EDITION_PROTO3;
+ case 1000:
+ case "EDITION_2023":
+ return Edition.EDITION_2023;
+ case 1001:
+ case "EDITION_2024":
+ return Edition.EDITION_2024;
+ case 1:
+ case "EDITION_1_TEST_ONLY":
+ return Edition.EDITION_1_TEST_ONLY;
+ case 2:
+ case "EDITION_2_TEST_ONLY":
+ return Edition.EDITION_2_TEST_ONLY;
+ case 99997:
+ case "EDITION_99997_TEST_ONLY":
+ return Edition.EDITION_99997_TEST_ONLY;
+ case 99998:
+ case "EDITION_99998_TEST_ONLY":
+ return Edition.EDITION_99998_TEST_ONLY;
+ case 99999:
+ case "EDITION_99999_TEST_ONLY":
+ return Edition.EDITION_99999_TEST_ONLY;
+ case 2147483647:
+ case "EDITION_MAX":
+ return Edition.EDITION_MAX;
+ default:
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum Edition");
+ }
+}
+function editionToJSON(object) {
+ switch (object) {
+ case Edition.EDITION_UNKNOWN:
+ return "EDITION_UNKNOWN";
+ case Edition.EDITION_LEGACY:
+ return "EDITION_LEGACY";
+ case Edition.EDITION_PROTO2:
+ return "EDITION_PROTO2";
+ case Edition.EDITION_PROTO3:
+ return "EDITION_PROTO3";
+ case Edition.EDITION_2023:
+ return "EDITION_2023";
+ case Edition.EDITION_2024:
+ return "EDITION_2024";
+ case Edition.EDITION_1_TEST_ONLY:
+ return "EDITION_1_TEST_ONLY";
+ case Edition.EDITION_2_TEST_ONLY:
+ return "EDITION_2_TEST_ONLY";
+ case Edition.EDITION_99997_TEST_ONLY:
+ return "EDITION_99997_TEST_ONLY";
+ case Edition.EDITION_99998_TEST_ONLY:
+ return "EDITION_99998_TEST_ONLY";
+ case Edition.EDITION_99999_TEST_ONLY:
+ return "EDITION_99999_TEST_ONLY";
+ case Edition.EDITION_MAX:
+ return "EDITION_MAX";
+ default:
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum Edition");
+ }
+}
+/** The verification state of the extension range. */
+var ExtensionRangeOptions_VerificationState;
+(function (ExtensionRangeOptions_VerificationState) {
+ /** DECLARATION - All the extensions of the range must be declared. */
+ ExtensionRangeOptions_VerificationState[ExtensionRangeOptions_VerificationState["DECLARATION"] = 0] = "DECLARATION";
+ ExtensionRangeOptions_VerificationState[ExtensionRangeOptions_VerificationState["UNVERIFIED"] = 1] = "UNVERIFIED";
+})(ExtensionRangeOptions_VerificationState || (exports.ExtensionRangeOptions_VerificationState = ExtensionRangeOptions_VerificationState = {}));
+function extensionRangeOptions_VerificationStateFromJSON(object) {
+ switch (object) {
+ case 0:
+ case "DECLARATION":
+ return ExtensionRangeOptions_VerificationState.DECLARATION;
+ case 1:
+ case "UNVERIFIED":
+ return ExtensionRangeOptions_VerificationState.UNVERIFIED;
+ default:
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum ExtensionRangeOptions_VerificationState");
+ }
+}
+function extensionRangeOptions_VerificationStateToJSON(object) {
+ switch (object) {
+ case ExtensionRangeOptions_VerificationState.DECLARATION:
+ return "DECLARATION";
+ case ExtensionRangeOptions_VerificationState.UNVERIFIED:
+ return "UNVERIFIED";
+ default:
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum ExtensionRangeOptions_VerificationState");
+ }
+}
var FieldDescriptorProto_Type;
(function (FieldDescriptorProto_Type) {
/**
@@ -27,9 +211,10 @@ var FieldDescriptorProto_Type;
FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_STRING"] = 9] = "TYPE_STRING";
/**
* TYPE_GROUP - Tag-delimited aggregate.
- * Group type is deprecated and not supported in proto3. However, Proto3
+ * Group type is deprecated and not supported after google.protobuf. However, Proto3
* implementations should still be able to parse the group wire format and
- * treat group fields as unknown fields.
+ * treat group fields as unknown fields. In Editions, the group wire format
+ * can be enabled via the `message_encoding` feature.
*/
FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_GROUP"] = 10] = "TYPE_GROUP";
/** TYPE_MESSAGE - Length-delimited aggregate. */
@@ -44,7 +229,7 @@ var FieldDescriptorProto_Type;
FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT32"] = 17] = "TYPE_SINT32";
/** TYPE_SINT64 - Uses ZigZag encoding. */
FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT64"] = 18] = "TYPE_SINT64";
-})(FieldDescriptorProto_Type = exports.FieldDescriptorProto_Type || (exports.FieldDescriptorProto_Type = {}));
+})(FieldDescriptorProto_Type || (exports.FieldDescriptorProto_Type = FieldDescriptorProto_Type = {}));
function fieldDescriptorProto_TypeFromJSON(object) {
switch (object) {
case 1:
@@ -102,10 +287,9 @@ function fieldDescriptorProto_TypeFromJSON(object) {
case "TYPE_SINT64":
return FieldDescriptorProto_Type.TYPE_SINT64;
default:
- throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
}
}
-exports.fieldDescriptorProto_TypeFromJSON = fieldDescriptorProto_TypeFromJSON;
function fieldDescriptorProto_TypeToJSON(object) {
switch (object) {
case FieldDescriptorProto_Type.TYPE_DOUBLE:
@@ -145,46 +329,48 @@ function fieldDescriptorProto_TypeToJSON(object) {
case FieldDescriptorProto_Type.TYPE_SINT64:
return "TYPE_SINT64";
default:
- throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
}
}
-exports.fieldDescriptorProto_TypeToJSON = fieldDescriptorProto_TypeToJSON;
var FieldDescriptorProto_Label;
(function (FieldDescriptorProto_Label) {
/** LABEL_OPTIONAL - 0 is reserved for errors */
FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_OPTIONAL"] = 1] = "LABEL_OPTIONAL";
- FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REQUIRED"] = 2] = "LABEL_REQUIRED";
FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REPEATED"] = 3] = "LABEL_REPEATED";
-})(FieldDescriptorProto_Label = exports.FieldDescriptorProto_Label || (exports.FieldDescriptorProto_Label = {}));
+ /**
+ * LABEL_REQUIRED - The required label is only allowed in google.protobuf. In proto3 and Editions
+ * it's explicitly prohibited. In Editions, the `field_presence` feature
+ * can be used to get this behavior.
+ */
+ FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REQUIRED"] = 2] = "LABEL_REQUIRED";
+})(FieldDescriptorProto_Label || (exports.FieldDescriptorProto_Label = FieldDescriptorProto_Label = {}));
function fieldDescriptorProto_LabelFromJSON(object) {
switch (object) {
case 1:
case "LABEL_OPTIONAL":
return FieldDescriptorProto_Label.LABEL_OPTIONAL;
- case 2:
- case "LABEL_REQUIRED":
- return FieldDescriptorProto_Label.LABEL_REQUIRED;
case 3:
case "LABEL_REPEATED":
return FieldDescriptorProto_Label.LABEL_REPEATED;
+ case 2:
+ case "LABEL_REQUIRED":
+ return FieldDescriptorProto_Label.LABEL_REQUIRED;
default:
- throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
}
}
-exports.fieldDescriptorProto_LabelFromJSON = fieldDescriptorProto_LabelFromJSON;
function fieldDescriptorProto_LabelToJSON(object) {
switch (object) {
case FieldDescriptorProto_Label.LABEL_OPTIONAL:
return "LABEL_OPTIONAL";
- case FieldDescriptorProto_Label.LABEL_REQUIRED:
- return "LABEL_REQUIRED";
case FieldDescriptorProto_Label.LABEL_REPEATED:
return "LABEL_REPEATED";
+ case FieldDescriptorProto_Label.LABEL_REQUIRED:
+ return "LABEL_REQUIRED";
default:
- throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
}
}
-exports.fieldDescriptorProto_LabelToJSON = fieldDescriptorProto_LabelToJSON;
/** Generated classes can be optimized for speed or code size. */
var FileOptions_OptimizeMode;
(function (FileOptions_OptimizeMode) {
@@ -194,7 +380,7 @@ var FileOptions_OptimizeMode;
FileOptions_OptimizeMode[FileOptions_OptimizeMode["CODE_SIZE"] = 2] = "CODE_SIZE";
/** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. */
FileOptions_OptimizeMode[FileOptions_OptimizeMode["LITE_RUNTIME"] = 3] = "LITE_RUNTIME";
-})(FileOptions_OptimizeMode = exports.FileOptions_OptimizeMode || (exports.FileOptions_OptimizeMode = {}));
+})(FileOptions_OptimizeMode || (exports.FileOptions_OptimizeMode = FileOptions_OptimizeMode = {}));
function fileOptions_OptimizeModeFromJSON(object) {
switch (object) {
case 1:
@@ -207,10 +393,9 @@ function fileOptions_OptimizeModeFromJSON(object) {
case "LITE_RUNTIME":
return FileOptions_OptimizeMode.LITE_RUNTIME;
default:
- throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
}
}
-exports.fileOptions_OptimizeModeFromJSON = fileOptions_OptimizeModeFromJSON;
function fileOptions_OptimizeModeToJSON(object) {
switch (object) {
case FileOptions_OptimizeMode.SPEED:
@@ -220,17 +405,24 @@ function fileOptions_OptimizeModeToJSON(object) {
case FileOptions_OptimizeMode.LITE_RUNTIME:
return "LITE_RUNTIME";
default:
- throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
}
}
-exports.fileOptions_OptimizeModeToJSON = fileOptions_OptimizeModeToJSON;
var FieldOptions_CType;
(function (FieldOptions_CType) {
/** STRING - Default mode. */
FieldOptions_CType[FieldOptions_CType["STRING"] = 0] = "STRING";
+ /**
+ * CORD - The option [ctype=CORD] may be applied to a non-repeated field of type
+ * "bytes". It indicates that in C++, the data should be stored in a Cord
+ * instead of a string. For very large strings, this may reduce memory
+ * fragmentation. It may also allow better performance when parsing from a
+ * Cord, or when parsing with aliasing enabled, as the parsed Cord may then
+ * alias the original buffer.
+ */
FieldOptions_CType[FieldOptions_CType["CORD"] = 1] = "CORD";
FieldOptions_CType[FieldOptions_CType["STRING_PIECE"] = 2] = "STRING_PIECE";
-})(FieldOptions_CType = exports.FieldOptions_CType || (exports.FieldOptions_CType = {}));
+})(FieldOptions_CType || (exports.FieldOptions_CType = FieldOptions_CType = {}));
function fieldOptions_CTypeFromJSON(object) {
switch (object) {
case 0:
@@ -243,10 +435,9 @@ function fieldOptions_CTypeFromJSON(object) {
case "STRING_PIECE":
return FieldOptions_CType.STRING_PIECE;
default:
- throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
}
}
-exports.fieldOptions_CTypeFromJSON = fieldOptions_CTypeFromJSON;
function fieldOptions_CTypeToJSON(object) {
switch (object) {
case FieldOptions_CType.STRING:
@@ -256,10 +447,9 @@ function fieldOptions_CTypeToJSON(object) {
case FieldOptions_CType.STRING_PIECE:
return "STRING_PIECE";
default:
- throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
}
}
-exports.fieldOptions_CTypeToJSON = fieldOptions_CTypeToJSON;
var FieldOptions_JSType;
(function (FieldOptions_JSType) {
/** JS_NORMAL - Use the default type. */
@@ -268,7 +458,7 @@ var FieldOptions_JSType;
FieldOptions_JSType[FieldOptions_JSType["JS_STRING"] = 1] = "JS_STRING";
/** JS_NUMBER - Use JavaScript numbers. */
FieldOptions_JSType[FieldOptions_JSType["JS_NUMBER"] = 2] = "JS_NUMBER";
-})(FieldOptions_JSType = exports.FieldOptions_JSType || (exports.FieldOptions_JSType = {}));
+})(FieldOptions_JSType || (exports.FieldOptions_JSType = FieldOptions_JSType = {}));
function fieldOptions_JSTypeFromJSON(object) {
switch (object) {
case 0:
@@ -281,10 +471,9 @@ function fieldOptions_JSTypeFromJSON(object) {
case "JS_NUMBER":
return FieldOptions_JSType.JS_NUMBER;
default:
- throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
}
}
-exports.fieldOptions_JSTypeFromJSON = fieldOptions_JSTypeFromJSON;
function fieldOptions_JSTypeToJSON(object) {
switch (object) {
case FieldOptions_JSType.JS_NORMAL:
@@ -294,10 +483,123 @@ function fieldOptions_JSTypeToJSON(object) {
case FieldOptions_JSType.JS_NUMBER:
return "JS_NUMBER";
default:
- throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
+ }
+}
+/** If set to RETENTION_SOURCE, the option will be omitted from the binary. */
+var FieldOptions_OptionRetention;
+(function (FieldOptions_OptionRetention) {
+ FieldOptions_OptionRetention[FieldOptions_OptionRetention["RETENTION_UNKNOWN"] = 0] = "RETENTION_UNKNOWN";
+ FieldOptions_OptionRetention[FieldOptions_OptionRetention["RETENTION_RUNTIME"] = 1] = "RETENTION_RUNTIME";
+ FieldOptions_OptionRetention[FieldOptions_OptionRetention["RETENTION_SOURCE"] = 2] = "RETENTION_SOURCE";
+})(FieldOptions_OptionRetention || (exports.FieldOptions_OptionRetention = FieldOptions_OptionRetention = {}));
+function fieldOptions_OptionRetentionFromJSON(object) {
+ switch (object) {
+ case 0:
+ case "RETENTION_UNKNOWN":
+ return FieldOptions_OptionRetention.RETENTION_UNKNOWN;
+ case 1:
+ case "RETENTION_RUNTIME":
+ return FieldOptions_OptionRetention.RETENTION_RUNTIME;
+ case 2:
+ case "RETENTION_SOURCE":
+ return FieldOptions_OptionRetention.RETENTION_SOURCE;
+ default:
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionRetention");
+ }
+}
+function fieldOptions_OptionRetentionToJSON(object) {
+ switch (object) {
+ case FieldOptions_OptionRetention.RETENTION_UNKNOWN:
+ return "RETENTION_UNKNOWN";
+ case FieldOptions_OptionRetention.RETENTION_RUNTIME:
+ return "RETENTION_RUNTIME";
+ case FieldOptions_OptionRetention.RETENTION_SOURCE:
+ return "RETENTION_SOURCE";
+ default:
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionRetention");
+ }
+}
+/**
+ * This indicates the types of entities that the field may apply to when used
+ * as an option. If it is unset, then the field may be freely used as an
+ * option on any kind of entity.
+ */
+var FieldOptions_OptionTargetType;
+(function (FieldOptions_OptionTargetType) {
+ FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_UNKNOWN"] = 0] = "TARGET_TYPE_UNKNOWN";
+ FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_FILE"] = 1] = "TARGET_TYPE_FILE";
+ FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_EXTENSION_RANGE"] = 2] = "TARGET_TYPE_EXTENSION_RANGE";
+ FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_MESSAGE"] = 3] = "TARGET_TYPE_MESSAGE";
+ FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_FIELD"] = 4] = "TARGET_TYPE_FIELD";
+ FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_ONEOF"] = 5] = "TARGET_TYPE_ONEOF";
+ FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_ENUM"] = 6] = "TARGET_TYPE_ENUM";
+ FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_ENUM_ENTRY"] = 7] = "TARGET_TYPE_ENUM_ENTRY";
+ FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_SERVICE"] = 8] = "TARGET_TYPE_SERVICE";
+ FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_METHOD"] = 9] = "TARGET_TYPE_METHOD";
+})(FieldOptions_OptionTargetType || (exports.FieldOptions_OptionTargetType = FieldOptions_OptionTargetType = {}));
+function fieldOptions_OptionTargetTypeFromJSON(object) {
+ switch (object) {
+ case 0:
+ case "TARGET_TYPE_UNKNOWN":
+ return FieldOptions_OptionTargetType.TARGET_TYPE_UNKNOWN;
+ case 1:
+ case "TARGET_TYPE_FILE":
+ return FieldOptions_OptionTargetType.TARGET_TYPE_FILE;
+ case 2:
+ case "TARGET_TYPE_EXTENSION_RANGE":
+ return FieldOptions_OptionTargetType.TARGET_TYPE_EXTENSION_RANGE;
+ case 3:
+ case "TARGET_TYPE_MESSAGE":
+ return FieldOptions_OptionTargetType.TARGET_TYPE_MESSAGE;
+ case 4:
+ case "TARGET_TYPE_FIELD":
+ return FieldOptions_OptionTargetType.TARGET_TYPE_FIELD;
+ case 5:
+ case "TARGET_TYPE_ONEOF":
+ return FieldOptions_OptionTargetType.TARGET_TYPE_ONEOF;
+ case 6:
+ case "TARGET_TYPE_ENUM":
+ return FieldOptions_OptionTargetType.TARGET_TYPE_ENUM;
+ case 7:
+ case "TARGET_TYPE_ENUM_ENTRY":
+ return FieldOptions_OptionTargetType.TARGET_TYPE_ENUM_ENTRY;
+ case 8:
+ case "TARGET_TYPE_SERVICE":
+ return FieldOptions_OptionTargetType.TARGET_TYPE_SERVICE;
+ case 9:
+ case "TARGET_TYPE_METHOD":
+ return FieldOptions_OptionTargetType.TARGET_TYPE_METHOD;
+ default:
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionTargetType");
+ }
+}
+function fieldOptions_OptionTargetTypeToJSON(object) {
+ switch (object) {
+ case FieldOptions_OptionTargetType.TARGET_TYPE_UNKNOWN:
+ return "TARGET_TYPE_UNKNOWN";
+ case FieldOptions_OptionTargetType.TARGET_TYPE_FILE:
+ return "TARGET_TYPE_FILE";
+ case FieldOptions_OptionTargetType.TARGET_TYPE_EXTENSION_RANGE:
+ return "TARGET_TYPE_EXTENSION_RANGE";
+ case FieldOptions_OptionTargetType.TARGET_TYPE_MESSAGE:
+ return "TARGET_TYPE_MESSAGE";
+ case FieldOptions_OptionTargetType.TARGET_TYPE_FIELD:
+ return "TARGET_TYPE_FIELD";
+ case FieldOptions_OptionTargetType.TARGET_TYPE_ONEOF:
+ return "TARGET_TYPE_ONEOF";
+ case FieldOptions_OptionTargetType.TARGET_TYPE_ENUM:
+ return "TARGET_TYPE_ENUM";
+ case FieldOptions_OptionTargetType.TARGET_TYPE_ENUM_ENTRY:
+ return "TARGET_TYPE_ENUM_ENTRY";
+ case FieldOptions_OptionTargetType.TARGET_TYPE_SERVICE:
+ return "TARGET_TYPE_SERVICE";
+ case FieldOptions_OptionTargetType.TARGET_TYPE_METHOD:
+ return "TARGET_TYPE_METHOD";
+ default:
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionTargetType");
}
}
-exports.fieldOptions_JSTypeToJSON = fieldOptions_JSTypeToJSON;
/**
* Is this method side-effect-free (or safe in HTTP parlance), or idempotent,
* or neither? HTTP based RPC implementation may choose GET verb for safe
@@ -310,7 +612,7 @@ var MethodOptions_IdempotencyLevel;
MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["NO_SIDE_EFFECTS"] = 1] = "NO_SIDE_EFFECTS";
/** IDEMPOTENT - idempotent, but may have side effects */
MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENT"] = 2] = "IDEMPOTENT";
-})(MethodOptions_IdempotencyLevel = exports.MethodOptions_IdempotencyLevel || (exports.MethodOptions_IdempotencyLevel = {}));
+})(MethodOptions_IdempotencyLevel || (exports.MethodOptions_IdempotencyLevel = MethodOptions_IdempotencyLevel = {}));
function methodOptions_IdempotencyLevelFromJSON(object) {
switch (object) {
case 0:
@@ -323,10 +625,9 @@ function methodOptions_IdempotencyLevelFromJSON(object) {
case "IDEMPOTENT":
return MethodOptions_IdempotencyLevel.IDEMPOTENT;
default:
- throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
}
}
-exports.methodOptions_IdempotencyLevelFromJSON = methodOptions_IdempotencyLevelFromJSON;
function methodOptions_IdempotencyLevelToJSON(object) {
switch (object) {
case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN:
@@ -336,972 +637,1405 @@ function methodOptions_IdempotencyLevelToJSON(object) {
case MethodOptions_IdempotencyLevel.IDEMPOTENT:
return "IDEMPOTENT";
default:
- throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
}
}
-exports.methodOptions_IdempotencyLevelToJSON = methodOptions_IdempotencyLevelToJSON;
-function createBaseFileDescriptorSet() {
- return { file: [] };
+var FeatureSet_FieldPresence;
+(function (FeatureSet_FieldPresence) {
+ FeatureSet_FieldPresence[FeatureSet_FieldPresence["FIELD_PRESENCE_UNKNOWN"] = 0] = "FIELD_PRESENCE_UNKNOWN";
+ FeatureSet_FieldPresence[FeatureSet_FieldPresence["EXPLICIT"] = 1] = "EXPLICIT";
+ FeatureSet_FieldPresence[FeatureSet_FieldPresence["IMPLICIT"] = 2] = "IMPLICIT";
+ FeatureSet_FieldPresence[FeatureSet_FieldPresence["LEGACY_REQUIRED"] = 3] = "LEGACY_REQUIRED";
+})(FeatureSet_FieldPresence || (exports.FeatureSet_FieldPresence = FeatureSet_FieldPresence = {}));
+function featureSet_FieldPresenceFromJSON(object) {
+ switch (object) {
+ case 0:
+ case "FIELD_PRESENCE_UNKNOWN":
+ return FeatureSet_FieldPresence.FIELD_PRESENCE_UNKNOWN;
+ case 1:
+ case "EXPLICIT":
+ return FeatureSet_FieldPresence.EXPLICIT;
+ case 2:
+ case "IMPLICIT":
+ return FeatureSet_FieldPresence.IMPLICIT;
+ case 3:
+ case "LEGACY_REQUIRED":
+ return FeatureSet_FieldPresence.LEGACY_REQUIRED;
+ default:
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_FieldPresence");
+ }
+}
+function featureSet_FieldPresenceToJSON(object) {
+ switch (object) {
+ case FeatureSet_FieldPresence.FIELD_PRESENCE_UNKNOWN:
+ return "FIELD_PRESENCE_UNKNOWN";
+ case FeatureSet_FieldPresence.EXPLICIT:
+ return "EXPLICIT";
+ case FeatureSet_FieldPresence.IMPLICIT:
+ return "IMPLICIT";
+ case FeatureSet_FieldPresence.LEGACY_REQUIRED:
+ return "LEGACY_REQUIRED";
+ default:
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_FieldPresence");
+ }
+}
+var FeatureSet_EnumType;
+(function (FeatureSet_EnumType) {
+ FeatureSet_EnumType[FeatureSet_EnumType["ENUM_TYPE_UNKNOWN"] = 0] = "ENUM_TYPE_UNKNOWN";
+ FeatureSet_EnumType[FeatureSet_EnumType["OPEN"] = 1] = "OPEN";
+ FeatureSet_EnumType[FeatureSet_EnumType["CLOSED"] = 2] = "CLOSED";
+})(FeatureSet_EnumType || (exports.FeatureSet_EnumType = FeatureSet_EnumType = {}));
+function featureSet_EnumTypeFromJSON(object) {
+ switch (object) {
+ case 0:
+ case "ENUM_TYPE_UNKNOWN":
+ return FeatureSet_EnumType.ENUM_TYPE_UNKNOWN;
+ case 1:
+ case "OPEN":
+ return FeatureSet_EnumType.OPEN;
+ case 2:
+ case "CLOSED":
+ return FeatureSet_EnumType.CLOSED;
+ default:
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_EnumType");
+ }
+}
+function featureSet_EnumTypeToJSON(object) {
+ switch (object) {
+ case FeatureSet_EnumType.ENUM_TYPE_UNKNOWN:
+ return "ENUM_TYPE_UNKNOWN";
+ case FeatureSet_EnumType.OPEN:
+ return "OPEN";
+ case FeatureSet_EnumType.CLOSED:
+ return "CLOSED";
+ default:
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_EnumType");
+ }
+}
+var FeatureSet_RepeatedFieldEncoding;
+(function (FeatureSet_RepeatedFieldEncoding) {
+ FeatureSet_RepeatedFieldEncoding[FeatureSet_RepeatedFieldEncoding["REPEATED_FIELD_ENCODING_UNKNOWN"] = 0] = "REPEATED_FIELD_ENCODING_UNKNOWN";
+ FeatureSet_RepeatedFieldEncoding[FeatureSet_RepeatedFieldEncoding["PACKED"] = 1] = "PACKED";
+ FeatureSet_RepeatedFieldEncoding[FeatureSet_RepeatedFieldEncoding["EXPANDED"] = 2] = "EXPANDED";
+})(FeatureSet_RepeatedFieldEncoding || (exports.FeatureSet_RepeatedFieldEncoding = FeatureSet_RepeatedFieldEncoding = {}));
+function featureSet_RepeatedFieldEncodingFromJSON(object) {
+ switch (object) {
+ case 0:
+ case "REPEATED_FIELD_ENCODING_UNKNOWN":
+ return FeatureSet_RepeatedFieldEncoding.REPEATED_FIELD_ENCODING_UNKNOWN;
+ case 1:
+ case "PACKED":
+ return FeatureSet_RepeatedFieldEncoding.PACKED;
+ case 2:
+ case "EXPANDED":
+ return FeatureSet_RepeatedFieldEncoding.EXPANDED;
+ default:
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_RepeatedFieldEncoding");
+ }
+}
+function featureSet_RepeatedFieldEncodingToJSON(object) {
+ switch (object) {
+ case FeatureSet_RepeatedFieldEncoding.REPEATED_FIELD_ENCODING_UNKNOWN:
+ return "REPEATED_FIELD_ENCODING_UNKNOWN";
+ case FeatureSet_RepeatedFieldEncoding.PACKED:
+ return "PACKED";
+ case FeatureSet_RepeatedFieldEncoding.EXPANDED:
+ return "EXPANDED";
+ default:
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_RepeatedFieldEncoding");
+ }
+}
+var FeatureSet_Utf8Validation;
+(function (FeatureSet_Utf8Validation) {
+ FeatureSet_Utf8Validation[FeatureSet_Utf8Validation["UTF8_VALIDATION_UNKNOWN"] = 0] = "UTF8_VALIDATION_UNKNOWN";
+ FeatureSet_Utf8Validation[FeatureSet_Utf8Validation["VERIFY"] = 2] = "VERIFY";
+ FeatureSet_Utf8Validation[FeatureSet_Utf8Validation["NONE"] = 3] = "NONE";
+})(FeatureSet_Utf8Validation || (exports.FeatureSet_Utf8Validation = FeatureSet_Utf8Validation = {}));
+function featureSet_Utf8ValidationFromJSON(object) {
+ switch (object) {
+ case 0:
+ case "UTF8_VALIDATION_UNKNOWN":
+ return FeatureSet_Utf8Validation.UTF8_VALIDATION_UNKNOWN;
+ case 2:
+ case "VERIFY":
+ return FeatureSet_Utf8Validation.VERIFY;
+ case 3:
+ case "NONE":
+ return FeatureSet_Utf8Validation.NONE;
+ default:
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_Utf8Validation");
+ }
+}
+function featureSet_Utf8ValidationToJSON(object) {
+ switch (object) {
+ case FeatureSet_Utf8Validation.UTF8_VALIDATION_UNKNOWN:
+ return "UTF8_VALIDATION_UNKNOWN";
+ case FeatureSet_Utf8Validation.VERIFY:
+ return "VERIFY";
+ case FeatureSet_Utf8Validation.NONE:
+ return "NONE";
+ default:
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_Utf8Validation");
+ }
+}
+var FeatureSet_MessageEncoding;
+(function (FeatureSet_MessageEncoding) {
+ FeatureSet_MessageEncoding[FeatureSet_MessageEncoding["MESSAGE_ENCODING_UNKNOWN"] = 0] = "MESSAGE_ENCODING_UNKNOWN";
+ FeatureSet_MessageEncoding[FeatureSet_MessageEncoding["LENGTH_PREFIXED"] = 1] = "LENGTH_PREFIXED";
+ FeatureSet_MessageEncoding[FeatureSet_MessageEncoding["DELIMITED"] = 2] = "DELIMITED";
+})(FeatureSet_MessageEncoding || (exports.FeatureSet_MessageEncoding = FeatureSet_MessageEncoding = {}));
+function featureSet_MessageEncodingFromJSON(object) {
+ switch (object) {
+ case 0:
+ case "MESSAGE_ENCODING_UNKNOWN":
+ return FeatureSet_MessageEncoding.MESSAGE_ENCODING_UNKNOWN;
+ case 1:
+ case "LENGTH_PREFIXED":
+ return FeatureSet_MessageEncoding.LENGTH_PREFIXED;
+ case 2:
+ case "DELIMITED":
+ return FeatureSet_MessageEncoding.DELIMITED;
+ default:
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_MessageEncoding");
+ }
+}
+function featureSet_MessageEncodingToJSON(object) {
+ switch (object) {
+ case FeatureSet_MessageEncoding.MESSAGE_ENCODING_UNKNOWN:
+ return "MESSAGE_ENCODING_UNKNOWN";
+ case FeatureSet_MessageEncoding.LENGTH_PREFIXED:
+ return "LENGTH_PREFIXED";
+ case FeatureSet_MessageEncoding.DELIMITED:
+ return "DELIMITED";
+ default:
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_MessageEncoding");
+ }
+}
+var FeatureSet_JsonFormat;
+(function (FeatureSet_JsonFormat) {
+ FeatureSet_JsonFormat[FeatureSet_JsonFormat["JSON_FORMAT_UNKNOWN"] = 0] = "JSON_FORMAT_UNKNOWN";
+ FeatureSet_JsonFormat[FeatureSet_JsonFormat["ALLOW"] = 1] = "ALLOW";
+ FeatureSet_JsonFormat[FeatureSet_JsonFormat["LEGACY_BEST_EFFORT"] = 2] = "LEGACY_BEST_EFFORT";
+})(FeatureSet_JsonFormat || (exports.FeatureSet_JsonFormat = FeatureSet_JsonFormat = {}));
+function featureSet_JsonFormatFromJSON(object) {
+ switch (object) {
+ case 0:
+ case "JSON_FORMAT_UNKNOWN":
+ return FeatureSet_JsonFormat.JSON_FORMAT_UNKNOWN;
+ case 1:
+ case "ALLOW":
+ return FeatureSet_JsonFormat.ALLOW;
+ case 2:
+ case "LEGACY_BEST_EFFORT":
+ return FeatureSet_JsonFormat.LEGACY_BEST_EFFORT;
+ default:
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_JsonFormat");
+ }
+}
+function featureSet_JsonFormatToJSON(object) {
+ switch (object) {
+ case FeatureSet_JsonFormat.JSON_FORMAT_UNKNOWN:
+ return "JSON_FORMAT_UNKNOWN";
+ case FeatureSet_JsonFormat.ALLOW:
+ return "ALLOW";
+ case FeatureSet_JsonFormat.LEGACY_BEST_EFFORT:
+ return "LEGACY_BEST_EFFORT";
+ default:
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_JsonFormat");
+ }
+}
+var FeatureSet_EnforceNamingStyle;
+(function (FeatureSet_EnforceNamingStyle) {
+ FeatureSet_EnforceNamingStyle[FeatureSet_EnforceNamingStyle["ENFORCE_NAMING_STYLE_UNKNOWN"] = 0] = "ENFORCE_NAMING_STYLE_UNKNOWN";
+ FeatureSet_EnforceNamingStyle[FeatureSet_EnforceNamingStyle["STYLE2024"] = 1] = "STYLE2024";
+ FeatureSet_EnforceNamingStyle[FeatureSet_EnforceNamingStyle["STYLE_LEGACY"] = 2] = "STYLE_LEGACY";
+})(FeatureSet_EnforceNamingStyle || (exports.FeatureSet_EnforceNamingStyle = FeatureSet_EnforceNamingStyle = {}));
+function featureSet_EnforceNamingStyleFromJSON(object) {
+ switch (object) {
+ case 0:
+ case "ENFORCE_NAMING_STYLE_UNKNOWN":
+ return FeatureSet_EnforceNamingStyle.ENFORCE_NAMING_STYLE_UNKNOWN;
+ case 1:
+ case "STYLE2024":
+ return FeatureSet_EnforceNamingStyle.STYLE2024;
+ case 2:
+ case "STYLE_LEGACY":
+ return FeatureSet_EnforceNamingStyle.STYLE_LEGACY;
+ default:
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_EnforceNamingStyle");
+ }
+}
+function featureSet_EnforceNamingStyleToJSON(object) {
+ switch (object) {
+ case FeatureSet_EnforceNamingStyle.ENFORCE_NAMING_STYLE_UNKNOWN:
+ return "ENFORCE_NAMING_STYLE_UNKNOWN";
+ case FeatureSet_EnforceNamingStyle.STYLE2024:
+ return "STYLE2024";
+ case FeatureSet_EnforceNamingStyle.STYLE_LEGACY:
+ return "STYLE_LEGACY";
+ default:
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_EnforceNamingStyle");
+ }
+}
+/**
+ * Represents the identified object's effect on the element in the original
+ * .proto file.
+ */
+var GeneratedCodeInfo_Annotation_Semantic;
+(function (GeneratedCodeInfo_Annotation_Semantic) {
+ /** NONE - There is no effect or the effect is indescribable. */
+ GeneratedCodeInfo_Annotation_Semantic[GeneratedCodeInfo_Annotation_Semantic["NONE"] = 0] = "NONE";
+ /** SET - The element is set or otherwise mutated. */
+ GeneratedCodeInfo_Annotation_Semantic[GeneratedCodeInfo_Annotation_Semantic["SET"] = 1] = "SET";
+ /** ALIAS - An alias to the element is returned. */
+ GeneratedCodeInfo_Annotation_Semantic[GeneratedCodeInfo_Annotation_Semantic["ALIAS"] = 2] = "ALIAS";
+})(GeneratedCodeInfo_Annotation_Semantic || (exports.GeneratedCodeInfo_Annotation_Semantic = GeneratedCodeInfo_Annotation_Semantic = {}));
+function generatedCodeInfo_Annotation_SemanticFromJSON(object) {
+ switch (object) {
+ case 0:
+ case "NONE":
+ return GeneratedCodeInfo_Annotation_Semantic.NONE;
+ case 1:
+ case "SET":
+ return GeneratedCodeInfo_Annotation_Semantic.SET;
+ case 2:
+ case "ALIAS":
+ return GeneratedCodeInfo_Annotation_Semantic.ALIAS;
+ default:
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum GeneratedCodeInfo_Annotation_Semantic");
+ }
+}
+function generatedCodeInfo_Annotation_SemanticToJSON(object) {
+ switch (object) {
+ case GeneratedCodeInfo_Annotation_Semantic.NONE:
+ return "NONE";
+ case GeneratedCodeInfo_Annotation_Semantic.SET:
+ return "SET";
+ case GeneratedCodeInfo_Annotation_Semantic.ALIAS:
+ return "ALIAS";
+ default:
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum GeneratedCodeInfo_Annotation_Semantic");
+ }
}
exports.FileDescriptorSet = {
fromJSON(object) {
- return { file: Array.isArray(object?.file) ? object.file.map((e) => exports.FileDescriptorProto.fromJSON(e)) : [] };
+ return {
+ file: globalThis.Array.isArray(object?.file) ? object.file.map((e) => exports.FileDescriptorProto.fromJSON(e)) : [],
+ };
},
toJSON(message) {
const obj = {};
- if (message.file) {
- obj.file = message.file.map((e) => e ? exports.FileDescriptorProto.toJSON(e) : undefined);
- }
- else {
- obj.file = [];
+ if (message.file?.length) {
+ obj.file = message.file.map((e) => exports.FileDescriptorProto.toJSON(e));
}
return obj;
},
};
-function createBaseFileDescriptorProto() {
- return {
- name: "",
- package: "",
- dependency: [],
- publicDependency: [],
- weakDependency: [],
- messageType: [],
- enumType: [],
- service: [],
- extension: [],
- options: undefined,
- sourceCodeInfo: undefined,
- syntax: "",
- };
-}
exports.FileDescriptorProto = {
fromJSON(object) {
return {
- name: isSet(object.name) ? String(object.name) : "",
- package: isSet(object.package) ? String(object.package) : "",
- dependency: Array.isArray(object?.dependency) ? object.dependency.map((e) => String(e)) : [],
- publicDependency: Array.isArray(object?.publicDependency)
- ? object.publicDependency.map((e) => Number(e))
+ name: isSet(object.name) ? globalThis.String(object.name) : "",
+ package: isSet(object.package) ? globalThis.String(object.package) : "",
+ dependency: globalThis.Array.isArray(object?.dependency)
+ ? object.dependency.map((e) => globalThis.String(e))
+ : [],
+ publicDependency: globalThis.Array.isArray(object?.publicDependency)
+ ? object.publicDependency.map((e) => globalThis.Number(e))
+ : [],
+ weakDependency: globalThis.Array.isArray(object?.weakDependency)
+ ? object.weakDependency.map((e) => globalThis.Number(e))
: [],
- weakDependency: Array.isArray(object?.weakDependency) ? object.weakDependency.map((e) => Number(e)) : [],
- messageType: Array.isArray(object?.messageType)
+ messageType: globalThis.Array.isArray(object?.messageType)
? object.messageType.map((e) => exports.DescriptorProto.fromJSON(e))
: [],
- enumType: Array.isArray(object?.enumType) ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e)) : [],
- service: Array.isArray(object?.service) ? object.service.map((e) => exports.ServiceDescriptorProto.fromJSON(e)) : [],
- extension: Array.isArray(object?.extension)
+ enumType: globalThis.Array.isArray(object?.enumType)
+ ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e))
+ : [],
+ service: globalThis.Array.isArray(object?.service)
+ ? object.service.map((e) => exports.ServiceDescriptorProto.fromJSON(e))
+ : [],
+ extension: globalThis.Array.isArray(object?.extension)
? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e))
: [],
options: isSet(object.options) ? exports.FileOptions.fromJSON(object.options) : undefined,
sourceCodeInfo: isSet(object.sourceCodeInfo) ? exports.SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined,
- syntax: isSet(object.syntax) ? String(object.syntax) : "",
+ syntax: isSet(object.syntax) ? globalThis.String(object.syntax) : "",
+ edition: isSet(object.edition) ? editionFromJSON(object.edition) : 0,
};
},
toJSON(message) {
const obj = {};
- message.name !== undefined && (obj.name = message.name);
- message.package !== undefined && (obj.package = message.package);
- if (message.dependency) {
- obj.dependency = message.dependency.map((e) => e);
+ if (message.name !== undefined && message.name !== "") {
+ obj.name = message.name;
}
- else {
- obj.dependency = [];
+ if (message.package !== undefined && message.package !== "") {
+ obj.package = message.package;
}
- if (message.publicDependency) {
- obj.publicDependency = message.publicDependency.map((e) => Math.round(e));
+ if (message.dependency?.length) {
+ obj.dependency = message.dependency;
}
- else {
- obj.publicDependency = [];
+ if (message.publicDependency?.length) {
+ obj.publicDependency = message.publicDependency.map((e) => Math.round(e));
}
- if (message.weakDependency) {
+ if (message.weakDependency?.length) {
obj.weakDependency = message.weakDependency.map((e) => Math.round(e));
}
- else {
- obj.weakDependency = [];
+ if (message.messageType?.length) {
+ obj.messageType = message.messageType.map((e) => exports.DescriptorProto.toJSON(e));
}
- if (message.messageType) {
- obj.messageType = message.messageType.map((e) => e ? exports.DescriptorProto.toJSON(e) : undefined);
+ if (message.enumType?.length) {
+ obj.enumType = message.enumType.map((e) => exports.EnumDescriptorProto.toJSON(e));
}
- else {
- obj.messageType = [];
+ if (message.service?.length) {
+ obj.service = message.service.map((e) => exports.ServiceDescriptorProto.toJSON(e));
}
- if (message.enumType) {
- obj.enumType = message.enumType.map((e) => e ? exports.EnumDescriptorProto.toJSON(e) : undefined);
+ if (message.extension?.length) {
+ obj.extension = message.extension.map((e) => exports.FieldDescriptorProto.toJSON(e));
}
- else {
- obj.enumType = [];
+ if (message.options !== undefined) {
+ obj.options = exports.FileOptions.toJSON(message.options);
}
- if (message.service) {
- obj.service = message.service.map((e) => e ? exports.ServiceDescriptorProto.toJSON(e) : undefined);
+ if (message.sourceCodeInfo !== undefined) {
+ obj.sourceCodeInfo = exports.SourceCodeInfo.toJSON(message.sourceCodeInfo);
}
- else {
- obj.service = [];
+ if (message.syntax !== undefined && message.syntax !== "") {
+ obj.syntax = message.syntax;
}
- if (message.extension) {
- obj.extension = message.extension.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined);
+ if (message.edition !== undefined && message.edition !== 0) {
+ obj.edition = editionToJSON(message.edition);
}
- else {
- obj.extension = [];
- }
- message.options !== undefined && (obj.options = message.options ? exports.FileOptions.toJSON(message.options) : undefined);
- message.sourceCodeInfo !== undefined &&
- (obj.sourceCodeInfo = message.sourceCodeInfo ? exports.SourceCodeInfo.toJSON(message.sourceCodeInfo) : undefined);
- message.syntax !== undefined && (obj.syntax = message.syntax);
return obj;
},
};
-function createBaseDescriptorProto() {
- return {
- name: "",
- field: [],
- extension: [],
- nestedType: [],
- enumType: [],
- extensionRange: [],
- oneofDecl: [],
- options: undefined,
- reservedRange: [],
- reservedName: [],
- };
-}
exports.DescriptorProto = {
fromJSON(object) {
return {
- name: isSet(object.name) ? String(object.name) : "",
- field: Array.isArray(object?.field) ? object.field.map((e) => exports.FieldDescriptorProto.fromJSON(e)) : [],
- extension: Array.isArray(object?.extension)
+ name: isSet(object.name) ? globalThis.String(object.name) : "",
+ field: globalThis.Array.isArray(object?.field)
+ ? object.field.map((e) => exports.FieldDescriptorProto.fromJSON(e))
+ : [],
+ extension: globalThis.Array.isArray(object?.extension)
? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e))
: [],
- nestedType: Array.isArray(object?.nestedType)
+ nestedType: globalThis.Array.isArray(object?.nestedType)
? object.nestedType.map((e) => exports.DescriptorProto.fromJSON(e))
: [],
- enumType: Array.isArray(object?.enumType) ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e)) : [],
- extensionRange: Array.isArray(object?.extensionRange)
+ enumType: globalThis.Array.isArray(object?.enumType)
+ ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e))
+ : [],
+ extensionRange: globalThis.Array.isArray(object?.extensionRange)
? object.extensionRange.map((e) => exports.DescriptorProto_ExtensionRange.fromJSON(e))
: [],
- oneofDecl: Array.isArray(object?.oneofDecl)
+ oneofDecl: globalThis.Array.isArray(object?.oneofDecl)
? object.oneofDecl.map((e) => exports.OneofDescriptorProto.fromJSON(e))
: [],
options: isSet(object.options) ? exports.MessageOptions.fromJSON(object.options) : undefined,
- reservedRange: Array.isArray(object?.reservedRange)
+ reservedRange: globalThis.Array.isArray(object?.reservedRange)
? object.reservedRange.map((e) => exports.DescriptorProto_ReservedRange.fromJSON(e))
: [],
- reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e) => String(e)) : [],
+ reservedName: globalThis.Array.isArray(object?.reservedName)
+ ? object.reservedName.map((e) => globalThis.String(e))
+ : [],
};
},
toJSON(message) {
const obj = {};
- message.name !== undefined && (obj.name = message.name);
- if (message.field) {
- obj.field = message.field.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined);
- }
- else {
- obj.field = [];
- }
- if (message.extension) {
- obj.extension = message.extension.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined);
- }
- else {
- obj.extension = [];
- }
- if (message.nestedType) {
- obj.nestedType = message.nestedType.map((e) => e ? exports.DescriptorProto.toJSON(e) : undefined);
- }
- else {
- obj.nestedType = [];
+ if (message.name !== undefined && message.name !== "") {
+ obj.name = message.name;
}
- if (message.enumType) {
- obj.enumType = message.enumType.map((e) => e ? exports.EnumDescriptorProto.toJSON(e) : undefined);
+ if (message.field?.length) {
+ obj.field = message.field.map((e) => exports.FieldDescriptorProto.toJSON(e));
}
- else {
- obj.enumType = [];
+ if (message.extension?.length) {
+ obj.extension = message.extension.map((e) => exports.FieldDescriptorProto.toJSON(e));
}
- if (message.extensionRange) {
- obj.extensionRange = message.extensionRange.map((e) => e ? exports.DescriptorProto_ExtensionRange.toJSON(e) : undefined);
+ if (message.nestedType?.length) {
+ obj.nestedType = message.nestedType.map((e) => exports.DescriptorProto.toJSON(e));
}
- else {
- obj.extensionRange = [];
+ if (message.enumType?.length) {
+ obj.enumType = message.enumType.map((e) => exports.EnumDescriptorProto.toJSON(e));
}
- if (message.oneofDecl) {
- obj.oneofDecl = message.oneofDecl.map((e) => e ? exports.OneofDescriptorProto.toJSON(e) : undefined);
+ if (message.extensionRange?.length) {
+ obj.extensionRange = message.extensionRange.map((e) => exports.DescriptorProto_ExtensionRange.toJSON(e));
}
- else {
- obj.oneofDecl = [];
+ if (message.oneofDecl?.length) {
+ obj.oneofDecl = message.oneofDecl.map((e) => exports.OneofDescriptorProto.toJSON(e));
}
- message.options !== undefined &&
- (obj.options = message.options ? exports.MessageOptions.toJSON(message.options) : undefined);
- if (message.reservedRange) {
- obj.reservedRange = message.reservedRange.map((e) => e ? exports.DescriptorProto_ReservedRange.toJSON(e) : undefined);
+ if (message.options !== undefined) {
+ obj.options = exports.MessageOptions.toJSON(message.options);
}
- else {
- obj.reservedRange = [];
+ if (message.reservedRange?.length) {
+ obj.reservedRange = message.reservedRange.map((e) => exports.DescriptorProto_ReservedRange.toJSON(e));
}
- if (message.reservedName) {
- obj.reservedName = message.reservedName.map((e) => e);
- }
- else {
- obj.reservedName = [];
+ if (message.reservedName?.length) {
+ obj.reservedName = message.reservedName;
}
return obj;
},
};
-function createBaseDescriptorProto_ExtensionRange() {
- return { start: 0, end: 0, options: undefined };
-}
exports.DescriptorProto_ExtensionRange = {
fromJSON(object) {
return {
- start: isSet(object.start) ? Number(object.start) : 0,
- end: isSet(object.end) ? Number(object.end) : 0,
+ start: isSet(object.start) ? globalThis.Number(object.start) : 0,
+ end: isSet(object.end) ? globalThis.Number(object.end) : 0,
options: isSet(object.options) ? exports.ExtensionRangeOptions.fromJSON(object.options) : undefined,
};
},
toJSON(message) {
const obj = {};
- message.start !== undefined && (obj.start = Math.round(message.start));
- message.end !== undefined && (obj.end = Math.round(message.end));
- message.options !== undefined &&
- (obj.options = message.options ? exports.ExtensionRangeOptions.toJSON(message.options) : undefined);
+ if (message.start !== undefined && message.start !== 0) {
+ obj.start = Math.round(message.start);
+ }
+ if (message.end !== undefined && message.end !== 0) {
+ obj.end = Math.round(message.end);
+ }
+ if (message.options !== undefined) {
+ obj.options = exports.ExtensionRangeOptions.toJSON(message.options);
+ }
return obj;
},
};
-function createBaseDescriptorProto_ReservedRange() {
- return { start: 0, end: 0 };
-}
exports.DescriptorProto_ReservedRange = {
fromJSON(object) {
- return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 };
+ return {
+ start: isSet(object.start) ? globalThis.Number(object.start) : 0,
+ end: isSet(object.end) ? globalThis.Number(object.end) : 0,
+ };
},
toJSON(message) {
const obj = {};
- message.start !== undefined && (obj.start = Math.round(message.start));
- message.end !== undefined && (obj.end = Math.round(message.end));
+ if (message.start !== undefined && message.start !== 0) {
+ obj.start = Math.round(message.start);
+ }
+ if (message.end !== undefined && message.end !== 0) {
+ obj.end = Math.round(message.end);
+ }
return obj;
},
};
-function createBaseExtensionRangeOptions() {
- return { uninterpretedOption: [] };
-}
exports.ExtensionRangeOptions = {
fromJSON(object) {
return {
- uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+ uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
: [],
+ declaration: globalThis.Array.isArray(object?.declaration)
+ ? object.declaration.map((e) => exports.ExtensionRangeOptions_Declaration.fromJSON(e))
+ : [],
+ features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+ verification: isSet(object.verification)
+ ? extensionRangeOptions_VerificationStateFromJSON(object.verification)
+ : 1,
};
},
toJSON(message) {
const obj = {};
- if (message.uninterpretedOption) {
- obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+ if (message.uninterpretedOption?.length) {
+ obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
+ }
+ if (message.declaration?.length) {
+ obj.declaration = message.declaration.map((e) => exports.ExtensionRangeOptions_Declaration.toJSON(e));
}
- else {
- obj.uninterpretedOption = [];
+ if (message.features !== undefined) {
+ obj.features = exports.FeatureSet.toJSON(message.features);
+ }
+ if (message.verification !== undefined && message.verification !== 1) {
+ obj.verification = extensionRangeOptions_VerificationStateToJSON(message.verification);
+ }
+ return obj;
+ },
+};
+exports.ExtensionRangeOptions_Declaration = {
+ fromJSON(object) {
+ return {
+ number: isSet(object.number) ? globalThis.Number(object.number) : 0,
+ fullName: isSet(object.fullName) ? globalThis.String(object.fullName) : "",
+ type: isSet(object.type) ? globalThis.String(object.type) : "",
+ reserved: isSet(object.reserved) ? globalThis.Boolean(object.reserved) : false,
+ repeated: isSet(object.repeated) ? globalThis.Boolean(object.repeated) : false,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.number !== undefined && message.number !== 0) {
+ obj.number = Math.round(message.number);
+ }
+ if (message.fullName !== undefined && message.fullName !== "") {
+ obj.fullName = message.fullName;
+ }
+ if (message.type !== undefined && message.type !== "") {
+ obj.type = message.type;
+ }
+ if (message.reserved !== undefined && message.reserved !== false) {
+ obj.reserved = message.reserved;
+ }
+ if (message.repeated !== undefined && message.repeated !== false) {
+ obj.repeated = message.repeated;
}
return obj;
},
};
-function createBaseFieldDescriptorProto() {
- return {
- name: "",
- number: 0,
- label: 1,
- type: 1,
- typeName: "",
- extendee: "",
- defaultValue: "",
- oneofIndex: 0,
- jsonName: "",
- options: undefined,
- proto3Optional: false,
- };
-}
exports.FieldDescriptorProto = {
fromJSON(object) {
return {
- name: isSet(object.name) ? String(object.name) : "",
- number: isSet(object.number) ? Number(object.number) : 0,
+ name: isSet(object.name) ? globalThis.String(object.name) : "",
+ number: isSet(object.number) ? globalThis.Number(object.number) : 0,
label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1,
type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1,
- typeName: isSet(object.typeName) ? String(object.typeName) : "",
- extendee: isSet(object.extendee) ? String(object.extendee) : "",
- defaultValue: isSet(object.defaultValue) ? String(object.defaultValue) : "",
- oneofIndex: isSet(object.oneofIndex) ? Number(object.oneofIndex) : 0,
- jsonName: isSet(object.jsonName) ? String(object.jsonName) : "",
+ typeName: isSet(object.typeName) ? globalThis.String(object.typeName) : "",
+ extendee: isSet(object.extendee) ? globalThis.String(object.extendee) : "",
+ defaultValue: isSet(object.defaultValue) ? globalThis.String(object.defaultValue) : "",
+ oneofIndex: isSet(object.oneofIndex) ? globalThis.Number(object.oneofIndex) : 0,
+ jsonName: isSet(object.jsonName) ? globalThis.String(object.jsonName) : "",
options: isSet(object.options) ? exports.FieldOptions.fromJSON(object.options) : undefined,
- proto3Optional: isSet(object.proto3Optional) ? Boolean(object.proto3Optional) : false,
+ proto3Optional: isSet(object.proto3Optional) ? globalThis.Boolean(object.proto3Optional) : false,
};
},
toJSON(message) {
const obj = {};
- message.name !== undefined && (obj.name = message.name);
- message.number !== undefined && (obj.number = Math.round(message.number));
- message.label !== undefined && (obj.label = fieldDescriptorProto_LabelToJSON(message.label));
- message.type !== undefined && (obj.type = fieldDescriptorProto_TypeToJSON(message.type));
- message.typeName !== undefined && (obj.typeName = message.typeName);
- message.extendee !== undefined && (obj.extendee = message.extendee);
- message.defaultValue !== undefined && (obj.defaultValue = message.defaultValue);
- message.oneofIndex !== undefined && (obj.oneofIndex = Math.round(message.oneofIndex));
- message.jsonName !== undefined && (obj.jsonName = message.jsonName);
- message.options !== undefined && (obj.options = message.options ? exports.FieldOptions.toJSON(message.options) : undefined);
- message.proto3Optional !== undefined && (obj.proto3Optional = message.proto3Optional);
+ if (message.name !== undefined && message.name !== "") {
+ obj.name = message.name;
+ }
+ if (message.number !== undefined && message.number !== 0) {
+ obj.number = Math.round(message.number);
+ }
+ if (message.label !== undefined && message.label !== 1) {
+ obj.label = fieldDescriptorProto_LabelToJSON(message.label);
+ }
+ if (message.type !== undefined && message.type !== 1) {
+ obj.type = fieldDescriptorProto_TypeToJSON(message.type);
+ }
+ if (message.typeName !== undefined && message.typeName !== "") {
+ obj.typeName = message.typeName;
+ }
+ if (message.extendee !== undefined && message.extendee !== "") {
+ obj.extendee = message.extendee;
+ }
+ if (message.defaultValue !== undefined && message.defaultValue !== "") {
+ obj.defaultValue = message.defaultValue;
+ }
+ if (message.oneofIndex !== undefined && message.oneofIndex !== 0) {
+ obj.oneofIndex = Math.round(message.oneofIndex);
+ }
+ if (message.jsonName !== undefined && message.jsonName !== "") {
+ obj.jsonName = message.jsonName;
+ }
+ if (message.options !== undefined) {
+ obj.options = exports.FieldOptions.toJSON(message.options);
+ }
+ if (message.proto3Optional !== undefined && message.proto3Optional !== false) {
+ obj.proto3Optional = message.proto3Optional;
+ }
return obj;
},
};
-function createBaseOneofDescriptorProto() {
- return { name: "", options: undefined };
-}
exports.OneofDescriptorProto = {
fromJSON(object) {
return {
- name: isSet(object.name) ? String(object.name) : "",
+ name: isSet(object.name) ? globalThis.String(object.name) : "",
options: isSet(object.options) ? exports.OneofOptions.fromJSON(object.options) : undefined,
};
},
toJSON(message) {
const obj = {};
- message.name !== undefined && (obj.name = message.name);
- message.options !== undefined && (obj.options = message.options ? exports.OneofOptions.toJSON(message.options) : undefined);
+ if (message.name !== undefined && message.name !== "") {
+ obj.name = message.name;
+ }
+ if (message.options !== undefined) {
+ obj.options = exports.OneofOptions.toJSON(message.options);
+ }
return obj;
},
};
-function createBaseEnumDescriptorProto() {
- return { name: "", value: [], options: undefined, reservedRange: [], reservedName: [] };
-}
exports.EnumDescriptorProto = {
fromJSON(object) {
return {
- name: isSet(object.name) ? String(object.name) : "",
- value: Array.isArray(object?.value) ? object.value.map((e) => exports.EnumValueDescriptorProto.fromJSON(e)) : [],
+ name: isSet(object.name) ? globalThis.String(object.name) : "",
+ value: globalThis.Array.isArray(object?.value)
+ ? object.value.map((e) => exports.EnumValueDescriptorProto.fromJSON(e))
+ : [],
options: isSet(object.options) ? exports.EnumOptions.fromJSON(object.options) : undefined,
- reservedRange: Array.isArray(object?.reservedRange)
+ reservedRange: globalThis.Array.isArray(object?.reservedRange)
? object.reservedRange.map((e) => exports.EnumDescriptorProto_EnumReservedRange.fromJSON(e))
: [],
- reservedName: Array.isArray(object?.reservedName)
- ? object.reservedName.map((e) => String(e))
+ reservedName: globalThis.Array.isArray(object?.reservedName)
+ ? object.reservedName.map((e) => globalThis.String(e))
: [],
};
},
toJSON(message) {
const obj = {};
- message.name !== undefined && (obj.name = message.name);
- if (message.value) {
- obj.value = message.value.map((e) => e ? exports.EnumValueDescriptorProto.toJSON(e) : undefined);
+ if (message.name !== undefined && message.name !== "") {
+ obj.name = message.name;
}
- else {
- obj.value = [];
+ if (message.value?.length) {
+ obj.value = message.value.map((e) => exports.EnumValueDescriptorProto.toJSON(e));
}
- message.options !== undefined && (obj.options = message.options ? exports.EnumOptions.toJSON(message.options) : undefined);
- if (message.reservedRange) {
- obj.reservedRange = message.reservedRange.map((e) => e ? exports.EnumDescriptorProto_EnumReservedRange.toJSON(e) : undefined);
+ if (message.options !== undefined) {
+ obj.options = exports.EnumOptions.toJSON(message.options);
}
- else {
- obj.reservedRange = [];
+ if (message.reservedRange?.length) {
+ obj.reservedRange = message.reservedRange.map((e) => exports.EnumDescriptorProto_EnumReservedRange.toJSON(e));
}
- if (message.reservedName) {
- obj.reservedName = message.reservedName.map((e) => e);
- }
- else {
- obj.reservedName = [];
+ if (message.reservedName?.length) {
+ obj.reservedName = message.reservedName;
}
return obj;
},
};
-function createBaseEnumDescriptorProto_EnumReservedRange() {
- return { start: 0, end: 0 };
-}
exports.EnumDescriptorProto_EnumReservedRange = {
fromJSON(object) {
- return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 };
+ return {
+ start: isSet(object.start) ? globalThis.Number(object.start) : 0,
+ end: isSet(object.end) ? globalThis.Number(object.end) : 0,
+ };
},
toJSON(message) {
const obj = {};
- message.start !== undefined && (obj.start = Math.round(message.start));
- message.end !== undefined && (obj.end = Math.round(message.end));
+ if (message.start !== undefined && message.start !== 0) {
+ obj.start = Math.round(message.start);
+ }
+ if (message.end !== undefined && message.end !== 0) {
+ obj.end = Math.round(message.end);
+ }
return obj;
},
};
-function createBaseEnumValueDescriptorProto() {
- return { name: "", number: 0, options: undefined };
-}
exports.EnumValueDescriptorProto = {
fromJSON(object) {
return {
- name: isSet(object.name) ? String(object.name) : "",
- number: isSet(object.number) ? Number(object.number) : 0,
+ name: isSet(object.name) ? globalThis.String(object.name) : "",
+ number: isSet(object.number) ? globalThis.Number(object.number) : 0,
options: isSet(object.options) ? exports.EnumValueOptions.fromJSON(object.options) : undefined,
};
},
toJSON(message) {
const obj = {};
- message.name !== undefined && (obj.name = message.name);
- message.number !== undefined && (obj.number = Math.round(message.number));
- message.options !== undefined &&
- (obj.options = message.options ? exports.EnumValueOptions.toJSON(message.options) : undefined);
+ if (message.name !== undefined && message.name !== "") {
+ obj.name = message.name;
+ }
+ if (message.number !== undefined && message.number !== 0) {
+ obj.number = Math.round(message.number);
+ }
+ if (message.options !== undefined) {
+ obj.options = exports.EnumValueOptions.toJSON(message.options);
+ }
return obj;
},
};
-function createBaseServiceDescriptorProto() {
- return { name: "", method: [], options: undefined };
-}
exports.ServiceDescriptorProto = {
fromJSON(object) {
return {
- name: isSet(object.name) ? String(object.name) : "",
- method: Array.isArray(object?.method) ? object.method.map((e) => exports.MethodDescriptorProto.fromJSON(e)) : [],
+ name: isSet(object.name) ? globalThis.String(object.name) : "",
+ method: globalThis.Array.isArray(object?.method)
+ ? object.method.map((e) => exports.MethodDescriptorProto.fromJSON(e))
+ : [],
options: isSet(object.options) ? exports.ServiceOptions.fromJSON(object.options) : undefined,
};
},
toJSON(message) {
const obj = {};
- message.name !== undefined && (obj.name = message.name);
- if (message.method) {
- obj.method = message.method.map((e) => e ? exports.MethodDescriptorProto.toJSON(e) : undefined);
+ if (message.name !== undefined && message.name !== "") {
+ obj.name = message.name;
+ }
+ if (message.method?.length) {
+ obj.method = message.method.map((e) => exports.MethodDescriptorProto.toJSON(e));
}
- else {
- obj.method = [];
+ if (message.options !== undefined) {
+ obj.options = exports.ServiceOptions.toJSON(message.options);
}
- message.options !== undefined &&
- (obj.options = message.options ? exports.ServiceOptions.toJSON(message.options) : undefined);
return obj;
},
};
-function createBaseMethodDescriptorProto() {
- return {
- name: "",
- inputType: "",
- outputType: "",
- options: undefined,
- clientStreaming: false,
- serverStreaming: false,
- };
-}
exports.MethodDescriptorProto = {
fromJSON(object) {
return {
- name: isSet(object.name) ? String(object.name) : "",
- inputType: isSet(object.inputType) ? String(object.inputType) : "",
- outputType: isSet(object.outputType) ? String(object.outputType) : "",
+ name: isSet(object.name) ? globalThis.String(object.name) : "",
+ inputType: isSet(object.inputType) ? globalThis.String(object.inputType) : "",
+ outputType: isSet(object.outputType) ? globalThis.String(object.outputType) : "",
options: isSet(object.options) ? exports.MethodOptions.fromJSON(object.options) : undefined,
- clientStreaming: isSet(object.clientStreaming) ? Boolean(object.clientStreaming) : false,
- serverStreaming: isSet(object.serverStreaming) ? Boolean(object.serverStreaming) : false,
+ clientStreaming: isSet(object.clientStreaming) ? globalThis.Boolean(object.clientStreaming) : false,
+ serverStreaming: isSet(object.serverStreaming) ? globalThis.Boolean(object.serverStreaming) : false,
};
},
toJSON(message) {
const obj = {};
- message.name !== undefined && (obj.name = message.name);
- message.inputType !== undefined && (obj.inputType = message.inputType);
- message.outputType !== undefined && (obj.outputType = message.outputType);
- message.options !== undefined &&
- (obj.options = message.options ? exports.MethodOptions.toJSON(message.options) : undefined);
- message.clientStreaming !== undefined && (obj.clientStreaming = message.clientStreaming);
- message.serverStreaming !== undefined && (obj.serverStreaming = message.serverStreaming);
+ if (message.name !== undefined && message.name !== "") {
+ obj.name = message.name;
+ }
+ if (message.inputType !== undefined && message.inputType !== "") {
+ obj.inputType = message.inputType;
+ }
+ if (message.outputType !== undefined && message.outputType !== "") {
+ obj.outputType = message.outputType;
+ }
+ if (message.options !== undefined) {
+ obj.options = exports.MethodOptions.toJSON(message.options);
+ }
+ if (message.clientStreaming !== undefined && message.clientStreaming !== false) {
+ obj.clientStreaming = message.clientStreaming;
+ }
+ if (message.serverStreaming !== undefined && message.serverStreaming !== false) {
+ obj.serverStreaming = message.serverStreaming;
+ }
return obj;
},
};
-function createBaseFileOptions() {
- return {
- javaPackage: "",
- javaOuterClassname: "",
- javaMultipleFiles: false,
- javaGenerateEqualsAndHash: false,
- javaStringCheckUtf8: false,
- optimizeFor: 1,
- goPackage: "",
- ccGenericServices: false,
- javaGenericServices: false,
- pyGenericServices: false,
- phpGenericServices: false,
- deprecated: false,
- ccEnableArenas: false,
- objcClassPrefix: "",
- csharpNamespace: "",
- swiftPrefix: "",
- phpClassPrefix: "",
- phpNamespace: "",
- phpMetadataNamespace: "",
- rubyPackage: "",
- uninterpretedOption: [],
- };
-}
exports.FileOptions = {
fromJSON(object) {
return {
- javaPackage: isSet(object.javaPackage) ? String(object.javaPackage) : "",
- javaOuterClassname: isSet(object.javaOuterClassname) ? String(object.javaOuterClassname) : "",
- javaMultipleFiles: isSet(object.javaMultipleFiles) ? Boolean(object.javaMultipleFiles) : false,
+ javaPackage: isSet(object.javaPackage) ? globalThis.String(object.javaPackage) : "",
+ javaOuterClassname: isSet(object.javaOuterClassname) ? globalThis.String(object.javaOuterClassname) : "",
+ javaMultipleFiles: isSet(object.javaMultipleFiles) ? globalThis.Boolean(object.javaMultipleFiles) : false,
javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash)
- ? Boolean(object.javaGenerateEqualsAndHash)
+ ? globalThis.Boolean(object.javaGenerateEqualsAndHash)
: false,
- javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? Boolean(object.javaStringCheckUtf8) : false,
+ javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? globalThis.Boolean(object.javaStringCheckUtf8) : false,
optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1,
- goPackage: isSet(object.goPackage) ? String(object.goPackage) : "",
- ccGenericServices: isSet(object.ccGenericServices) ? Boolean(object.ccGenericServices) : false,
- javaGenericServices: isSet(object.javaGenericServices) ? Boolean(object.javaGenericServices) : false,
- pyGenericServices: isSet(object.pyGenericServices) ? Boolean(object.pyGenericServices) : false,
- phpGenericServices: isSet(object.phpGenericServices) ? Boolean(object.phpGenericServices) : false,
- deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
- ccEnableArenas: isSet(object.ccEnableArenas) ? Boolean(object.ccEnableArenas) : false,
- objcClassPrefix: isSet(object.objcClassPrefix) ? String(object.objcClassPrefix) : "",
- csharpNamespace: isSet(object.csharpNamespace) ? String(object.csharpNamespace) : "",
- swiftPrefix: isSet(object.swiftPrefix) ? String(object.swiftPrefix) : "",
- phpClassPrefix: isSet(object.phpClassPrefix) ? String(object.phpClassPrefix) : "",
- phpNamespace: isSet(object.phpNamespace) ? String(object.phpNamespace) : "",
- phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? String(object.phpMetadataNamespace) : "",
- rubyPackage: isSet(object.rubyPackage) ? String(object.rubyPackage) : "",
- uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+ goPackage: isSet(object.goPackage) ? globalThis.String(object.goPackage) : "",
+ ccGenericServices: isSet(object.ccGenericServices) ? globalThis.Boolean(object.ccGenericServices) : false,
+ javaGenericServices: isSet(object.javaGenericServices) ? globalThis.Boolean(object.javaGenericServices) : false,
+ pyGenericServices: isSet(object.pyGenericServices) ? globalThis.Boolean(object.pyGenericServices) : false,
+ deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
+ ccEnableArenas: isSet(object.ccEnableArenas) ? globalThis.Boolean(object.ccEnableArenas) : true,
+ objcClassPrefix: isSet(object.objcClassPrefix) ? globalThis.String(object.objcClassPrefix) : "",
+ csharpNamespace: isSet(object.csharpNamespace) ? globalThis.String(object.csharpNamespace) : "",
+ swiftPrefix: isSet(object.swiftPrefix) ? globalThis.String(object.swiftPrefix) : "",
+ phpClassPrefix: isSet(object.phpClassPrefix) ? globalThis.String(object.phpClassPrefix) : "",
+ phpNamespace: isSet(object.phpNamespace) ? globalThis.String(object.phpNamespace) : "",
+ phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? globalThis.String(object.phpMetadataNamespace) : "",
+ rubyPackage: isSet(object.rubyPackage) ? globalThis.String(object.rubyPackage) : "",
+ features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+ uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
: [],
};
},
toJSON(message) {
const obj = {};
- message.javaPackage !== undefined && (obj.javaPackage = message.javaPackage);
- message.javaOuterClassname !== undefined && (obj.javaOuterClassname = message.javaOuterClassname);
- message.javaMultipleFiles !== undefined && (obj.javaMultipleFiles = message.javaMultipleFiles);
- message.javaGenerateEqualsAndHash !== undefined &&
- (obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash);
- message.javaStringCheckUtf8 !== undefined && (obj.javaStringCheckUtf8 = message.javaStringCheckUtf8);
- message.optimizeFor !== undefined && (obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor));
- message.goPackage !== undefined && (obj.goPackage = message.goPackage);
- message.ccGenericServices !== undefined && (obj.ccGenericServices = message.ccGenericServices);
- message.javaGenericServices !== undefined && (obj.javaGenericServices = message.javaGenericServices);
- message.pyGenericServices !== undefined && (obj.pyGenericServices = message.pyGenericServices);
- message.phpGenericServices !== undefined && (obj.phpGenericServices = message.phpGenericServices);
- message.deprecated !== undefined && (obj.deprecated = message.deprecated);
- message.ccEnableArenas !== undefined && (obj.ccEnableArenas = message.ccEnableArenas);
- message.objcClassPrefix !== undefined && (obj.objcClassPrefix = message.objcClassPrefix);
- message.csharpNamespace !== undefined && (obj.csharpNamespace = message.csharpNamespace);
- message.swiftPrefix !== undefined && (obj.swiftPrefix = message.swiftPrefix);
- message.phpClassPrefix !== undefined && (obj.phpClassPrefix = message.phpClassPrefix);
- message.phpNamespace !== undefined && (obj.phpNamespace = message.phpNamespace);
- message.phpMetadataNamespace !== undefined && (obj.phpMetadataNamespace = message.phpMetadataNamespace);
- message.rubyPackage !== undefined && (obj.rubyPackage = message.rubyPackage);
- if (message.uninterpretedOption) {
- obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
- }
- else {
- obj.uninterpretedOption = [];
+ if (message.javaPackage !== undefined && message.javaPackage !== "") {
+ obj.javaPackage = message.javaPackage;
+ }
+ if (message.javaOuterClassname !== undefined && message.javaOuterClassname !== "") {
+ obj.javaOuterClassname = message.javaOuterClassname;
+ }
+ if (message.javaMultipleFiles !== undefined && message.javaMultipleFiles !== false) {
+ obj.javaMultipleFiles = message.javaMultipleFiles;
+ }
+ if (message.javaGenerateEqualsAndHash !== undefined && message.javaGenerateEqualsAndHash !== false) {
+ obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash;
+ }
+ if (message.javaStringCheckUtf8 !== undefined && message.javaStringCheckUtf8 !== false) {
+ obj.javaStringCheckUtf8 = message.javaStringCheckUtf8;
+ }
+ if (message.optimizeFor !== undefined && message.optimizeFor !== 1) {
+ obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor);
+ }
+ if (message.goPackage !== undefined && message.goPackage !== "") {
+ obj.goPackage = message.goPackage;
+ }
+ if (message.ccGenericServices !== undefined && message.ccGenericServices !== false) {
+ obj.ccGenericServices = message.ccGenericServices;
+ }
+ if (message.javaGenericServices !== undefined && message.javaGenericServices !== false) {
+ obj.javaGenericServices = message.javaGenericServices;
+ }
+ if (message.pyGenericServices !== undefined && message.pyGenericServices !== false) {
+ obj.pyGenericServices = message.pyGenericServices;
+ }
+ if (message.deprecated !== undefined && message.deprecated !== false) {
+ obj.deprecated = message.deprecated;
+ }
+ if (message.ccEnableArenas !== undefined && message.ccEnableArenas !== true) {
+ obj.ccEnableArenas = message.ccEnableArenas;
+ }
+ if (message.objcClassPrefix !== undefined && message.objcClassPrefix !== "") {
+ obj.objcClassPrefix = message.objcClassPrefix;
+ }
+ if (message.csharpNamespace !== undefined && message.csharpNamespace !== "") {
+ obj.csharpNamespace = message.csharpNamespace;
+ }
+ if (message.swiftPrefix !== undefined && message.swiftPrefix !== "") {
+ obj.swiftPrefix = message.swiftPrefix;
+ }
+ if (message.phpClassPrefix !== undefined && message.phpClassPrefix !== "") {
+ obj.phpClassPrefix = message.phpClassPrefix;
+ }
+ if (message.phpNamespace !== undefined && message.phpNamespace !== "") {
+ obj.phpNamespace = message.phpNamespace;
+ }
+ if (message.phpMetadataNamespace !== undefined && message.phpMetadataNamespace !== "") {
+ obj.phpMetadataNamespace = message.phpMetadataNamespace;
+ }
+ if (message.rubyPackage !== undefined && message.rubyPackage !== "") {
+ obj.rubyPackage = message.rubyPackage;
+ }
+ if (message.features !== undefined) {
+ obj.features = exports.FeatureSet.toJSON(message.features);
+ }
+ if (message.uninterpretedOption?.length) {
+ obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
}
return obj;
},
};
-function createBaseMessageOptions() {
- return {
- messageSetWireFormat: false,
- noStandardDescriptorAccessor: false,
- deprecated: false,
- mapEntry: false,
- uninterpretedOption: [],
- };
-}
exports.MessageOptions = {
fromJSON(object) {
return {
- messageSetWireFormat: isSet(object.messageSetWireFormat) ? Boolean(object.messageSetWireFormat) : false,
+ messageSetWireFormat: isSet(object.messageSetWireFormat)
+ ? globalThis.Boolean(object.messageSetWireFormat)
+ : false,
noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor)
- ? Boolean(object.noStandardDescriptorAccessor)
+ ? globalThis.Boolean(object.noStandardDescriptorAccessor)
+ : false,
+ deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
+ mapEntry: isSet(object.mapEntry) ? globalThis.Boolean(object.mapEntry) : false,
+ deprecatedLegacyJsonFieldConflicts: isSet(object.deprecatedLegacyJsonFieldConflicts)
+ ? globalThis.Boolean(object.deprecatedLegacyJsonFieldConflicts)
: false,
- deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
- mapEntry: isSet(object.mapEntry) ? Boolean(object.mapEntry) : false,
- uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+ features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+ uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
: [],
};
},
toJSON(message) {
const obj = {};
- message.messageSetWireFormat !== undefined && (obj.messageSetWireFormat = message.messageSetWireFormat);
- message.noStandardDescriptorAccessor !== undefined &&
- (obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor);
- message.deprecated !== undefined && (obj.deprecated = message.deprecated);
- message.mapEntry !== undefined && (obj.mapEntry = message.mapEntry);
- if (message.uninterpretedOption) {
- obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+ if (message.messageSetWireFormat !== undefined && message.messageSetWireFormat !== false) {
+ obj.messageSetWireFormat = message.messageSetWireFormat;
}
- else {
- obj.uninterpretedOption = [];
+ if (message.noStandardDescriptorAccessor !== undefined && message.noStandardDescriptorAccessor !== false) {
+ obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor;
+ }
+ if (message.deprecated !== undefined && message.deprecated !== false) {
+ obj.deprecated = message.deprecated;
+ }
+ if (message.mapEntry !== undefined && message.mapEntry !== false) {
+ obj.mapEntry = message.mapEntry;
+ }
+ if (message.deprecatedLegacyJsonFieldConflicts !== undefined && message.deprecatedLegacyJsonFieldConflicts !== false) {
+ obj.deprecatedLegacyJsonFieldConflicts = message.deprecatedLegacyJsonFieldConflicts;
+ }
+ if (message.features !== undefined) {
+ obj.features = exports.FeatureSet.toJSON(message.features);
+ }
+ if (message.uninterpretedOption?.length) {
+ obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
}
return obj;
},
};
-function createBaseFieldOptions() {
- return {
- ctype: 0,
- packed: false,
- jstype: 0,
- lazy: false,
- unverifiedLazy: false,
- deprecated: false,
- weak: false,
- uninterpretedOption: [],
- };
-}
exports.FieldOptions = {
fromJSON(object) {
return {
ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0,
- packed: isSet(object.packed) ? Boolean(object.packed) : false,
+ packed: isSet(object.packed) ? globalThis.Boolean(object.packed) : false,
jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0,
- lazy: isSet(object.lazy) ? Boolean(object.lazy) : false,
- unverifiedLazy: isSet(object.unverifiedLazy) ? Boolean(object.unverifiedLazy) : false,
- deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
- weak: isSet(object.weak) ? Boolean(object.weak) : false,
- uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+ lazy: isSet(object.lazy) ? globalThis.Boolean(object.lazy) : false,
+ unverifiedLazy: isSet(object.unverifiedLazy) ? globalThis.Boolean(object.unverifiedLazy) : false,
+ deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
+ weak: isSet(object.weak) ? globalThis.Boolean(object.weak) : false,
+ debugRedact: isSet(object.debugRedact) ? globalThis.Boolean(object.debugRedact) : false,
+ retention: isSet(object.retention) ? fieldOptions_OptionRetentionFromJSON(object.retention) : 0,
+ targets: globalThis.Array.isArray(object?.targets)
+ ? object.targets.map((e) => fieldOptions_OptionTargetTypeFromJSON(e))
+ : [],
+ editionDefaults: globalThis.Array.isArray(object?.editionDefaults)
+ ? object.editionDefaults.map((e) => exports.FieldOptions_EditionDefault.fromJSON(e))
+ : [],
+ features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+ featureSupport: isSet(object.featureSupport)
+ ? exports.FieldOptions_FeatureSupport.fromJSON(object.featureSupport)
+ : undefined,
+ uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
: [],
};
},
toJSON(message) {
const obj = {};
- message.ctype !== undefined && (obj.ctype = fieldOptions_CTypeToJSON(message.ctype));
- message.packed !== undefined && (obj.packed = message.packed);
- message.jstype !== undefined && (obj.jstype = fieldOptions_JSTypeToJSON(message.jstype));
- message.lazy !== undefined && (obj.lazy = message.lazy);
- message.unverifiedLazy !== undefined && (obj.unverifiedLazy = message.unverifiedLazy);
- message.deprecated !== undefined && (obj.deprecated = message.deprecated);
- message.weak !== undefined && (obj.weak = message.weak);
- if (message.uninterpretedOption) {
- obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
- }
- else {
- obj.uninterpretedOption = [];
+ if (message.ctype !== undefined && message.ctype !== 0) {
+ obj.ctype = fieldOptions_CTypeToJSON(message.ctype);
+ }
+ if (message.packed !== undefined && message.packed !== false) {
+ obj.packed = message.packed;
+ }
+ if (message.jstype !== undefined && message.jstype !== 0) {
+ obj.jstype = fieldOptions_JSTypeToJSON(message.jstype);
+ }
+ if (message.lazy !== undefined && message.lazy !== false) {
+ obj.lazy = message.lazy;
+ }
+ if (message.unverifiedLazy !== undefined && message.unverifiedLazy !== false) {
+ obj.unverifiedLazy = message.unverifiedLazy;
+ }
+ if (message.deprecated !== undefined && message.deprecated !== false) {
+ obj.deprecated = message.deprecated;
+ }
+ if (message.weak !== undefined && message.weak !== false) {
+ obj.weak = message.weak;
+ }
+ if (message.debugRedact !== undefined && message.debugRedact !== false) {
+ obj.debugRedact = message.debugRedact;
+ }
+ if (message.retention !== undefined && message.retention !== 0) {
+ obj.retention = fieldOptions_OptionRetentionToJSON(message.retention);
+ }
+ if (message.targets?.length) {
+ obj.targets = message.targets.map((e) => fieldOptions_OptionTargetTypeToJSON(e));
+ }
+ if (message.editionDefaults?.length) {
+ obj.editionDefaults = message.editionDefaults.map((e) => exports.FieldOptions_EditionDefault.toJSON(e));
+ }
+ if (message.features !== undefined) {
+ obj.features = exports.FeatureSet.toJSON(message.features);
+ }
+ if (message.featureSupport !== undefined) {
+ obj.featureSupport = exports.FieldOptions_FeatureSupport.toJSON(message.featureSupport);
+ }
+ if (message.uninterpretedOption?.length) {
+ obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
+ }
+ return obj;
+ },
+};
+exports.FieldOptions_EditionDefault = {
+ fromJSON(object) {
+ return {
+ edition: isSet(object.edition) ? editionFromJSON(object.edition) : 0,
+ value: isSet(object.value) ? globalThis.String(object.value) : "",
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.edition !== undefined && message.edition !== 0) {
+ obj.edition = editionToJSON(message.edition);
+ }
+ if (message.value !== undefined && message.value !== "") {
+ obj.value = message.value;
+ }
+ return obj;
+ },
+};
+exports.FieldOptions_FeatureSupport = {
+ fromJSON(object) {
+ return {
+ editionIntroduced: isSet(object.editionIntroduced) ? editionFromJSON(object.editionIntroduced) : 0,
+ editionDeprecated: isSet(object.editionDeprecated) ? editionFromJSON(object.editionDeprecated) : 0,
+ deprecationWarning: isSet(object.deprecationWarning) ? globalThis.String(object.deprecationWarning) : "",
+ editionRemoved: isSet(object.editionRemoved) ? editionFromJSON(object.editionRemoved) : 0,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.editionIntroduced !== undefined && message.editionIntroduced !== 0) {
+ obj.editionIntroduced = editionToJSON(message.editionIntroduced);
+ }
+ if (message.editionDeprecated !== undefined && message.editionDeprecated !== 0) {
+ obj.editionDeprecated = editionToJSON(message.editionDeprecated);
+ }
+ if (message.deprecationWarning !== undefined && message.deprecationWarning !== "") {
+ obj.deprecationWarning = message.deprecationWarning;
+ }
+ if (message.editionRemoved !== undefined && message.editionRemoved !== 0) {
+ obj.editionRemoved = editionToJSON(message.editionRemoved);
}
return obj;
},
};
-function createBaseOneofOptions() {
- return { uninterpretedOption: [] };
-}
exports.OneofOptions = {
fromJSON(object) {
return {
- uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+ features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+ uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
: [],
};
},
toJSON(message) {
const obj = {};
- if (message.uninterpretedOption) {
- obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+ if (message.features !== undefined) {
+ obj.features = exports.FeatureSet.toJSON(message.features);
}
- else {
- obj.uninterpretedOption = [];
+ if (message.uninterpretedOption?.length) {
+ obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
}
return obj;
},
};
-function createBaseEnumOptions() {
- return { allowAlias: false, deprecated: false, uninterpretedOption: [] };
-}
exports.EnumOptions = {
fromJSON(object) {
return {
- allowAlias: isSet(object.allowAlias) ? Boolean(object.allowAlias) : false,
- deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
- uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+ allowAlias: isSet(object.allowAlias) ? globalThis.Boolean(object.allowAlias) : false,
+ deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
+ deprecatedLegacyJsonFieldConflicts: isSet(object.deprecatedLegacyJsonFieldConflicts)
+ ? globalThis.Boolean(object.deprecatedLegacyJsonFieldConflicts)
+ : false,
+ features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+ uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
: [],
};
},
toJSON(message) {
const obj = {};
- message.allowAlias !== undefined && (obj.allowAlias = message.allowAlias);
- message.deprecated !== undefined && (obj.deprecated = message.deprecated);
- if (message.uninterpretedOption) {
- obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+ if (message.allowAlias !== undefined && message.allowAlias !== false) {
+ obj.allowAlias = message.allowAlias;
}
- else {
- obj.uninterpretedOption = [];
+ if (message.deprecated !== undefined && message.deprecated !== false) {
+ obj.deprecated = message.deprecated;
+ }
+ if (message.deprecatedLegacyJsonFieldConflicts !== undefined && message.deprecatedLegacyJsonFieldConflicts !== false) {
+ obj.deprecatedLegacyJsonFieldConflicts = message.deprecatedLegacyJsonFieldConflicts;
+ }
+ if (message.features !== undefined) {
+ obj.features = exports.FeatureSet.toJSON(message.features);
+ }
+ if (message.uninterpretedOption?.length) {
+ obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
}
return obj;
},
};
-function createBaseEnumValueOptions() {
- return { deprecated: false, uninterpretedOption: [] };
-}
exports.EnumValueOptions = {
fromJSON(object) {
return {
- deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
- uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+ deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
+ features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+ debugRedact: isSet(object.debugRedact) ? globalThis.Boolean(object.debugRedact) : false,
+ featureSupport: isSet(object.featureSupport)
+ ? exports.FieldOptions_FeatureSupport.fromJSON(object.featureSupport)
+ : undefined,
+ uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
: [],
};
},
toJSON(message) {
const obj = {};
- message.deprecated !== undefined && (obj.deprecated = message.deprecated);
- if (message.uninterpretedOption) {
- obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+ if (message.deprecated !== undefined && message.deprecated !== false) {
+ obj.deprecated = message.deprecated;
+ }
+ if (message.features !== undefined) {
+ obj.features = exports.FeatureSet.toJSON(message.features);
+ }
+ if (message.debugRedact !== undefined && message.debugRedact !== false) {
+ obj.debugRedact = message.debugRedact;
+ }
+ if (message.featureSupport !== undefined) {
+ obj.featureSupport = exports.FieldOptions_FeatureSupport.toJSON(message.featureSupport);
}
- else {
- obj.uninterpretedOption = [];
+ if (message.uninterpretedOption?.length) {
+ obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
}
return obj;
},
};
-function createBaseServiceOptions() {
- return { deprecated: false, uninterpretedOption: [] };
-}
exports.ServiceOptions = {
fromJSON(object) {
return {
- deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
- uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+ features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+ deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
+ uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
: [],
};
},
toJSON(message) {
const obj = {};
- message.deprecated !== undefined && (obj.deprecated = message.deprecated);
- if (message.uninterpretedOption) {
- obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+ if (message.features !== undefined) {
+ obj.features = exports.FeatureSet.toJSON(message.features);
+ }
+ if (message.deprecated !== undefined && message.deprecated !== false) {
+ obj.deprecated = message.deprecated;
}
- else {
- obj.uninterpretedOption = [];
+ if (message.uninterpretedOption?.length) {
+ obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
}
return obj;
},
};
-function createBaseMethodOptions() {
- return { deprecated: false, idempotencyLevel: 0, uninterpretedOption: [] };
-}
exports.MethodOptions = {
fromJSON(object) {
return {
- deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+ deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
idempotencyLevel: isSet(object.idempotencyLevel)
? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel)
: 0,
- uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+ features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+ uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
: [],
};
},
toJSON(message) {
const obj = {};
- message.deprecated !== undefined && (obj.deprecated = message.deprecated);
- message.idempotencyLevel !== undefined &&
- (obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel));
- if (message.uninterpretedOption) {
- obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+ if (message.deprecated !== undefined && message.deprecated !== false) {
+ obj.deprecated = message.deprecated;
+ }
+ if (message.idempotencyLevel !== undefined && message.idempotencyLevel !== 0) {
+ obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel);
}
- else {
- obj.uninterpretedOption = [];
+ if (message.features !== undefined) {
+ obj.features = exports.FeatureSet.toJSON(message.features);
+ }
+ if (message.uninterpretedOption?.length) {
+ obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
}
return obj;
},
};
-function createBaseUninterpretedOption() {
- return {
- name: [],
- identifierValue: "",
- positiveIntValue: "0",
- negativeIntValue: "0",
- doubleValue: 0,
- stringValue: Buffer.alloc(0),
- aggregateValue: "",
- };
-}
exports.UninterpretedOption = {
fromJSON(object) {
return {
- name: Array.isArray(object?.name) ? object.name.map((e) => exports.UninterpretedOption_NamePart.fromJSON(e)) : [],
- identifierValue: isSet(object.identifierValue) ? String(object.identifierValue) : "",
- positiveIntValue: isSet(object.positiveIntValue) ? String(object.positiveIntValue) : "0",
- negativeIntValue: isSet(object.negativeIntValue) ? String(object.negativeIntValue) : "0",
- doubleValue: isSet(object.doubleValue) ? Number(object.doubleValue) : 0,
+ name: globalThis.Array.isArray(object?.name)
+ ? object.name.map((e) => exports.UninterpretedOption_NamePart.fromJSON(e))
+ : [],
+ identifierValue: isSet(object.identifierValue) ? globalThis.String(object.identifierValue) : "",
+ positiveIntValue: isSet(object.positiveIntValue) ? globalThis.String(object.positiveIntValue) : "0",
+ negativeIntValue: isSet(object.negativeIntValue) ? globalThis.String(object.negativeIntValue) : "0",
+ doubleValue: isSet(object.doubleValue) ? globalThis.Number(object.doubleValue) : 0,
stringValue: isSet(object.stringValue) ? Buffer.from(bytesFromBase64(object.stringValue)) : Buffer.alloc(0),
- aggregateValue: isSet(object.aggregateValue) ? String(object.aggregateValue) : "",
+ aggregateValue: isSet(object.aggregateValue) ? globalThis.String(object.aggregateValue) : "",
};
},
toJSON(message) {
const obj = {};
- if (message.name) {
- obj.name = message.name.map((e) => e ? exports.UninterpretedOption_NamePart.toJSON(e) : undefined);
- }
- else {
- obj.name = [];
- }
- message.identifierValue !== undefined && (obj.identifierValue = message.identifierValue);
- message.positiveIntValue !== undefined && (obj.positiveIntValue = message.positiveIntValue);
- message.negativeIntValue !== undefined && (obj.negativeIntValue = message.negativeIntValue);
- message.doubleValue !== undefined && (obj.doubleValue = message.doubleValue);
- message.stringValue !== undefined &&
- (obj.stringValue = base64FromBytes(message.stringValue !== undefined ? message.stringValue : Buffer.alloc(0)));
- message.aggregateValue !== undefined && (obj.aggregateValue = message.aggregateValue);
+ if (message.name?.length) {
+ obj.name = message.name.map((e) => exports.UninterpretedOption_NamePart.toJSON(e));
+ }
+ if (message.identifierValue !== undefined && message.identifierValue !== "") {
+ obj.identifierValue = message.identifierValue;
+ }
+ if (message.positiveIntValue !== undefined && message.positiveIntValue !== "0") {
+ obj.positiveIntValue = message.positiveIntValue;
+ }
+ if (message.negativeIntValue !== undefined && message.negativeIntValue !== "0") {
+ obj.negativeIntValue = message.negativeIntValue;
+ }
+ if (message.doubleValue !== undefined && message.doubleValue !== 0) {
+ obj.doubleValue = message.doubleValue;
+ }
+ if (message.stringValue !== undefined && message.stringValue.length !== 0) {
+ obj.stringValue = base64FromBytes(message.stringValue);
+ }
+ if (message.aggregateValue !== undefined && message.aggregateValue !== "") {
+ obj.aggregateValue = message.aggregateValue;
+ }
return obj;
},
};
-function createBaseUninterpretedOption_NamePart() {
- return { namePart: "", isExtension: false };
-}
exports.UninterpretedOption_NamePart = {
fromJSON(object) {
return {
- namePart: isSet(object.namePart) ? String(object.namePart) : "",
- isExtension: isSet(object.isExtension) ? Boolean(object.isExtension) : false,
+ namePart: isSet(object.namePart) ? globalThis.String(object.namePart) : "",
+ isExtension: isSet(object.isExtension) ? globalThis.Boolean(object.isExtension) : false,
};
},
toJSON(message) {
const obj = {};
- message.namePart !== undefined && (obj.namePart = message.namePart);
- message.isExtension !== undefined && (obj.isExtension = message.isExtension);
+ if (message.namePart !== "") {
+ obj.namePart = message.namePart;
+ }
+ if (message.isExtension !== false) {
+ obj.isExtension = message.isExtension;
+ }
+ return obj;
+ },
+};
+exports.FeatureSet = {
+ fromJSON(object) {
+ return {
+ fieldPresence: isSet(object.fieldPresence) ? featureSet_FieldPresenceFromJSON(object.fieldPresence) : 0,
+ enumType: isSet(object.enumType) ? featureSet_EnumTypeFromJSON(object.enumType) : 0,
+ repeatedFieldEncoding: isSet(object.repeatedFieldEncoding)
+ ? featureSet_RepeatedFieldEncodingFromJSON(object.repeatedFieldEncoding)
+ : 0,
+ utf8Validation: isSet(object.utf8Validation) ? featureSet_Utf8ValidationFromJSON(object.utf8Validation) : 0,
+ messageEncoding: isSet(object.messageEncoding) ? featureSet_MessageEncodingFromJSON(object.messageEncoding) : 0,
+ jsonFormat: isSet(object.jsonFormat) ? featureSet_JsonFormatFromJSON(object.jsonFormat) : 0,
+ enforceNamingStyle: isSet(object.enforceNamingStyle)
+ ? featureSet_EnforceNamingStyleFromJSON(object.enforceNamingStyle)
+ : 0,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.fieldPresence !== undefined && message.fieldPresence !== 0) {
+ obj.fieldPresence = featureSet_FieldPresenceToJSON(message.fieldPresence);
+ }
+ if (message.enumType !== undefined && message.enumType !== 0) {
+ obj.enumType = featureSet_EnumTypeToJSON(message.enumType);
+ }
+ if (message.repeatedFieldEncoding !== undefined && message.repeatedFieldEncoding !== 0) {
+ obj.repeatedFieldEncoding = featureSet_RepeatedFieldEncodingToJSON(message.repeatedFieldEncoding);
+ }
+ if (message.utf8Validation !== undefined && message.utf8Validation !== 0) {
+ obj.utf8Validation = featureSet_Utf8ValidationToJSON(message.utf8Validation);
+ }
+ if (message.messageEncoding !== undefined && message.messageEncoding !== 0) {
+ obj.messageEncoding = featureSet_MessageEncodingToJSON(message.messageEncoding);
+ }
+ if (message.jsonFormat !== undefined && message.jsonFormat !== 0) {
+ obj.jsonFormat = featureSet_JsonFormatToJSON(message.jsonFormat);
+ }
+ if (message.enforceNamingStyle !== undefined && message.enforceNamingStyle !== 0) {
+ obj.enforceNamingStyle = featureSet_EnforceNamingStyleToJSON(message.enforceNamingStyle);
+ }
+ return obj;
+ },
+};
+exports.FeatureSetDefaults = {
+ fromJSON(object) {
+ return {
+ defaults: globalThis.Array.isArray(object?.defaults)
+ ? object.defaults.map((e) => exports.FeatureSetDefaults_FeatureSetEditionDefault.fromJSON(e))
+ : [],
+ minimumEdition: isSet(object.minimumEdition) ? editionFromJSON(object.minimumEdition) : 0,
+ maximumEdition: isSet(object.maximumEdition) ? editionFromJSON(object.maximumEdition) : 0,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.defaults?.length) {
+ obj.defaults = message.defaults.map((e) => exports.FeatureSetDefaults_FeatureSetEditionDefault.toJSON(e));
+ }
+ if (message.minimumEdition !== undefined && message.minimumEdition !== 0) {
+ obj.minimumEdition = editionToJSON(message.minimumEdition);
+ }
+ if (message.maximumEdition !== undefined && message.maximumEdition !== 0) {
+ obj.maximumEdition = editionToJSON(message.maximumEdition);
+ }
+ return obj;
+ },
+};
+exports.FeatureSetDefaults_FeatureSetEditionDefault = {
+ fromJSON(object) {
+ return {
+ edition: isSet(object.edition) ? editionFromJSON(object.edition) : 0,
+ overridableFeatures: isSet(object.overridableFeatures)
+ ? exports.FeatureSet.fromJSON(object.overridableFeatures)
+ : undefined,
+ fixedFeatures: isSet(object.fixedFeatures) ? exports.FeatureSet.fromJSON(object.fixedFeatures) : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.edition !== undefined && message.edition !== 0) {
+ obj.edition = editionToJSON(message.edition);
+ }
+ if (message.overridableFeatures !== undefined) {
+ obj.overridableFeatures = exports.FeatureSet.toJSON(message.overridableFeatures);
+ }
+ if (message.fixedFeatures !== undefined) {
+ obj.fixedFeatures = exports.FeatureSet.toJSON(message.fixedFeatures);
+ }
return obj;
},
};
-function createBaseSourceCodeInfo() {
- return { location: [] };
-}
exports.SourceCodeInfo = {
fromJSON(object) {
return {
- location: Array.isArray(object?.location)
+ location: globalThis.Array.isArray(object?.location)
? object.location.map((e) => exports.SourceCodeInfo_Location.fromJSON(e))
: [],
};
},
toJSON(message) {
const obj = {};
- if (message.location) {
- obj.location = message.location.map((e) => e ? exports.SourceCodeInfo_Location.toJSON(e) : undefined);
- }
- else {
- obj.location = [];
+ if (message.location?.length) {
+ obj.location = message.location.map((e) => exports.SourceCodeInfo_Location.toJSON(e));
}
return obj;
},
};
-function createBaseSourceCodeInfo_Location() {
- return { path: [], span: [], leadingComments: "", trailingComments: "", leadingDetachedComments: [] };
-}
exports.SourceCodeInfo_Location = {
fromJSON(object) {
return {
- path: Array.isArray(object?.path) ? object.path.map((e) => Number(e)) : [],
- span: Array.isArray(object?.span) ? object.span.map((e) => Number(e)) : [],
- leadingComments: isSet(object.leadingComments) ? String(object.leadingComments) : "",
- trailingComments: isSet(object.trailingComments) ? String(object.trailingComments) : "",
- leadingDetachedComments: Array.isArray(object?.leadingDetachedComments)
- ? object.leadingDetachedComments.map((e) => String(e))
+ path: globalThis.Array.isArray(object?.path)
+ ? object.path.map((e) => globalThis.Number(e))
+ : [],
+ span: globalThis.Array.isArray(object?.span) ? object.span.map((e) => globalThis.Number(e)) : [],
+ leadingComments: isSet(object.leadingComments) ? globalThis.String(object.leadingComments) : "",
+ trailingComments: isSet(object.trailingComments) ? globalThis.String(object.trailingComments) : "",
+ leadingDetachedComments: globalThis.Array.isArray(object?.leadingDetachedComments)
+ ? object.leadingDetachedComments.map((e) => globalThis.String(e))
: [],
};
},
toJSON(message) {
const obj = {};
- if (message.path) {
+ if (message.path?.length) {
obj.path = message.path.map((e) => Math.round(e));
}
- else {
- obj.path = [];
- }
- if (message.span) {
+ if (message.span?.length) {
obj.span = message.span.map((e) => Math.round(e));
}
- else {
- obj.span = [];
+ if (message.leadingComments !== undefined && message.leadingComments !== "") {
+ obj.leadingComments = message.leadingComments;
}
- message.leadingComments !== undefined && (obj.leadingComments = message.leadingComments);
- message.trailingComments !== undefined && (obj.trailingComments = message.trailingComments);
- if (message.leadingDetachedComments) {
- obj.leadingDetachedComments = message.leadingDetachedComments.map((e) => e);
+ if (message.trailingComments !== undefined && message.trailingComments !== "") {
+ obj.trailingComments = message.trailingComments;
}
- else {
- obj.leadingDetachedComments = [];
+ if (message.leadingDetachedComments?.length) {
+ obj.leadingDetachedComments = message.leadingDetachedComments;
}
return obj;
},
};
-function createBaseGeneratedCodeInfo() {
- return { annotation: [] };
-}
exports.GeneratedCodeInfo = {
fromJSON(object) {
return {
- annotation: Array.isArray(object?.annotation)
+ annotation: globalThis.Array.isArray(object?.annotation)
? object.annotation.map((e) => exports.GeneratedCodeInfo_Annotation.fromJSON(e))
: [],
};
},
toJSON(message) {
const obj = {};
- if (message.annotation) {
- obj.annotation = message.annotation.map((e) => e ? exports.GeneratedCodeInfo_Annotation.toJSON(e) : undefined);
- }
- else {
- obj.annotation = [];
+ if (message.annotation?.length) {
+ obj.annotation = message.annotation.map((e) => exports.GeneratedCodeInfo_Annotation.toJSON(e));
}
return obj;
},
};
-function createBaseGeneratedCodeInfo_Annotation() {
- return { path: [], sourceFile: "", begin: 0, end: 0 };
-}
exports.GeneratedCodeInfo_Annotation = {
fromJSON(object) {
return {
- path: Array.isArray(object?.path) ? object.path.map((e) => Number(e)) : [],
- sourceFile: isSet(object.sourceFile) ? String(object.sourceFile) : "",
- begin: isSet(object.begin) ? Number(object.begin) : 0,
- end: isSet(object.end) ? Number(object.end) : 0,
+ path: globalThis.Array.isArray(object?.path)
+ ? object.path.map((e) => globalThis.Number(e))
+ : [],
+ sourceFile: isSet(object.sourceFile) ? globalThis.String(object.sourceFile) : "",
+ begin: isSet(object.begin) ? globalThis.Number(object.begin) : 0,
+ end: isSet(object.end) ? globalThis.Number(object.end) : 0,
+ semantic: isSet(object.semantic) ? generatedCodeInfo_Annotation_SemanticFromJSON(object.semantic) : 0,
};
},
toJSON(message) {
const obj = {};
- if (message.path) {
+ if (message.path?.length) {
obj.path = message.path.map((e) => Math.round(e));
}
- else {
- obj.path = [];
+ if (message.sourceFile !== undefined && message.sourceFile !== "") {
+ obj.sourceFile = message.sourceFile;
+ }
+ if (message.begin !== undefined && message.begin !== 0) {
+ obj.begin = Math.round(message.begin);
+ }
+ if (message.end !== undefined && message.end !== 0) {
+ obj.end = Math.round(message.end);
+ }
+ if (message.semantic !== undefined && message.semantic !== 0) {
+ obj.semantic = generatedCodeInfo_Annotation_SemanticToJSON(message.semantic);
}
- message.sourceFile !== undefined && (obj.sourceFile = message.sourceFile);
- message.begin !== undefined && (obj.begin = Math.round(message.begin));
- message.end !== undefined && (obj.end = Math.round(message.end));
return obj;
},
};
-var tsProtoGlobalThis = (() => {
- if (typeof globalThis !== "undefined") {
- return globalThis;
- }
- if (typeof self !== "undefined") {
- return self;
- }
- if (typeof window !== "undefined") {
- return window;
- }
- if (typeof global !== "undefined") {
- return global;
- }
- throw "Unable to locate global object";
-})();
function bytesFromBase64(b64) {
- if (tsProtoGlobalThis.Buffer) {
- return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
- }
- else {
- const bin = tsProtoGlobalThis.atob(b64);
- const arr = new Uint8Array(bin.length);
- for (let i = 0; i < bin.length; ++i) {
- arr[i] = bin.charCodeAt(i);
- }
- return arr;
- }
+ return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
}
function base64FromBytes(arr) {
- if (tsProtoGlobalThis.Buffer) {
- return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
- }
- else {
- const bin = [];
- arr.forEach((byte) => {
- bin.push(String.fromCharCode(byte));
- });
- return tsProtoGlobalThis.btoa(bin.join(""));
- }
+ return globalThis.Buffer.from(arr).toString("base64");
}
function isSet(value) {
return value !== null && value !== undefined;
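The regenerated base64 helpers above drop the old runtime detection and call globalThis.Buffer directly, which is always present in Node.js. A minimal sketch of the resulting round trip (illustrative only, not part of the patch):

    // Illustrative sketch, assuming a Node.js runtime where globalThis.Buffer exists.
    const bytes = Uint8Array.from(globalThis.Buffer.from("aGVsbG8=", "base64"));
    const b64 = globalThis.Buffer.from(bytes).toString("base64");
    console.log(Buffer.from(bytes).toString("utf8"), b64); // hello aGVsbG8=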
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
index 159135fe87172a..8b75b604c231c8 100644
--- a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
@@ -1,21 +1,26 @@
"use strict";
-/* eslint-disable */
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+// protoc-gen-ts_proto v2.7.0
+// protoc v6.30.2
+// source: google/protobuf/timestamp.proto
Object.defineProperty(exports, "__esModule", { value: true });
exports.Timestamp = void 0;
-function createBaseTimestamp() {
- return { seconds: "0", nanos: 0 };
-}
exports.Timestamp = {
fromJSON(object) {
return {
- seconds: isSet(object.seconds) ? String(object.seconds) : "0",
- nanos: isSet(object.nanos) ? Number(object.nanos) : 0,
+ seconds: isSet(object.seconds) ? globalThis.String(object.seconds) : "0",
+ nanos: isSet(object.nanos) ? globalThis.Number(object.nanos) : 0,
};
},
toJSON(message) {
const obj = {};
- message.seconds !== undefined && (obj.seconds = message.seconds);
- message.nanos !== undefined && (obj.nanos = Math.round(message.nanos));
+ if (message.seconds !== "0") {
+ obj.seconds = message.seconds;
+ }
+ if (message.nanos !== 0) {
+ obj.nanos = Math.round(message.nanos);
+ }
return obj;
},
};
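Note that the regenerated toJSON methods throughout this package now omit fields that still hold their proto3 default ("0" seconds and 0 nanos here) instead of copying every defined property. A small sketch of that pattern, mirroring the Timestamp codec above (illustrative only, not part of the patch):

    // Mirrors the default-omission pattern of the regenerated toJSON (sketch, not the module itself).
    function timestampToJSON(message) {
      const obj = {};
      if (message.seconds !== "0") obj.seconds = message.seconds;
      if (message.nanos !== 0) obj.nanos = Math.round(message.nanos);
      return obj;
    }
    console.log(timestampToJSON({ seconds: "0", nanos: 0 }));            // {}
    console.log(timestampToJSON({ seconds: "1712345678", nanos: 500 })); // { seconds: '1712345678', nanos: 500 }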
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js
new file mode 100644
index 00000000000000..13099ddc3631a1
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js
@@ -0,0 +1,55 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+// protoc-gen-ts_proto v2.7.0
+// protoc v6.30.2
+// source: rekor/v2/dsse.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.DSSELogEntryV002 = exports.DSSERequestV002 = void 0;
+/* eslint-disable */
+const envelope_1 = require("../../envelope");
+const sigstore_common_1 = require("../../sigstore_common");
+const verifier_1 = require("./verifier");
+exports.DSSERequestV002 = {
+ fromJSON(object) {
+ return {
+ envelope: isSet(object.envelope) ? envelope_1.Envelope.fromJSON(object.envelope) : undefined,
+ verifiers: globalThis.Array.isArray(object?.verifiers)
+ ? object.verifiers.map((e) => verifier_1.Verifier.fromJSON(e))
+ : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.envelope !== undefined) {
+ obj.envelope = envelope_1.Envelope.toJSON(message.envelope);
+ }
+ if (message.verifiers?.length) {
+ obj.verifiers = message.verifiers.map((e) => verifier_1.Verifier.toJSON(e));
+ }
+ return obj;
+ },
+};
+exports.DSSELogEntryV002 = {
+ fromJSON(object) {
+ return {
+ payloadHash: isSet(object.payloadHash) ? sigstore_common_1.HashOutput.fromJSON(object.payloadHash) : undefined,
+ signatures: globalThis.Array.isArray(object?.signatures)
+ ? object.signatures.map((e) => verifier_1.Signature.fromJSON(e))
+ : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.payloadHash !== undefined) {
+ obj.payloadHash = sigstore_common_1.HashOutput.toJSON(message.payloadHash);
+ }
+ if (message.signatures?.length) {
+ obj.signatures = message.signatures.map((e) => verifier_1.Signature.toJSON(e));
+ }
+ return obj;
+ },
+};
+function isSet(value) {
+ return value !== null && value !== undefined;
+}
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js
new file mode 100644
index 00000000000000..177fc0cbf3482e
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js
@@ -0,0 +1,81 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+// protoc-gen-ts_proto v2.7.0
+// protoc v6.30.2
+// source: rekor/v2/entry.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.CreateEntryRequest = exports.Spec = exports.Entry = void 0;
+/* eslint-disable */
+const dsse_1 = require("./dsse");
+const hashedrekord_1 = require("./hashedrekord");
+exports.Entry = {
+ fromJSON(object) {
+ return {
+ kind: isSet(object.kind) ? globalThis.String(object.kind) : "",
+ apiVersion: isSet(object.apiVersion) ? globalThis.String(object.apiVersion) : "",
+ spec: isSet(object.spec) ? exports.Spec.fromJSON(object.spec) : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.kind !== "") {
+ obj.kind = message.kind;
+ }
+ if (message.apiVersion !== "") {
+ obj.apiVersion = message.apiVersion;
+ }
+ if (message.spec !== undefined) {
+ obj.spec = exports.Spec.toJSON(message.spec);
+ }
+ return obj;
+ },
+};
+exports.Spec = {
+ fromJSON(object) {
+ return {
+ spec: isSet(object.hashedRekordV002)
+ ? { $case: "hashedRekordV002", hashedRekordV002: hashedrekord_1.HashedRekordLogEntryV002.fromJSON(object.hashedRekordV002) }
+ : isSet(object.dsseV002)
+ ? { $case: "dsseV002", dsseV002: dsse_1.DSSELogEntryV002.fromJSON(object.dsseV002) }
+ : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.spec?.$case === "hashedRekordV002") {
+ obj.hashedRekordV002 = hashedrekord_1.HashedRekordLogEntryV002.toJSON(message.spec.hashedRekordV002);
+ }
+ else if (message.spec?.$case === "dsseV002") {
+ obj.dsseV002 = dsse_1.DSSELogEntryV002.toJSON(message.spec.dsseV002);
+ }
+ return obj;
+ },
+};
+exports.CreateEntryRequest = {
+ fromJSON(object) {
+ return {
+ spec: isSet(object.hashedRekordRequestV002)
+ ? {
+ $case: "hashedRekordRequestV002",
+ hashedRekordRequestV002: hashedrekord_1.HashedRekordRequestV002.fromJSON(object.hashedRekordRequestV002),
+ }
+ : isSet(object.dsseRequestV002)
+ ? { $case: "dsseRequestV002", dsseRequestV002: dsse_1.DSSERequestV002.fromJSON(object.dsseRequestV002) }
+ : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.spec?.$case === "hashedRekordRequestV002") {
+ obj.hashedRekordRequestV002 = hashedrekord_1.HashedRekordRequestV002.toJSON(message.spec.hashedRekordRequestV002);
+ }
+ else if (message.spec?.$case === "dsseRequestV002") {
+ obj.dsseRequestV002 = dsse_1.DSSERequestV002.toJSON(message.spec.dsseRequestV002);
+ }
+ return obj;
+ },
+};
+function isSet(value) {
+ return value !== null && value !== undefined;
+}
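The new Spec and CreateEntryRequest codecs carry their proto oneof as a $case-discriminated object and flatten it back onto the wire-level field name when serializing. A minimal sketch of that shape (hypothetical values, not part of the patch):

    // Hypothetical example of the $case union handled by Spec.toJSON above.
    const spec = { $case: "dsseV002", dsseV002: { signatures: [] } };
    const obj = {};
    if (spec.$case === "hashedRekordV002") {
      obj.hashedRekordV002 = spec.hashedRekordV002;
    } else if (spec.$case === "dsseV002") {
      obj.dsseV002 = spec.dsseV002;
    }
    console.log(obj); // { dsseV002: { signatures: [] } }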
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js
new file mode 100644
index 00000000000000..ed0d16494e06ff
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js
@@ -0,0 +1,56 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+// protoc-gen-ts_proto v2.7.0
+// protoc v6.30.2
+// source: rekor/v2/hashedrekord.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.HashedRekordLogEntryV002 = exports.HashedRekordRequestV002 = void 0;
+/* eslint-disable */
+const sigstore_common_1 = require("../../sigstore_common");
+const verifier_1 = require("./verifier");
+exports.HashedRekordRequestV002 = {
+ fromJSON(object) {
+ return {
+ digest: isSet(object.digest) ? Buffer.from(bytesFromBase64(object.digest)) : Buffer.alloc(0),
+ signature: isSet(object.signature) ? verifier_1.Signature.fromJSON(object.signature) : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.digest.length !== 0) {
+ obj.digest = base64FromBytes(message.digest);
+ }
+ if (message.signature !== undefined) {
+ obj.signature = verifier_1.Signature.toJSON(message.signature);
+ }
+ return obj;
+ },
+};
+exports.HashedRekordLogEntryV002 = {
+ fromJSON(object) {
+ return {
+ data: isSet(object.data) ? sigstore_common_1.HashOutput.fromJSON(object.data) : undefined,
+ signature: isSet(object.signature) ? verifier_1.Signature.fromJSON(object.signature) : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.data !== undefined) {
+ obj.data = sigstore_common_1.HashOutput.toJSON(message.data);
+ }
+ if (message.signature !== undefined) {
+ obj.signature = verifier_1.Signature.toJSON(message.signature);
+ }
+ return obj;
+ },
+};
+function bytesFromBase64(b64) {
+ return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+}
+function base64FromBytes(arr) {
+ return globalThis.Buffer.from(arr).toString("base64");
+}
+function isSet(value) {
+ return value !== null && value !== undefined;
+}
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js
new file mode 100644
index 00000000000000..cc32d84bd7fae2
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js
@@ -0,0 +1,74 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+// protoc-gen-ts_proto v2.7.0
+// protoc v6.30.2
+// source: rekor/v2/verifier.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Signature = exports.Verifier = exports.PublicKey = void 0;
+/* eslint-disable */
+const sigstore_common_1 = require("../../sigstore_common");
+exports.PublicKey = {
+ fromJSON(object) {
+ return { rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : Buffer.alloc(0) };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.rawBytes.length !== 0) {
+ obj.rawBytes = base64FromBytes(message.rawBytes);
+ }
+ return obj;
+ },
+};
+exports.Verifier = {
+ fromJSON(object) {
+ return {
+ verifier: isSet(object.publicKey)
+ ? { $case: "publicKey", publicKey: exports.PublicKey.fromJSON(object.publicKey) }
+ : isSet(object.x509Certificate)
+ ? { $case: "x509Certificate", x509Certificate: sigstore_common_1.X509Certificate.fromJSON(object.x509Certificate) }
+ : undefined,
+ keyDetails: isSet(object.keyDetails) ? (0, sigstore_common_1.publicKeyDetailsFromJSON)(object.keyDetails) : 0,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.verifier?.$case === "publicKey") {
+ obj.publicKey = exports.PublicKey.toJSON(message.verifier.publicKey);
+ }
+ else if (message.verifier?.$case === "x509Certificate") {
+ obj.x509Certificate = sigstore_common_1.X509Certificate.toJSON(message.verifier.x509Certificate);
+ }
+ if (message.keyDetails !== 0) {
+ obj.keyDetails = (0, sigstore_common_1.publicKeyDetailsToJSON)(message.keyDetails);
+ }
+ return obj;
+ },
+};
+exports.Signature = {
+ fromJSON(object) {
+ return {
+ content: isSet(object.content) ? Buffer.from(bytesFromBase64(object.content)) : Buffer.alloc(0),
+ verifier: isSet(object.verifier) ? exports.Verifier.fromJSON(object.verifier) : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.content.length !== 0) {
+ obj.content = base64FromBytes(message.content);
+ }
+ if (message.verifier !== undefined) {
+ obj.verifier = exports.Verifier.toJSON(message.verifier);
+ }
+ return obj;
+ },
+};
+function bytesFromBase64(b64) {
+ return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+}
+function base64FromBytes(arr) {
+ return globalThis.Buffer.from(arr).toString("base64");
+}
+function isSet(value) {
+ return value !== null && value !== undefined;
+}
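Bytes fields in these new codecs round-trip through base64 strings and default to an empty Buffer, so empty values are simply left out of the JSON form. A self-contained sketch of that behaviour (assumes Node's Buffer; not part of the patch):

    // Sketch of the bytes handling used by PublicKey/Signature above.
    const isSet = (value) => value !== null && value !== undefined;
    const publicKeyFromJSON = (object) => ({
      rawBytes: isSet(object.rawBytes)
        ? Buffer.from(Buffer.from(object.rawBytes, "base64"))
        : Buffer.alloc(0),
    });
    const publicKeyToJSON = (message) => {
      const obj = {};
      if (message.rawBytes.length !== 0) {
        obj.rawBytes = Buffer.from(message.rawBytes).toString("base64");
      }
      return obj;
    };
    console.log(publicKeyToJSON(publicKeyFromJSON({ rawBytes: "AQID" }))); // { rawBytes: 'AQID' }
    console.log(publicKeyToJSON(publicKeyFromJSON({})));                   // {}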
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
index 3773867f5426a3..0f0a27b662eba5 100644
--- a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
@@ -1,35 +1,31 @@
"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+// protoc-gen-ts_proto v2.7.0
+// protoc v6.30.2
+// source: sigstore_bundle.proto
Object.defineProperty(exports, "__esModule", { value: true });
exports.Bundle = exports.VerificationMaterial = exports.TimestampVerificationData = void 0;
/* eslint-disable */
const envelope_1 = require("./envelope");
const sigstore_common_1 = require("./sigstore_common");
const sigstore_rekor_1 = require("./sigstore_rekor");
-function createBaseTimestampVerificationData() {
- return { rfc3161Timestamps: [] };
-}
exports.TimestampVerificationData = {
fromJSON(object) {
return {
- rfc3161Timestamps: Array.isArray(object?.rfc3161Timestamps)
+ rfc3161Timestamps: globalThis.Array.isArray(object?.rfc3161Timestamps)
? object.rfc3161Timestamps.map((e) => sigstore_common_1.RFC3161SignedTimestamp.fromJSON(e))
: [],
};
},
toJSON(message) {
const obj = {};
- if (message.rfc3161Timestamps) {
- obj.rfc3161Timestamps = message.rfc3161Timestamps.map((e) => e ? sigstore_common_1.RFC3161SignedTimestamp.toJSON(e) : undefined);
- }
- else {
- obj.rfc3161Timestamps = [];
+ if (message.rfc3161Timestamps?.length) {
+ obj.rfc3161Timestamps = message.rfc3161Timestamps.map((e) => sigstore_common_1.RFC3161SignedTimestamp.toJSON(e));
}
return obj;
},
};
-function createBaseVerificationMaterial() {
- return { content: undefined, tlogEntries: [], timestampVerificationData: undefined };
-}
exports.VerificationMaterial = {
fromJSON(object) {
return {
@@ -43,7 +39,7 @@ exports.VerificationMaterial = {
: isSet(object.certificate)
? { $case: "certificate", certificate: sigstore_common_1.X509Certificate.fromJSON(object.certificate) }
: undefined,
- tlogEntries: Array.isArray(object?.tlogEntries)
+ tlogEntries: globalThis.Array.isArray(object?.tlogEntries)
? object.tlogEntries.map((e) => sigstore_rekor_1.TransparencyLogEntry.fromJSON(e))
: [],
timestampVerificationData: isSet(object.timestampVerificationData)
@@ -53,36 +49,28 @@ exports.VerificationMaterial = {
},
toJSON(message) {
const obj = {};
- message.content?.$case === "publicKey" &&
- (obj.publicKey = message.content?.publicKey ? sigstore_common_1.PublicKeyIdentifier.toJSON(message.content?.publicKey) : undefined);
- message.content?.$case === "x509CertificateChain" &&
- (obj.x509CertificateChain = message.content?.x509CertificateChain
- ? sigstore_common_1.X509CertificateChain.toJSON(message.content?.x509CertificateChain)
- : undefined);
- message.content?.$case === "certificate" &&
- (obj.certificate = message.content?.certificate
- ? sigstore_common_1.X509Certificate.toJSON(message.content?.certificate)
- : undefined);
- if (message.tlogEntries) {
- obj.tlogEntries = message.tlogEntries.map((e) => e ? sigstore_rekor_1.TransparencyLogEntry.toJSON(e) : undefined);
+ if (message.content?.$case === "publicKey") {
+ obj.publicKey = sigstore_common_1.PublicKeyIdentifier.toJSON(message.content.publicKey);
+ }
+ else if (message.content?.$case === "x509CertificateChain") {
+ obj.x509CertificateChain = sigstore_common_1.X509CertificateChain.toJSON(message.content.x509CertificateChain);
+ }
+ else if (message.content?.$case === "certificate") {
+ obj.certificate = sigstore_common_1.X509Certificate.toJSON(message.content.certificate);
+ }
+ if (message.tlogEntries?.length) {
+ obj.tlogEntries = message.tlogEntries.map((e) => sigstore_rekor_1.TransparencyLogEntry.toJSON(e));
}
- else {
- obj.tlogEntries = [];
+ if (message.timestampVerificationData !== undefined) {
+ obj.timestampVerificationData = exports.TimestampVerificationData.toJSON(message.timestampVerificationData);
}
- message.timestampVerificationData !== undefined &&
- (obj.timestampVerificationData = message.timestampVerificationData
- ? exports.TimestampVerificationData.toJSON(message.timestampVerificationData)
- : undefined);
return obj;
},
};
-function createBaseBundle() {
- return { mediaType: "", verificationMaterial: undefined, content: undefined };
-}
exports.Bundle = {
fromJSON(object) {
return {
- mediaType: isSet(object.mediaType) ? String(object.mediaType) : "",
+ mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "",
verificationMaterial: isSet(object.verificationMaterial)
? exports.VerificationMaterial.fromJSON(object.verificationMaterial)
: undefined,
@@ -95,15 +83,18 @@ exports.Bundle = {
},
toJSON(message) {
const obj = {};
- message.mediaType !== undefined && (obj.mediaType = message.mediaType);
- message.verificationMaterial !== undefined && (obj.verificationMaterial = message.verificationMaterial
- ? exports.VerificationMaterial.toJSON(message.verificationMaterial)
- : undefined);
- message.content?.$case === "messageSignature" && (obj.messageSignature = message.content?.messageSignature
- ? sigstore_common_1.MessageSignature.toJSON(message.content?.messageSignature)
- : undefined);
- message.content?.$case === "dsseEnvelope" &&
- (obj.dsseEnvelope = message.content?.dsseEnvelope ? envelope_1.Envelope.toJSON(message.content?.dsseEnvelope) : undefined);
+ if (message.mediaType !== "") {
+ obj.mediaType = message.mediaType;
+ }
+ if (message.verificationMaterial !== undefined) {
+ obj.verificationMaterial = exports.VerificationMaterial.toJSON(message.verificationMaterial);
+ }
+ if (message.content?.$case === "messageSignature") {
+ obj.messageSignature = sigstore_common_1.MessageSignature.toJSON(message.content.messageSignature);
+ }
+ else if (message.content?.$case === "dsseEnvelope") {
+ obj.dsseEnvelope = envelope_1.Envelope.toJSON(message.content.dsseEnvelope);
+ }
return obj;
},
};
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
index c6f9baa91fff21..fd62147feaef79 100644
--- a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
@@ -1,6 +1,17 @@
"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+// protoc-gen-ts_proto v2.7.0
+// protoc v6.30.2
+// source: sigstore_common.proto
Object.defineProperty(exports, "__esModule", { value: true });
-exports.TimeRange = exports.X509CertificateChain = exports.SubjectAlternativeName = exports.X509Certificate = exports.DistinguishedName = exports.ObjectIdentifierValuePair = exports.ObjectIdentifier = exports.PublicKeyIdentifier = exports.PublicKey = exports.RFC3161SignedTimestamp = exports.LogId = exports.MessageSignature = exports.HashOutput = exports.subjectAlternativeNameTypeToJSON = exports.subjectAlternativeNameTypeFromJSON = exports.SubjectAlternativeNameType = exports.publicKeyDetailsToJSON = exports.publicKeyDetailsFromJSON = exports.PublicKeyDetails = exports.hashAlgorithmToJSON = exports.hashAlgorithmFromJSON = exports.HashAlgorithm = void 0;
+exports.TimeRange = exports.X509CertificateChain = exports.SubjectAlternativeName = exports.X509Certificate = exports.DistinguishedName = exports.ObjectIdentifierValuePair = exports.ObjectIdentifier = exports.PublicKeyIdentifier = exports.PublicKey = exports.RFC3161SignedTimestamp = exports.LogId = exports.MessageSignature = exports.HashOutput = exports.SubjectAlternativeNameType = exports.PublicKeyDetails = exports.HashAlgorithm = void 0;
+exports.hashAlgorithmFromJSON = hashAlgorithmFromJSON;
+exports.hashAlgorithmToJSON = hashAlgorithmToJSON;
+exports.publicKeyDetailsFromJSON = publicKeyDetailsFromJSON;
+exports.publicKeyDetailsToJSON = publicKeyDetailsToJSON;
+exports.subjectAlternativeNameTypeFromJSON = subjectAlternativeNameTypeFromJSON;
+exports.subjectAlternativeNameTypeToJSON = subjectAlternativeNameTypeToJSON;
/* eslint-disable */
const timestamp_1 = require("./google/protobuf/timestamp");
/**
@@ -20,7 +31,7 @@ var HashAlgorithm;
HashAlgorithm[HashAlgorithm["SHA2_512"] = 3] = "SHA2_512";
HashAlgorithm[HashAlgorithm["SHA3_256"] = 4] = "SHA3_256";
HashAlgorithm[HashAlgorithm["SHA3_384"] = 5] = "SHA3_384";
-})(HashAlgorithm = exports.HashAlgorithm || (exports.HashAlgorithm = {}));
+})(HashAlgorithm || (exports.HashAlgorithm = HashAlgorithm = {}));
function hashAlgorithmFromJSON(object) {
switch (object) {
case 0:
@@ -42,10 +53,9 @@ function hashAlgorithmFromJSON(object) {
case "SHA3_384":
return HashAlgorithm.SHA3_384;
default:
- throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
}
}
-exports.hashAlgorithmFromJSON = hashAlgorithmFromJSON;
function hashAlgorithmToJSON(object) {
switch (object) {
case HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED:
@@ -61,10 +71,9 @@ function hashAlgorithmToJSON(object) {
case HashAlgorithm.SHA3_384:
return "SHA3_384";
default:
- throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
}
}
-exports.hashAlgorithmToJSON = hashAlgorithmToJSON;
/**
 * Details of a specific public key, capturing the key encoding method,
* and signature algorithm.
@@ -76,7 +85,8 @@ exports.hashAlgorithmToJSON = hashAlgorithmToJSON;
* opinionated options instead of allowing every possible permutation.
*
* Any changes to this enum MUST be reflected in the algorithm registry.
- * See: docs/algorithm-registry.md
+ *
+ * See:
*
* To avoid the possibility of contradicting formats such as PKCS1 with
* ED25519 the valid permutations are listed as a linear set instead of a
@@ -123,11 +133,21 @@ var PublicKeyDetails;
/** PKIX_ED25519 - Ed 25519 */
PublicKeyDetails[PublicKeyDetails["PKIX_ED25519"] = 7] = "PKIX_ED25519";
PublicKeyDetails[PublicKeyDetails["PKIX_ED25519_PH"] = 8] = "PKIX_ED25519_PH";
+ /**
+ * PKIX_ECDSA_P384_SHA_256 - These algorithms are deprecated and should not be used, but they
+ * were/are being used by most Sigstore client implementations.
+ *
+ * @deprecated
+ */
+ PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P384_SHA_256"] = 19] = "PKIX_ECDSA_P384_SHA_256";
+ /** @deprecated */
+ PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P521_SHA_256"] = 20] = "PKIX_ECDSA_P521_SHA_256";
/**
* LMS_SHA256 - LMS and LM-OTS
*
- * These keys and signatures may be used by private Sigstore
- * deployments, but are not currently supported by the public
+ * These algorithms are deprecated and should not be used.
+ * Keys and signatures MAY be used by private Sigstore
+ * deployments, but will not be supported by the public
* good instance.
*
* USER WARNING: LMS and LM-OTS are both stateful signature schemes.
@@ -137,10 +157,30 @@ var PublicKeyDetails;
* MUST NOT be used for more than one signature per LM-OTS key.
* If you cannot maintain these invariants, you MUST NOT use these
* schemes.
+ *
+ * @deprecated
*/
PublicKeyDetails[PublicKeyDetails["LMS_SHA256"] = 14] = "LMS_SHA256";
+ /** @deprecated */
PublicKeyDetails[PublicKeyDetails["LMOTS_SHA256"] = 15] = "LMOTS_SHA256";
-})(PublicKeyDetails = exports.PublicKeyDetails || (exports.PublicKeyDetails = {}));
+ /**
+ * ML_DSA_65 - ML-DSA
+ *
+ * These ML_DSA_65 and ML-DSA_87 algorithms are the pure variants that
+ * take data to sign rather than the prehash variants (HashML-DSA), which
+ * take digests. While considered quantum-resistant, their usage
+ * involves tradeoffs in that signatures and keys are much larger, and
+ * this makes deployments more costly.
+ *
+ * USER WARNING: ML_DSA_65 and ML_DSA_87 are experimental algorithms.
+ * In the future they MAY be used by private Sigstore deployments, but
+ * they are not yet fully functional. This warning will be removed when
+ * these algorithms are widely supported by Sigstore clients and servers,
+ * but care should still be taken for production environments.
+ */
+ PublicKeyDetails[PublicKeyDetails["ML_DSA_65"] = 21] = "ML_DSA_65";
+ PublicKeyDetails[PublicKeyDetails["ML_DSA_87"] = 22] = "ML_DSA_87";
+})(PublicKeyDetails || (exports.PublicKeyDetails = PublicKeyDetails = {}));
function publicKeyDetailsFromJSON(object) {
switch (object) {
case 0:
@@ -194,17 +234,28 @@ function publicKeyDetailsFromJSON(object) {
case 8:
case "PKIX_ED25519_PH":
return PublicKeyDetails.PKIX_ED25519_PH;
+ case 19:
+ case "PKIX_ECDSA_P384_SHA_256":
+ return PublicKeyDetails.PKIX_ECDSA_P384_SHA_256;
+ case 20:
+ case "PKIX_ECDSA_P521_SHA_256":
+ return PublicKeyDetails.PKIX_ECDSA_P521_SHA_256;
case 14:
case "LMS_SHA256":
return PublicKeyDetails.LMS_SHA256;
case 15:
case "LMOTS_SHA256":
return PublicKeyDetails.LMOTS_SHA256;
+ case 21:
+ case "ML_DSA_65":
+ return PublicKeyDetails.ML_DSA_65;
+ case 22:
+ case "ML_DSA_87":
+ return PublicKeyDetails.ML_DSA_87;
default:
- throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
}
}
-exports.publicKeyDetailsFromJSON = publicKeyDetailsFromJSON;
function publicKeyDetailsToJSON(object) {
switch (object) {
case PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED:
@@ -241,15 +292,22 @@ function publicKeyDetailsToJSON(object) {
return "PKIX_ED25519";
case PublicKeyDetails.PKIX_ED25519_PH:
return "PKIX_ED25519_PH";
+ case PublicKeyDetails.PKIX_ECDSA_P384_SHA_256:
+ return "PKIX_ECDSA_P384_SHA_256";
+ case PublicKeyDetails.PKIX_ECDSA_P521_SHA_256:
+ return "PKIX_ECDSA_P521_SHA_256";
case PublicKeyDetails.LMS_SHA256:
return "LMS_SHA256";
case PublicKeyDetails.LMOTS_SHA256:
return "LMOTS_SHA256";
+ case PublicKeyDetails.ML_DSA_65:
+ return "ML_DSA_65";
+ case PublicKeyDetails.ML_DSA_87:
+ return "ML_DSA_87";
default:
- throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
}
}
-exports.publicKeyDetailsToJSON = publicKeyDetailsToJSON;
var SubjectAlternativeNameType;
(function (SubjectAlternativeNameType) {
SubjectAlternativeNameType[SubjectAlternativeNameType["SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED"] = 0] = "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED";
@@ -261,7 +319,7 @@ var SubjectAlternativeNameType;
* for more details.
*/
SubjectAlternativeNameType[SubjectAlternativeNameType["OTHER_NAME"] = 3] = "OTHER_NAME";
-})(SubjectAlternativeNameType = exports.SubjectAlternativeNameType || (exports.SubjectAlternativeNameType = {}));
+})(SubjectAlternativeNameType || (exports.SubjectAlternativeNameType = SubjectAlternativeNameType = {}));
function subjectAlternativeNameTypeFromJSON(object) {
switch (object) {
case 0:
@@ -277,10 +335,9 @@ function subjectAlternativeNameTypeFromJSON(object) {
case "OTHER_NAME":
return SubjectAlternativeNameType.OTHER_NAME;
default:
- throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
}
}
-exports.subjectAlternativeNameTypeFromJSON = subjectAlternativeNameTypeFromJSON;
function subjectAlternativeNameTypeToJSON(object) {
switch (object) {
case SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED:
@@ -292,13 +349,9 @@ function subjectAlternativeNameTypeToJSON(object) {
case SubjectAlternativeNameType.OTHER_NAME:
return "OTHER_NAME";
default:
- throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
}
}
-exports.subjectAlternativeNameTypeToJSON = subjectAlternativeNameTypeToJSON;
-function createBaseHashOutput() {
- return { algorithm: 0, digest: Buffer.alloc(0) };
-}
exports.HashOutput = {
fromJSON(object) {
return {
@@ -308,15 +361,15 @@ exports.HashOutput = {
},
toJSON(message) {
const obj = {};
- message.algorithm !== undefined && (obj.algorithm = hashAlgorithmToJSON(message.algorithm));
- message.digest !== undefined &&
- (obj.digest = base64FromBytes(message.digest !== undefined ? message.digest : Buffer.alloc(0)));
+ if (message.algorithm !== 0) {
+ obj.algorithm = hashAlgorithmToJSON(message.algorithm);
+ }
+ if (message.digest.length !== 0) {
+ obj.digest = base64FromBytes(message.digest);
+ }
return obj;
},
};
-function createBaseMessageSignature() {
- return { messageDigest: undefined, signature: Buffer.alloc(0) };
-}
exports.MessageSignature = {
fromJSON(object) {
return {
@@ -326,30 +379,27 @@ exports.MessageSignature = {
},
toJSON(message) {
const obj = {};
- message.messageDigest !== undefined &&
- (obj.messageDigest = message.messageDigest ? exports.HashOutput.toJSON(message.messageDigest) : undefined);
- message.signature !== undefined &&
- (obj.signature = base64FromBytes(message.signature !== undefined ? message.signature : Buffer.alloc(0)));
+ if (message.messageDigest !== undefined) {
+ obj.messageDigest = exports.HashOutput.toJSON(message.messageDigest);
+ }
+ if (message.signature.length !== 0) {
+ obj.signature = base64FromBytes(message.signature);
+ }
return obj;
},
};
-function createBaseLogId() {
- return { keyId: Buffer.alloc(0) };
-}
exports.LogId = {
fromJSON(object) {
return { keyId: isSet(object.keyId) ? Buffer.from(bytesFromBase64(object.keyId)) : Buffer.alloc(0) };
},
toJSON(message) {
const obj = {};
- message.keyId !== undefined &&
- (obj.keyId = base64FromBytes(message.keyId !== undefined ? message.keyId : Buffer.alloc(0)));
+ if (message.keyId.length !== 0) {
+ obj.keyId = base64FromBytes(message.keyId);
+ }
return obj;
},
};
-function createBaseRFC3161SignedTimestamp() {
- return { signedTimestamp: Buffer.alloc(0) };
-}
exports.RFC3161SignedTimestamp = {
fromJSON(object) {
return {
@@ -360,14 +410,12 @@ exports.RFC3161SignedTimestamp = {
},
toJSON(message) {
const obj = {};
- message.signedTimestamp !== undefined &&
- (obj.signedTimestamp = base64FromBytes(message.signedTimestamp !== undefined ? message.signedTimestamp : Buffer.alloc(0)));
+ if (message.signedTimestamp.length !== 0) {
+ obj.signedTimestamp = base64FromBytes(message.signedTimestamp);
+ }
return obj;
},
};
-function createBasePublicKey() {
- return { rawBytes: undefined, keyDetails: 0, validFor: undefined };
-}
exports.PublicKey = {
fromJSON(object) {
return {
@@ -378,48 +426,42 @@ exports.PublicKey = {
},
toJSON(message) {
const obj = {};
- message.rawBytes !== undefined &&
- (obj.rawBytes = message.rawBytes !== undefined ? base64FromBytes(message.rawBytes) : undefined);
- message.keyDetails !== undefined && (obj.keyDetails = publicKeyDetailsToJSON(message.keyDetails));
- message.validFor !== undefined &&
- (obj.validFor = message.validFor ? exports.TimeRange.toJSON(message.validFor) : undefined);
+ if (message.rawBytes !== undefined) {
+ obj.rawBytes = base64FromBytes(message.rawBytes);
+ }
+ if (message.keyDetails !== 0) {
+ obj.keyDetails = publicKeyDetailsToJSON(message.keyDetails);
+ }
+ if (message.validFor !== undefined) {
+ obj.validFor = exports.TimeRange.toJSON(message.validFor);
+ }
return obj;
},
};
-function createBasePublicKeyIdentifier() {
- return { hint: "" };
-}
exports.PublicKeyIdentifier = {
fromJSON(object) {
- return { hint: isSet(object.hint) ? String(object.hint) : "" };
+ return { hint: isSet(object.hint) ? globalThis.String(object.hint) : "" };
},
toJSON(message) {
const obj = {};
- message.hint !== undefined && (obj.hint = message.hint);
+ if (message.hint !== "") {
+ obj.hint = message.hint;
+ }
return obj;
},
};
-function createBaseObjectIdentifier() {
- return { id: [] };
-}
exports.ObjectIdentifier = {
fromJSON(object) {
- return { id: Array.isArray(object?.id) ? object.id.map((e) => Number(e)) : [] };
+ return { id: globalThis.Array.isArray(object?.id) ? object.id.map((e) => globalThis.Number(e)) : [] };
},
toJSON(message) {
const obj = {};
- if (message.id) {
+ if (message.id?.length) {
obj.id = message.id.map((e) => Math.round(e));
}
- else {
- obj.id = [];
- }
return obj;
},
};
-function createBaseObjectIdentifierValuePair() {
- return { oid: undefined, value: Buffer.alloc(0) };
-}
exports.ObjectIdentifierValuePair = {
fromJSON(object) {
return {
@@ -429,90 +471,86 @@ exports.ObjectIdentifierValuePair = {
},
toJSON(message) {
const obj = {};
- message.oid !== undefined && (obj.oid = message.oid ? exports.ObjectIdentifier.toJSON(message.oid) : undefined);
- message.value !== undefined &&
- (obj.value = base64FromBytes(message.value !== undefined ? message.value : Buffer.alloc(0)));
+ if (message.oid !== undefined) {
+ obj.oid = exports.ObjectIdentifier.toJSON(message.oid);
+ }
+ if (message.value.length !== 0) {
+ obj.value = base64FromBytes(message.value);
+ }
return obj;
},
};
-function createBaseDistinguishedName() {
- return { organization: "", commonName: "" };
-}
exports.DistinguishedName = {
fromJSON(object) {
return {
- organization: isSet(object.organization) ? String(object.organization) : "",
- commonName: isSet(object.commonName) ? String(object.commonName) : "",
+ organization: isSet(object.organization) ? globalThis.String(object.organization) : "",
+ commonName: isSet(object.commonName) ? globalThis.String(object.commonName) : "",
};
},
toJSON(message) {
const obj = {};
- message.organization !== undefined && (obj.organization = message.organization);
- message.commonName !== undefined && (obj.commonName = message.commonName);
+ if (message.organization !== "") {
+ obj.organization = message.organization;
+ }
+ if (message.commonName !== "") {
+ obj.commonName = message.commonName;
+ }
return obj;
},
};
-function createBaseX509Certificate() {
- return { rawBytes: Buffer.alloc(0) };
-}
exports.X509Certificate = {
fromJSON(object) {
return { rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : Buffer.alloc(0) };
},
toJSON(message) {
const obj = {};
- message.rawBytes !== undefined &&
- (obj.rawBytes = base64FromBytes(message.rawBytes !== undefined ? message.rawBytes : Buffer.alloc(0)));
+ if (message.rawBytes.length !== 0) {
+ obj.rawBytes = base64FromBytes(message.rawBytes);
+ }
return obj;
},
};
-function createBaseSubjectAlternativeName() {
- return { type: 0, identity: undefined };
-}
exports.SubjectAlternativeName = {
fromJSON(object) {
return {
type: isSet(object.type) ? subjectAlternativeNameTypeFromJSON(object.type) : 0,
identity: isSet(object.regexp)
- ? { $case: "regexp", regexp: String(object.regexp) }
+ ? { $case: "regexp", regexp: globalThis.String(object.regexp) }
: isSet(object.value)
- ? { $case: "value", value: String(object.value) }
+ ? { $case: "value", value: globalThis.String(object.value) }
: undefined,
};
},
toJSON(message) {
const obj = {};
- message.type !== undefined && (obj.type = subjectAlternativeNameTypeToJSON(message.type));
- message.identity?.$case === "regexp" && (obj.regexp = message.identity?.regexp);
- message.identity?.$case === "value" && (obj.value = message.identity?.value);
+ if (message.type !== 0) {
+ obj.type = subjectAlternativeNameTypeToJSON(message.type);
+ }
+ if (message.identity?.$case === "regexp") {
+ obj.regexp = message.identity.regexp;
+ }
+ else if (message.identity?.$case === "value") {
+ obj.value = message.identity.value;
+ }
return obj;
},
};
-function createBaseX509CertificateChain() {
- return { certificates: [] };
-}
exports.X509CertificateChain = {
fromJSON(object) {
return {
- certificates: Array.isArray(object?.certificates)
+ certificates: globalThis.Array.isArray(object?.certificates)
? object.certificates.map((e) => exports.X509Certificate.fromJSON(e))
: [],
};
},
toJSON(message) {
const obj = {};
- if (message.certificates) {
- obj.certificates = message.certificates.map((e) => e ? exports.X509Certificate.toJSON(e) : undefined);
- }
- else {
- obj.certificates = [];
+ if (message.certificates?.length) {
+ obj.certificates = message.certificates.map((e) => exports.X509Certificate.toJSON(e));
}
return obj;
},
};
-function createBaseTimeRange() {
- return { start: undefined, end: undefined };
-}
exports.TimeRange = {
fromJSON(object) {
return {
@@ -522,62 +560,32 @@ exports.TimeRange = {
},
toJSON(message) {
const obj = {};
- message.start !== undefined && (obj.start = message.start.toISOString());
- message.end !== undefined && (obj.end = message.end.toISOString());
+ if (message.start !== undefined) {
+ obj.start = message.start.toISOString();
+ }
+ if (message.end !== undefined) {
+ obj.end = message.end.toISOString();
+ }
return obj;
},
};
-var tsProtoGlobalThis = (() => {
- if (typeof globalThis !== "undefined") {
- return globalThis;
- }
- if (typeof self !== "undefined") {
- return self;
- }
- if (typeof window !== "undefined") {
- return window;
- }
- if (typeof global !== "undefined") {
- return global;
- }
- throw "Unable to locate global object";
-})();
function bytesFromBase64(b64) {
- if (tsProtoGlobalThis.Buffer) {
- return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
- }
- else {
- const bin = tsProtoGlobalThis.atob(b64);
- const arr = new Uint8Array(bin.length);
- for (let i = 0; i < bin.length; ++i) {
- arr[i] = bin.charCodeAt(i);
- }
- return arr;
- }
+ return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
}
function base64FromBytes(arr) {
- if (tsProtoGlobalThis.Buffer) {
- return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
- }
- else {
- const bin = [];
- arr.forEach((byte) => {
- bin.push(String.fromCharCode(byte));
- });
- return tsProtoGlobalThis.btoa(bin.join(""));
- }
+ return globalThis.Buffer.from(arr).toString("base64");
}
function fromTimestamp(t) {
- let millis = Number(t.seconds) * 1000;
- millis += t.nanos / 1000000;
- return new Date(millis);
+ let millis = (globalThis.Number(t.seconds) || 0) * 1_000;
+ millis += (t.nanos || 0) / 1_000_000;
+ return new globalThis.Date(millis);
}
function fromJsonTimestamp(o) {
- if (o instanceof Date) {
+ if (o instanceof globalThis.Date) {
return o;
}
else if (typeof o === "string") {
- return new Date(o);
+ return new globalThis.Date(o);
}
else {
return fromTimestamp(timestamp_1.Timestamp.fromJSON(o));
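The updated fromTimestamp guards against missing fields and converts seconds/nanos into epoch milliseconds before building a Date. A worked example of the arithmetic (illustrative only, not part of the patch):

    // Sketch of the conversion performed by fromTimestamp above.
    const fromTimestamp = (t) => {
      let millis = (globalThis.Number(t.seconds) || 0) * 1_000;
      millis += (t.nanos || 0) / 1_000_000;
      return new globalThis.Date(millis);
    };
    console.log(fromTimestamp({ seconds: "1", nanos: 500000000 }).getTime()); // 1500
    console.log(fromTimestamp({ seconds: "0", nanos: 0 }).toISOString());     // 1970-01-01T00:00:00.000Z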
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
index 398193b2075a70..9f9b3d0d1b4611 100644
--- a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
@@ -1,71 +1,75 @@
"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+// protoc-gen-ts_proto v2.7.0
+// protoc v6.30.2
+// source: sigstore_rekor.proto
Object.defineProperty(exports, "__esModule", { value: true });
exports.TransparencyLogEntry = exports.InclusionPromise = exports.InclusionProof = exports.Checkpoint = exports.KindVersion = void 0;
/* eslint-disable */
const sigstore_common_1 = require("./sigstore_common");
-function createBaseKindVersion() {
- return { kind: "", version: "" };
-}
exports.KindVersion = {
fromJSON(object) {
return {
- kind: isSet(object.kind) ? String(object.kind) : "",
- version: isSet(object.version) ? String(object.version) : "",
+ kind: isSet(object.kind) ? globalThis.String(object.kind) : "",
+ version: isSet(object.version) ? globalThis.String(object.version) : "",
};
},
toJSON(message) {
const obj = {};
- message.kind !== undefined && (obj.kind = message.kind);
- message.version !== undefined && (obj.version = message.version);
+ if (message.kind !== "") {
+ obj.kind = message.kind;
+ }
+ if (message.version !== "") {
+ obj.version = message.version;
+ }
return obj;
},
};
-function createBaseCheckpoint() {
- return { envelope: "" };
-}
exports.Checkpoint = {
fromJSON(object) {
- return { envelope: isSet(object.envelope) ? String(object.envelope) : "" };
+ return { envelope: isSet(object.envelope) ? globalThis.String(object.envelope) : "" };
},
toJSON(message) {
const obj = {};
- message.envelope !== undefined && (obj.envelope = message.envelope);
+ if (message.envelope !== "") {
+ obj.envelope = message.envelope;
+ }
return obj;
},
};
-function createBaseInclusionProof() {
- return { logIndex: "0", rootHash: Buffer.alloc(0), treeSize: "0", hashes: [], checkpoint: undefined };
-}
exports.InclusionProof = {
fromJSON(object) {
return {
- logIndex: isSet(object.logIndex) ? String(object.logIndex) : "0",
+ logIndex: isSet(object.logIndex) ? globalThis.String(object.logIndex) : "0",
rootHash: isSet(object.rootHash) ? Buffer.from(bytesFromBase64(object.rootHash)) : Buffer.alloc(0),
- treeSize: isSet(object.treeSize) ? String(object.treeSize) : "0",
- hashes: Array.isArray(object?.hashes) ? object.hashes.map((e) => Buffer.from(bytesFromBase64(e))) : [],
+ treeSize: isSet(object.treeSize) ? globalThis.String(object.treeSize) : "0",
+ hashes: globalThis.Array.isArray(object?.hashes)
+ ? object.hashes.map((e) => Buffer.from(bytesFromBase64(e)))
+ : [],
checkpoint: isSet(object.checkpoint) ? exports.Checkpoint.fromJSON(object.checkpoint) : undefined,
};
},
toJSON(message) {
const obj = {};
- message.logIndex !== undefined && (obj.logIndex = message.logIndex);
- message.rootHash !== undefined &&
- (obj.rootHash = base64FromBytes(message.rootHash !== undefined ? message.rootHash : Buffer.alloc(0)));
- message.treeSize !== undefined && (obj.treeSize = message.treeSize);
- if (message.hashes) {
- obj.hashes = message.hashes.map((e) => base64FromBytes(e !== undefined ? e : Buffer.alloc(0)));
- }
- else {
- obj.hashes = [];
- }
- message.checkpoint !== undefined &&
- (obj.checkpoint = message.checkpoint ? exports.Checkpoint.toJSON(message.checkpoint) : undefined);
+ if (message.logIndex !== "0") {
+ obj.logIndex = message.logIndex;
+ }
+ if (message.rootHash.length !== 0) {
+ obj.rootHash = base64FromBytes(message.rootHash);
+ }
+ if (message.treeSize !== "0") {
+ obj.treeSize = message.treeSize;
+ }
+ if (message.hashes?.length) {
+ obj.hashes = message.hashes.map((e) => base64FromBytes(e));
+ }
+ if (message.checkpoint !== undefined) {
+ obj.checkpoint = exports.Checkpoint.toJSON(message.checkpoint);
+ }
return obj;
},
};
-function createBaseInclusionPromise() {
- return { signedEntryTimestamp: Buffer.alloc(0) };
-}
exports.InclusionPromise = {
fromJSON(object) {
return {
@@ -76,29 +80,19 @@ exports.InclusionPromise = {
},
toJSON(message) {
const obj = {};
- message.signedEntryTimestamp !== undefined &&
- (obj.signedEntryTimestamp = base64FromBytes(message.signedEntryTimestamp !== undefined ? message.signedEntryTimestamp : Buffer.alloc(0)));
+ if (message.signedEntryTimestamp.length !== 0) {
+ obj.signedEntryTimestamp = base64FromBytes(message.signedEntryTimestamp);
+ }
return obj;
},
};
-function createBaseTransparencyLogEntry() {
- return {
- logIndex: "0",
- logId: undefined,
- kindVersion: undefined,
- integratedTime: "0",
- inclusionPromise: undefined,
- inclusionProof: undefined,
- canonicalizedBody: Buffer.alloc(0),
- };
-}
exports.TransparencyLogEntry = {
fromJSON(object) {
return {
- logIndex: isSet(object.logIndex) ? String(object.logIndex) : "0",
+ logIndex: isSet(object.logIndex) ? globalThis.String(object.logIndex) : "0",
logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
kindVersion: isSet(object.kindVersion) ? exports.KindVersion.fromJSON(object.kindVersion) : undefined,
- integratedTime: isSet(object.integratedTime) ? String(object.integratedTime) : "0",
+ integratedTime: isSet(object.integratedTime) ? globalThis.String(object.integratedTime) : "0",
inclusionPromise: isSet(object.inclusionPromise) ? exports.InclusionPromise.fromJSON(object.inclusionPromise) : undefined,
inclusionProof: isSet(object.inclusionProof) ? exports.InclusionProof.fromJSON(object.inclusionProof) : undefined,
canonicalizedBody: isSet(object.canonicalizedBody)
@@ -108,59 +102,35 @@ exports.TransparencyLogEntry = {
},
toJSON(message) {
const obj = {};
- message.logIndex !== undefined && (obj.logIndex = message.logIndex);
- message.logId !== undefined && (obj.logId = message.logId ? sigstore_common_1.LogId.toJSON(message.logId) : undefined);
- message.kindVersion !== undefined &&
- (obj.kindVersion = message.kindVersion ? exports.KindVersion.toJSON(message.kindVersion) : undefined);
- message.integratedTime !== undefined && (obj.integratedTime = message.integratedTime);
- message.inclusionPromise !== undefined &&
- (obj.inclusionPromise = message.inclusionPromise ? exports.InclusionPromise.toJSON(message.inclusionPromise) : undefined);
- message.inclusionProof !== undefined &&
- (obj.inclusionProof = message.inclusionProof ? exports.InclusionProof.toJSON(message.inclusionProof) : undefined);
- message.canonicalizedBody !== undefined &&
- (obj.canonicalizedBody = base64FromBytes(message.canonicalizedBody !== undefined ? message.canonicalizedBody : Buffer.alloc(0)));
+ if (message.logIndex !== "0") {
+ obj.logIndex = message.logIndex;
+ }
+ if (message.logId !== undefined) {
+ obj.logId = sigstore_common_1.LogId.toJSON(message.logId);
+ }
+ if (message.kindVersion !== undefined) {
+ obj.kindVersion = exports.KindVersion.toJSON(message.kindVersion);
+ }
+ if (message.integratedTime !== "0") {
+ obj.integratedTime = message.integratedTime;
+ }
+ if (message.inclusionPromise !== undefined) {
+ obj.inclusionPromise = exports.InclusionPromise.toJSON(message.inclusionPromise);
+ }
+ if (message.inclusionProof !== undefined) {
+ obj.inclusionProof = exports.InclusionProof.toJSON(message.inclusionProof);
+ }
+ if (message.canonicalizedBody.length !== 0) {
+ obj.canonicalizedBody = base64FromBytes(message.canonicalizedBody);
+ }
return obj;
},
};
-var tsProtoGlobalThis = (() => {
- if (typeof globalThis !== "undefined") {
- return globalThis;
- }
- if (typeof self !== "undefined") {
- return self;
- }
- if (typeof window !== "undefined") {
- return window;
- }
- if (typeof global !== "undefined") {
- return global;
- }
- throw "Unable to locate global object";
-})();
function bytesFromBase64(b64) {
- if (tsProtoGlobalThis.Buffer) {
- return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
- }
- else {
- const bin = tsProtoGlobalThis.atob(b64);
- const arr = new Uint8Array(bin.length);
- for (let i = 0; i < bin.length; ++i) {
- arr[i] = bin.charCodeAt(i);
- }
- return arr;
- }
+ return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
}
function base64FromBytes(arr) {
- if (tsProtoGlobalThis.Buffer) {
- return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
- }
- else {
- const bin = [];
- arr.forEach((byte) => {
- bin.push(String.fromCharCode(byte));
- });
- return tsProtoGlobalThis.btoa(bin.join(""));
- }
+ return globalThis.Buffer.from(arr).toString("base64");
}
function isSet(value) {
return value !== null && value !== undefined;
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
index 8791aba27044bd..d5f4e4ef3cddcd 100644
--- a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
@@ -1,155 +1,281 @@
"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+// protoc-gen-ts_proto v2.7.0
+// protoc v6.30.2
+// source: sigstore_trustroot.proto
Object.defineProperty(exports, "__esModule", { value: true });
-exports.ClientTrustConfig = exports.SigningConfig = exports.TrustedRoot = exports.CertificateAuthority = exports.TransparencyLogInstance = void 0;
+exports.ClientTrustConfig = exports.ServiceConfiguration = exports.Service = exports.SigningConfig = exports.TrustedRoot = exports.CertificateAuthority = exports.TransparencyLogInstance = exports.ServiceSelector = void 0;
+exports.serviceSelectorFromJSON = serviceSelectorFromJSON;
+exports.serviceSelectorToJSON = serviceSelectorToJSON;
/* eslint-disable */
const sigstore_common_1 = require("./sigstore_common");
-function createBaseTransparencyLogInstance() {
- return { baseUrl: "", hashAlgorithm: 0, publicKey: undefined, logId: undefined, checkpointKeyId: undefined };
+/**
+ * ServiceSelector specifies how a client SHOULD select a set of
+ * Services to connect to. A client SHOULD throw an error if
+ * the value is SERVICE_SELECTOR_UNDEFINED.
+ */
+var ServiceSelector;
+(function (ServiceSelector) {
+ ServiceSelector[ServiceSelector["SERVICE_SELECTOR_UNDEFINED"] = 0] = "SERVICE_SELECTOR_UNDEFINED";
+ /**
+ * ALL - Clients SHOULD select all Services based on supported API version
+ * and validity window.
+ */
+ ServiceSelector[ServiceSelector["ALL"] = 1] = "ALL";
+ /**
+ * ANY - Clients SHOULD select one Service based on supported API version
+ * and validity window. It is up to the client implementation to
+ * decide how to select the Service, e.g. random or round-robin.
+ */
+ ServiceSelector[ServiceSelector["ANY"] = 2] = "ANY";
+ /**
+ * EXACT - Clients SHOULD select a specific number of Services based on
+ * supported API version and validity window, using the provided
+ * `count`. It is up to the client implementation to decide how to
+ * select the Service, e.g. random or round-robin.
+ */
+ ServiceSelector[ServiceSelector["EXACT"] = 3] = "EXACT";
+})(ServiceSelector || (exports.ServiceSelector = ServiceSelector = {}));
+function serviceSelectorFromJSON(object) {
+ switch (object) {
+ case 0:
+ case "SERVICE_SELECTOR_UNDEFINED":
+ return ServiceSelector.SERVICE_SELECTOR_UNDEFINED;
+ case 1:
+ case "ALL":
+ return ServiceSelector.ALL;
+ case 2:
+ case "ANY":
+ return ServiceSelector.ANY;
+ case 3:
+ case "EXACT":
+ return ServiceSelector.EXACT;
+ default:
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum ServiceSelector");
+ }
+}
+function serviceSelectorToJSON(object) {
+ switch (object) {
+ case ServiceSelector.SERVICE_SELECTOR_UNDEFINED:
+ return "SERVICE_SELECTOR_UNDEFINED";
+ case ServiceSelector.ALL:
+ return "ALL";
+ case ServiceSelector.ANY:
+ return "ANY";
+ case ServiceSelector.EXACT:
+ return "EXACT";
+ default:
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum ServiceSelector");
+ }
}
exports.TransparencyLogInstance = {
fromJSON(object) {
return {
- baseUrl: isSet(object.baseUrl) ? String(object.baseUrl) : "",
+ baseUrl: isSet(object.baseUrl) ? globalThis.String(object.baseUrl) : "",
hashAlgorithm: isSet(object.hashAlgorithm) ? (0, sigstore_common_1.hashAlgorithmFromJSON)(object.hashAlgorithm) : 0,
publicKey: isSet(object.publicKey) ? sigstore_common_1.PublicKey.fromJSON(object.publicKey) : undefined,
logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
checkpointKeyId: isSet(object.checkpointKeyId) ? sigstore_common_1.LogId.fromJSON(object.checkpointKeyId) : undefined,
+ operator: isSet(object.operator) ? globalThis.String(object.operator) : "",
};
},
toJSON(message) {
const obj = {};
- message.baseUrl !== undefined && (obj.baseUrl = message.baseUrl);
- message.hashAlgorithm !== undefined && (obj.hashAlgorithm = (0, sigstore_common_1.hashAlgorithmToJSON)(message.hashAlgorithm));
- message.publicKey !== undefined &&
- (obj.publicKey = message.publicKey ? sigstore_common_1.PublicKey.toJSON(message.publicKey) : undefined);
- message.logId !== undefined && (obj.logId = message.logId ? sigstore_common_1.LogId.toJSON(message.logId) : undefined);
- message.checkpointKeyId !== undefined &&
- (obj.checkpointKeyId = message.checkpointKeyId ? sigstore_common_1.LogId.toJSON(message.checkpointKeyId) : undefined);
+ if (message.baseUrl !== "") {
+ obj.baseUrl = message.baseUrl;
+ }
+ if (message.hashAlgorithm !== 0) {
+ obj.hashAlgorithm = (0, sigstore_common_1.hashAlgorithmToJSON)(message.hashAlgorithm);
+ }
+ if (message.publicKey !== undefined) {
+ obj.publicKey = sigstore_common_1.PublicKey.toJSON(message.publicKey);
+ }
+ if (message.logId !== undefined) {
+ obj.logId = sigstore_common_1.LogId.toJSON(message.logId);
+ }
+ if (message.checkpointKeyId !== undefined) {
+ obj.checkpointKeyId = sigstore_common_1.LogId.toJSON(message.checkpointKeyId);
+ }
+ if (message.operator !== "") {
+ obj.operator = message.operator;
+ }
return obj;
},
};
-function createBaseCertificateAuthority() {
- return { subject: undefined, uri: "", certChain: undefined, validFor: undefined };
-}
exports.CertificateAuthority = {
fromJSON(object) {
return {
subject: isSet(object.subject) ? sigstore_common_1.DistinguishedName.fromJSON(object.subject) : undefined,
- uri: isSet(object.uri) ? String(object.uri) : "",
+ uri: isSet(object.uri) ? globalThis.String(object.uri) : "",
certChain: isSet(object.certChain) ? sigstore_common_1.X509CertificateChain.fromJSON(object.certChain) : undefined,
validFor: isSet(object.validFor) ? sigstore_common_1.TimeRange.fromJSON(object.validFor) : undefined,
+ operator: isSet(object.operator) ? globalThis.String(object.operator) : "",
};
},
toJSON(message) {
const obj = {};
- message.subject !== undefined &&
- (obj.subject = message.subject ? sigstore_common_1.DistinguishedName.toJSON(message.subject) : undefined);
- message.uri !== undefined && (obj.uri = message.uri);
- message.certChain !== undefined &&
- (obj.certChain = message.certChain ? sigstore_common_1.X509CertificateChain.toJSON(message.certChain) : undefined);
- message.validFor !== undefined &&
- (obj.validFor = message.validFor ? sigstore_common_1.TimeRange.toJSON(message.validFor) : undefined);
+ if (message.subject !== undefined) {
+ obj.subject = sigstore_common_1.DistinguishedName.toJSON(message.subject);
+ }
+ if (message.uri !== "") {
+ obj.uri = message.uri;
+ }
+ if (message.certChain !== undefined) {
+ obj.certChain = sigstore_common_1.X509CertificateChain.toJSON(message.certChain);
+ }
+ if (message.validFor !== undefined) {
+ obj.validFor = sigstore_common_1.TimeRange.toJSON(message.validFor);
+ }
+ if (message.operator !== "") {
+ obj.operator = message.operator;
+ }
return obj;
},
};
-function createBaseTrustedRoot() {
- return { mediaType: "", tlogs: [], certificateAuthorities: [], ctlogs: [], timestampAuthorities: [] };
-}
exports.TrustedRoot = {
fromJSON(object) {
return {
- mediaType: isSet(object.mediaType) ? String(object.mediaType) : "",
- tlogs: Array.isArray(object?.tlogs) ? object.tlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e)) : [],
- certificateAuthorities: Array.isArray(object?.certificateAuthorities)
+ mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "",
+ tlogs: globalThis.Array.isArray(object?.tlogs)
+ ? object.tlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e))
+ : [],
+ certificateAuthorities: globalThis.Array.isArray(object?.certificateAuthorities)
? object.certificateAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
: [],
- ctlogs: Array.isArray(object?.ctlogs)
+ ctlogs: globalThis.Array.isArray(object?.ctlogs)
? object.ctlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e))
: [],
- timestampAuthorities: Array.isArray(object?.timestampAuthorities)
+ timestampAuthorities: globalThis.Array.isArray(object?.timestampAuthorities)
? object.timestampAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
: [],
};
},
toJSON(message) {
const obj = {};
- message.mediaType !== undefined && (obj.mediaType = message.mediaType);
- if (message.tlogs) {
- obj.tlogs = message.tlogs.map((e) => e ? exports.TransparencyLogInstance.toJSON(e) : undefined);
+ if (message.mediaType !== "") {
+ obj.mediaType = message.mediaType;
+ }
+ if (message.tlogs?.length) {
+ obj.tlogs = message.tlogs.map((e) => exports.TransparencyLogInstance.toJSON(e));
+ }
+ if (message.certificateAuthorities?.length) {
+ obj.certificateAuthorities = message.certificateAuthorities.map((e) => exports.CertificateAuthority.toJSON(e));
+ }
+ if (message.ctlogs?.length) {
+ obj.ctlogs = message.ctlogs.map((e) => exports.TransparencyLogInstance.toJSON(e));
}
- else {
- obj.tlogs = [];
+ if (message.timestampAuthorities?.length) {
+ obj.timestampAuthorities = message.timestampAuthorities.map((e) => exports.CertificateAuthority.toJSON(e));
+ }
+ return obj;
+ },
+};
+exports.SigningConfig = {
+ fromJSON(object) {
+ return {
+ mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "",
+ caUrls: globalThis.Array.isArray(object?.caUrls) ? object.caUrls.map((e) => exports.Service.fromJSON(e)) : [],
+ oidcUrls: globalThis.Array.isArray(object?.oidcUrls) ? object.oidcUrls.map((e) => exports.Service.fromJSON(e)) : [],
+ rekorTlogUrls: globalThis.Array.isArray(object?.rekorTlogUrls)
+ ? object.rekorTlogUrls.map((e) => exports.Service.fromJSON(e))
+ : [],
+ rekorTlogConfig: isSet(object.rekorTlogConfig)
+ ? exports.ServiceConfiguration.fromJSON(object.rekorTlogConfig)
+ : undefined,
+ tsaUrls: globalThis.Array.isArray(object?.tsaUrls) ? object.tsaUrls.map((e) => exports.Service.fromJSON(e)) : [],
+ tsaConfig: isSet(object.tsaConfig) ? exports.ServiceConfiguration.fromJSON(object.tsaConfig) : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.mediaType !== "") {
+ obj.mediaType = message.mediaType;
}
- if (message.certificateAuthorities) {
- obj.certificateAuthorities = message.certificateAuthorities.map((e) => e ? exports.CertificateAuthority.toJSON(e) : undefined);
+ if (message.caUrls?.length) {
+ obj.caUrls = message.caUrls.map((e) => exports.Service.toJSON(e));
}
- else {
- obj.certificateAuthorities = [];
+ if (message.oidcUrls?.length) {
+ obj.oidcUrls = message.oidcUrls.map((e) => exports.Service.toJSON(e));
}
- if (message.ctlogs) {
- obj.ctlogs = message.ctlogs.map((e) => e ? exports.TransparencyLogInstance.toJSON(e) : undefined);
+ if (message.rekorTlogUrls?.length) {
+ obj.rekorTlogUrls = message.rekorTlogUrls.map((e) => exports.Service.toJSON(e));
}
- else {
- obj.ctlogs = [];
+ if (message.rekorTlogConfig !== undefined) {
+ obj.rekorTlogConfig = exports.ServiceConfiguration.toJSON(message.rekorTlogConfig);
}
- if (message.timestampAuthorities) {
- obj.timestampAuthorities = message.timestampAuthorities.map((e) => e ? exports.CertificateAuthority.toJSON(e) : undefined);
+ if (message.tsaUrls?.length) {
+ obj.tsaUrls = message.tsaUrls.map((e) => exports.Service.toJSON(e));
}
- else {
- obj.timestampAuthorities = [];
+ if (message.tsaConfig !== undefined) {
+ obj.tsaConfig = exports.ServiceConfiguration.toJSON(message.tsaConfig);
}
return obj;
},
};
-function createBaseSigningConfig() {
- return { caUrl: "", oidcUrl: "", tlogUrls: [], tsaUrls: [] };
-}
-exports.SigningConfig = {
+exports.Service = {
fromJSON(object) {
return {
- caUrl: isSet(object.caUrl) ? String(object.caUrl) : "",
- oidcUrl: isSet(object.oidcUrl) ? String(object.oidcUrl) : "",
- tlogUrls: Array.isArray(object?.tlogUrls) ? object.tlogUrls.map((e) => String(e)) : [],
- tsaUrls: Array.isArray(object?.tsaUrls) ? object.tsaUrls.map((e) => String(e)) : [],
+ url: isSet(object.url) ? globalThis.String(object.url) : "",
+ majorApiVersion: isSet(object.majorApiVersion) ? globalThis.Number(object.majorApiVersion) : 0,
+ validFor: isSet(object.validFor) ? sigstore_common_1.TimeRange.fromJSON(object.validFor) : undefined,
+ operator: isSet(object.operator) ? globalThis.String(object.operator) : "",
};
},
toJSON(message) {
const obj = {};
- message.caUrl !== undefined && (obj.caUrl = message.caUrl);
- message.oidcUrl !== undefined && (obj.oidcUrl = message.oidcUrl);
- if (message.tlogUrls) {
- obj.tlogUrls = message.tlogUrls.map((e) => e);
+ if (message.url !== "") {
+ obj.url = message.url;
}
- else {
- obj.tlogUrls = [];
+ if (message.majorApiVersion !== 0) {
+ obj.majorApiVersion = Math.round(message.majorApiVersion);
}
- if (message.tsaUrls) {
- obj.tsaUrls = message.tsaUrls.map((e) => e);
+ if (message.validFor !== undefined) {
+ obj.validFor = sigstore_common_1.TimeRange.toJSON(message.validFor);
}
- else {
- obj.tsaUrls = [];
+ if (message.operator !== "") {
+ obj.operator = message.operator;
+ }
+ return obj;
+ },
+};
+exports.ServiceConfiguration = {
+ fromJSON(object) {
+ return {
+ selector: isSet(object.selector) ? serviceSelectorFromJSON(object.selector) : 0,
+ count: isSet(object.count) ? globalThis.Number(object.count) : 0,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.selector !== 0) {
+ obj.selector = serviceSelectorToJSON(message.selector);
+ }
+ if (message.count !== 0) {
+ obj.count = Math.round(message.count);
}
return obj;
},
};
-function createBaseClientTrustConfig() {
- return { mediaType: "", trustedRoot: undefined, signingConfig: undefined };
-}
exports.ClientTrustConfig = {
fromJSON(object) {
return {
- mediaType: isSet(object.mediaType) ? String(object.mediaType) : "",
+ mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "",
trustedRoot: isSet(object.trustedRoot) ? exports.TrustedRoot.fromJSON(object.trustedRoot) : undefined,
signingConfig: isSet(object.signingConfig) ? exports.SigningConfig.fromJSON(object.signingConfig) : undefined,
};
},
toJSON(message) {
const obj = {};
- message.mediaType !== undefined && (obj.mediaType = message.mediaType);
- message.trustedRoot !== undefined &&
- (obj.trustedRoot = message.trustedRoot ? exports.TrustedRoot.toJSON(message.trustedRoot) : undefined);
- message.signingConfig !== undefined &&
- (obj.signingConfig = message.signingConfig ? exports.SigningConfig.toJSON(message.signingConfig) : undefined);
+ if (message.mediaType !== "") {
+ obj.mediaType = message.mediaType;
+ }
+ if (message.trustedRoot !== undefined) {
+ obj.trustedRoot = exports.TrustedRoot.toJSON(message.trustedRoot);
+ }
+ if (message.signingConfig !== undefined) {
+ obj.signingConfig = exports.SigningConfig.toJSON(message.signingConfig);
+ }
return obj;
},
};
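The new `ServiceSelector` enum and `ServiceConfiguration` message above describe how a client is expected to pick from the `Service` lists carried by a `SigningConfig`: all matching services, any one of them, or exactly `count` of them. The sketch below illustrates those semantics with a hypothetical `selectServices` helper and made-up service entries; per the generated comments, the concrete selection strategy (random, round-robin, first N) is left to the client.

```js
// Illustrative only: apply ServiceSelector semantics to a list of Service entries.
const ServiceSelector = { SERVICE_SELECTOR_UNDEFINED: 0, ALL: 1, ANY: 2, EXACT: 3 };

function selectServices(services, config) {
  switch (config.selector) {
    case ServiceSelector.ALL:
      return services;                         // keep every matching Service
    case ServiceSelector.ANY:
      return services.slice(0, 1);             // keep one; strategy is client-defined
    case ServiceSelector.EXACT:
      return services.slice(0, config.count);  // keep exactly `count`
    default:
      throw new Error("SERVICE_SELECTOR_UNDEFINED: clients SHOULD reject this value");
  }
}

// Hypothetical rekorTlogUrls entries shaped like the new Service message.
const rekorTlogUrls = [
  { url: "https://log1.example", majorApiVersion: 2, operator: "op-a" },
  { url: "https://log2.example", majorApiVersion: 2, operator: "op-b" },
];
console.log(selectServices(rekorTlogUrls, { selector: ServiceSelector.EXACT, count: 1 }));
```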
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
index 4af83c5a546607..a616d5f0f6a216 100644
--- a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
@@ -1,86 +1,71 @@
"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+// protoc-gen-ts_proto v2.7.0
+// protoc v6.30.2
+// source: sigstore_verification.proto
Object.defineProperty(exports, "__esModule", { value: true });
exports.Input = exports.Artifact = exports.ArtifactVerificationOptions_ObserverTimestampOptions = exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions = exports.ArtifactVerificationOptions_TimestampAuthorityOptions = exports.ArtifactVerificationOptions_CtlogOptions = exports.ArtifactVerificationOptions_TlogOptions = exports.ArtifactVerificationOptions = exports.PublicKeyIdentities = exports.CertificateIdentities = exports.CertificateIdentity = void 0;
/* eslint-disable */
const sigstore_bundle_1 = require("./sigstore_bundle");
const sigstore_common_1 = require("./sigstore_common");
const sigstore_trustroot_1 = require("./sigstore_trustroot");
-function createBaseCertificateIdentity() {
- return { issuer: "", san: undefined, oids: [] };
-}
exports.CertificateIdentity = {
fromJSON(object) {
return {
- issuer: isSet(object.issuer) ? String(object.issuer) : "",
+ issuer: isSet(object.issuer) ? globalThis.String(object.issuer) : "",
san: isSet(object.san) ? sigstore_common_1.SubjectAlternativeName.fromJSON(object.san) : undefined,
- oids: Array.isArray(object?.oids) ? object.oids.map((e) => sigstore_common_1.ObjectIdentifierValuePair.fromJSON(e)) : [],
+ oids: globalThis.Array.isArray(object?.oids)
+ ? object.oids.map((e) => sigstore_common_1.ObjectIdentifierValuePair.fromJSON(e))
+ : [],
};
},
toJSON(message) {
const obj = {};
- message.issuer !== undefined && (obj.issuer = message.issuer);
- message.san !== undefined && (obj.san = message.san ? sigstore_common_1.SubjectAlternativeName.toJSON(message.san) : undefined);
- if (message.oids) {
- obj.oids = message.oids.map((e) => e ? sigstore_common_1.ObjectIdentifierValuePair.toJSON(e) : undefined);
+ if (message.issuer !== "") {
+ obj.issuer = message.issuer;
+ }
+ if (message.san !== undefined) {
+ obj.san = sigstore_common_1.SubjectAlternativeName.toJSON(message.san);
}
- else {
- obj.oids = [];
+ if (message.oids?.length) {
+ obj.oids = message.oids.map((e) => sigstore_common_1.ObjectIdentifierValuePair.toJSON(e));
}
return obj;
},
};
-function createBaseCertificateIdentities() {
- return { identities: [] };
-}
exports.CertificateIdentities = {
fromJSON(object) {
return {
- identities: Array.isArray(object?.identities)
+ identities: globalThis.Array.isArray(object?.identities)
? object.identities.map((e) => exports.CertificateIdentity.fromJSON(e))
: [],
};
},
toJSON(message) {
const obj = {};
- if (message.identities) {
- obj.identities = message.identities.map((e) => e ? exports.CertificateIdentity.toJSON(e) : undefined);
- }
- else {
- obj.identities = [];
+ if (message.identities?.length) {
+ obj.identities = message.identities.map((e) => exports.CertificateIdentity.toJSON(e));
}
return obj;
},
};
-function createBasePublicKeyIdentities() {
- return { publicKeys: [] };
-}
exports.PublicKeyIdentities = {
fromJSON(object) {
return {
- publicKeys: Array.isArray(object?.publicKeys) ? object.publicKeys.map((e) => sigstore_common_1.PublicKey.fromJSON(e)) : [],
+ publicKeys: globalThis.Array.isArray(object?.publicKeys)
+ ? object.publicKeys.map((e) => sigstore_common_1.PublicKey.fromJSON(e))
+ : [],
};
},
toJSON(message) {
const obj = {};
- if (message.publicKeys) {
- obj.publicKeys = message.publicKeys.map((e) => e ? sigstore_common_1.PublicKey.toJSON(e) : undefined);
- }
- else {
- obj.publicKeys = [];
+ if (message.publicKeys?.length) {
+ obj.publicKeys = message.publicKeys.map((e) => sigstore_common_1.PublicKey.toJSON(e));
}
return obj;
},
};
-function createBaseArtifactVerificationOptions() {
- return {
- signers: undefined,
- tlogOptions: undefined,
- ctlogOptions: undefined,
- tsaOptions: undefined,
- integratedTsOptions: undefined,
- observerOptions: undefined,
- };
-}
exports.ArtifactVerificationOptions = {
fromJSON(object) {
return {
@@ -111,150 +96,152 @@ exports.ArtifactVerificationOptions = {
},
toJSON(message) {
const obj = {};
- message.signers?.$case === "certificateIdentities" &&
- (obj.certificateIdentities = message.signers?.certificateIdentities
- ? exports.CertificateIdentities.toJSON(message.signers?.certificateIdentities)
- : undefined);
- message.signers?.$case === "publicKeys" && (obj.publicKeys = message.signers?.publicKeys
- ? exports.PublicKeyIdentities.toJSON(message.signers?.publicKeys)
- : undefined);
- message.tlogOptions !== undefined && (obj.tlogOptions = message.tlogOptions
- ? exports.ArtifactVerificationOptions_TlogOptions.toJSON(message.tlogOptions)
- : undefined);
- message.ctlogOptions !== undefined && (obj.ctlogOptions = message.ctlogOptions
- ? exports.ArtifactVerificationOptions_CtlogOptions.toJSON(message.ctlogOptions)
- : undefined);
- message.tsaOptions !== undefined && (obj.tsaOptions = message.tsaOptions
- ? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.toJSON(message.tsaOptions)
- : undefined);
- message.integratedTsOptions !== undefined && (obj.integratedTsOptions = message.integratedTsOptions
- ? exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions.toJSON(message.integratedTsOptions)
- : undefined);
- message.observerOptions !== undefined && (obj.observerOptions = message.observerOptions
- ? exports.ArtifactVerificationOptions_ObserverTimestampOptions.toJSON(message.observerOptions)
- : undefined);
+ if (message.signers?.$case === "certificateIdentities") {
+ obj.certificateIdentities = exports.CertificateIdentities.toJSON(message.signers.certificateIdentities);
+ }
+ else if (message.signers?.$case === "publicKeys") {
+ obj.publicKeys = exports.PublicKeyIdentities.toJSON(message.signers.publicKeys);
+ }
+ if (message.tlogOptions !== undefined) {
+ obj.tlogOptions = exports.ArtifactVerificationOptions_TlogOptions.toJSON(message.tlogOptions);
+ }
+ if (message.ctlogOptions !== undefined) {
+ obj.ctlogOptions = exports.ArtifactVerificationOptions_CtlogOptions.toJSON(message.ctlogOptions);
+ }
+ if (message.tsaOptions !== undefined) {
+ obj.tsaOptions = exports.ArtifactVerificationOptions_TimestampAuthorityOptions.toJSON(message.tsaOptions);
+ }
+ if (message.integratedTsOptions !== undefined) {
+ obj.integratedTsOptions = exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions.toJSON(message.integratedTsOptions);
+ }
+ if (message.observerOptions !== undefined) {
+ obj.observerOptions = exports.ArtifactVerificationOptions_ObserverTimestampOptions.toJSON(message.observerOptions);
+ }
return obj;
},
};
-function createBaseArtifactVerificationOptions_TlogOptions() {
- return { threshold: 0, performOnlineVerification: false, disable: false };
-}
exports.ArtifactVerificationOptions_TlogOptions = {
fromJSON(object) {
return {
- threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
+ threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
performOnlineVerification: isSet(object.performOnlineVerification)
- ? Boolean(object.performOnlineVerification)
+ ? globalThis.Boolean(object.performOnlineVerification)
: false,
- disable: isSet(object.disable) ? Boolean(object.disable) : false,
+ disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
};
},
toJSON(message) {
const obj = {};
- message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
- message.performOnlineVerification !== undefined &&
- (obj.performOnlineVerification = message.performOnlineVerification);
- message.disable !== undefined && (obj.disable = message.disable);
+ if (message.threshold !== 0) {
+ obj.threshold = Math.round(message.threshold);
+ }
+ if (message.performOnlineVerification !== false) {
+ obj.performOnlineVerification = message.performOnlineVerification;
+ }
+ if (message.disable !== false) {
+ obj.disable = message.disable;
+ }
return obj;
},
};
-function createBaseArtifactVerificationOptions_CtlogOptions() {
- return { threshold: 0, disable: false };
-}
exports.ArtifactVerificationOptions_CtlogOptions = {
fromJSON(object) {
return {
- threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
- disable: isSet(object.disable) ? Boolean(object.disable) : false,
+ threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
+ disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
};
},
toJSON(message) {
const obj = {};
- message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
- message.disable !== undefined && (obj.disable = message.disable);
+ if (message.threshold !== 0) {
+ obj.threshold = Math.round(message.threshold);
+ }
+ if (message.disable !== false) {
+ obj.disable = message.disable;
+ }
return obj;
},
};
-function createBaseArtifactVerificationOptions_TimestampAuthorityOptions() {
- return { threshold: 0, disable: false };
-}
exports.ArtifactVerificationOptions_TimestampAuthorityOptions = {
fromJSON(object) {
return {
- threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
- disable: isSet(object.disable) ? Boolean(object.disable) : false,
+ threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
+ disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
};
},
toJSON(message) {
const obj = {};
- message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
- message.disable !== undefined && (obj.disable = message.disable);
+ if (message.threshold !== 0) {
+ obj.threshold = Math.round(message.threshold);
+ }
+ if (message.disable !== false) {
+ obj.disable = message.disable;
+ }
return obj;
},
};
-function createBaseArtifactVerificationOptions_TlogIntegratedTimestampOptions() {
- return { threshold: 0, disable: false };
-}
exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions = {
fromJSON(object) {
return {
- threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
- disable: isSet(object.disable) ? Boolean(object.disable) : false,
+ threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
+ disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
};
},
toJSON(message) {
const obj = {};
- message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
- message.disable !== undefined && (obj.disable = message.disable);
+ if (message.threshold !== 0) {
+ obj.threshold = Math.round(message.threshold);
+ }
+ if (message.disable !== false) {
+ obj.disable = message.disable;
+ }
return obj;
},
};
-function createBaseArtifactVerificationOptions_ObserverTimestampOptions() {
- return { threshold: 0, disable: false };
-}
exports.ArtifactVerificationOptions_ObserverTimestampOptions = {
fromJSON(object) {
return {
- threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
- disable: isSet(object.disable) ? Boolean(object.disable) : false,
+ threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
+ disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
};
},
toJSON(message) {
const obj = {};
- message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
- message.disable !== undefined && (obj.disable = message.disable);
+ if (message.threshold !== 0) {
+ obj.threshold = Math.round(message.threshold);
+ }
+ if (message.disable !== false) {
+ obj.disable = message.disable;
+ }
return obj;
},
};
-function createBaseArtifact() {
- return { data: undefined };
-}
exports.Artifact = {
fromJSON(object) {
return {
data: isSet(object.artifactUri)
- ? { $case: "artifactUri", artifactUri: String(object.artifactUri) }
+ ? { $case: "artifactUri", artifactUri: globalThis.String(object.artifactUri) }
: isSet(object.artifact)
? { $case: "artifact", artifact: Buffer.from(bytesFromBase64(object.artifact)) }
- : undefined,
+ : isSet(object.artifactDigest)
+ ? { $case: "artifactDigest", artifactDigest: sigstore_common_1.HashOutput.fromJSON(object.artifactDigest) }
+ : undefined,
};
},
toJSON(message) {
const obj = {};
- message.data?.$case === "artifactUri" && (obj.artifactUri = message.data?.artifactUri);
- message.data?.$case === "artifact" &&
- (obj.artifact = message.data?.artifact !== undefined ? base64FromBytes(message.data?.artifact) : undefined);
+ if (message.data?.$case === "artifactUri") {
+ obj.artifactUri = message.data.artifactUri;
+ }
+ else if (message.data?.$case === "artifact") {
+ obj.artifact = base64FromBytes(message.data.artifact);
+ }
+ else if (message.data?.$case === "artifactDigest") {
+ obj.artifactDigest = sigstore_common_1.HashOutput.toJSON(message.data.artifactDigest);
+ }
return obj;
},
};
-function createBaseInput() {
- return {
- artifactTrustRoot: undefined,
- artifactVerificationOptions: undefined,
- bundle: undefined,
- artifact: undefined,
- };
-}
exports.Input = {
fromJSON(object) {
return {
@@ -268,56 +255,26 @@ exports.Input = {
},
toJSON(message) {
const obj = {};
- message.artifactTrustRoot !== undefined &&
- (obj.artifactTrustRoot = message.artifactTrustRoot ? sigstore_trustroot_1.TrustedRoot.toJSON(message.artifactTrustRoot) : undefined);
- message.artifactVerificationOptions !== undefined &&
- (obj.artifactVerificationOptions = message.artifactVerificationOptions
- ? exports.ArtifactVerificationOptions.toJSON(message.artifactVerificationOptions)
- : undefined);
- message.bundle !== undefined && (obj.bundle = message.bundle ? sigstore_bundle_1.Bundle.toJSON(message.bundle) : undefined);
- message.artifact !== undefined && (obj.artifact = message.artifact ? exports.Artifact.toJSON(message.artifact) : undefined);
+ if (message.artifactTrustRoot !== undefined) {
+ obj.artifactTrustRoot = sigstore_trustroot_1.TrustedRoot.toJSON(message.artifactTrustRoot);
+ }
+ if (message.artifactVerificationOptions !== undefined) {
+ obj.artifactVerificationOptions = exports.ArtifactVerificationOptions.toJSON(message.artifactVerificationOptions);
+ }
+ if (message.bundle !== undefined) {
+ obj.bundle = sigstore_bundle_1.Bundle.toJSON(message.bundle);
+ }
+ if (message.artifact !== undefined) {
+ obj.artifact = exports.Artifact.toJSON(message.artifact);
+ }
return obj;
},
};
-var tsProtoGlobalThis = (() => {
- if (typeof globalThis !== "undefined") {
- return globalThis;
- }
- if (typeof self !== "undefined") {
- return self;
- }
- if (typeof window !== "undefined") {
- return window;
- }
- if (typeof global !== "undefined") {
- return global;
- }
- throw "Unable to locate global object";
-})();
function bytesFromBase64(b64) {
- if (tsProtoGlobalThis.Buffer) {
- return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
- }
- else {
- const bin = tsProtoGlobalThis.atob(b64);
- const arr = new Uint8Array(bin.length);
- for (let i = 0; i < bin.length; ++i) {
- arr[i] = bin.charCodeAt(i);
- }
- return arr;
- }
+ return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
}
function base64FromBytes(arr) {
- if (tsProtoGlobalThis.Buffer) {
- return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
- }
- else {
- const bin = [];
- arr.forEach((byte) => {
- bin.push(String.fromCharCode(byte));
- });
- return tsProtoGlobalThis.btoa(bin.join(""));
- }
+ return globalThis.Buffer.from(arr).toString("base64");
}
function isSet(value) {
return value !== null && value !== undefined;
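`Artifact.fromJSON` above now handles a third branch of the `data` oneof, `artifactDigest`, alongside `artifactUri` and raw `artifact` bytes. The simplified sketch below mirrors that precedence with a plain decode function; `decodeArtifact` is illustrative only and reduces `HashOutput` handling to a pass-through.

```js
// Sketch of the three-way `data` oneof: exactly one of artifactUri,
// artifact (base64 bytes), or the newly added artifactDigest is kept.
function decodeArtifact(json) {
  if (json.artifactUri !== undefined) {
    return { $case: "artifactUri", artifactUri: String(json.artifactUri) };
  }
  if (json.artifact !== undefined) {
    return { $case: "artifact", artifact: Buffer.from(json.artifact, "base64") };
  }
  if (json.artifactDigest !== undefined) {
    return { $case: "artifactDigest", artifactDigest: json.artifactDigest };
  }
  return undefined;
}

// Example input shaped like a HashOutput JSON object (values are placeholders).
console.log(decodeArtifact({ artifactDigest: { algorithm: "SHA2_256", digest: "3q2+7w==" } }));
```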
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/package.json b/deps/npm/node_modules/@sigstore/protobuf-specs/package.json
index 92ae4acbd00ec5..3080a305a8f050 100644
--- a/deps/npm/node_modules/@sigstore/protobuf-specs/package.json
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/package.json
@@ -1,6 +1,6 @@
{
"name": "@sigstore/protobuf-specs",
- "version": "0.3.2",
+ "version": "0.4.3",
"description": "code-signing for npm packages",
"main": "dist/index.js",
"types": "dist/index.d.ts",
@@ -21,11 +21,11 @@
},
"homepage": "https://github.com/sigstore/protobuf-specs#readme",
"devDependencies": {
- "@tsconfig/node16": "^16.1.1",
+ "@tsconfig/node18": "^18.2.4",
"@types/node": "^18.14.0",
- "typescript": "^4.9.5"
+ "typescript": "^5.7.2"
},
"engines": {
- "node": "^16.14.0 || >=18.0.0"
+ "node": "^18.17.0 || >=20.5.0"
}
}
diff --git a/deps/npm/node_modules/@sigstore/tuf/package.json b/deps/npm/node_modules/@sigstore/tuf/package.json
index 808689dfddf92f..4eb105f1acf4e6 100644
--- a/deps/npm/node_modules/@sigstore/tuf/package.json
+++ b/deps/npm/node_modules/@sigstore/tuf/package.json
@@ -1,6 +1,6 @@
{
"name": "@sigstore/tuf",
- "version": "3.0.0",
+ "version": "3.1.1",
"description": "Client for the Sigstore TUF repository",
"main": "dist/index.js",
"types": "dist/index.d.ts",
@@ -32,7 +32,7 @@
"@types/make-fetch-happen": "^10.0.4"
},
"dependencies": {
- "@sigstore/protobuf-specs": "^0.3.2",
+ "@sigstore/protobuf-specs": "^0.4.1",
"tuf-js": "^3.0.1"
},
"engines": {
diff --git a/deps/npm/node_modules/@sigstore/tuf/seeds.json b/deps/npm/node_modules/@sigstore/tuf/seeds.json
index d1d3c6b5c46040..04fe4e6ebfcdbe 100644
--- a/deps/npm/node_modules/@sigstore/tuf/seeds.json
+++ b/deps/npm/node_modules/@sigstore/tuf/seeds.json
@@ -1 +1 @@
-{"https://tuf-repo-cdn.sigstore.dev":{"root.json":"{
 "signatures": [
  {
   "keyid": "6f260089d5923daf20166ca657c543af618346ab971884a99962b01988bbe0c3",
   "sig": "30460221008ab1f6f17d4f9e6d7dcf1c88912b6b53cc10388644ae1f09bc37a082cd06003e022100e145ef4c7b782d4e8107b53437e669d0476892ce999903ae33d14448366996e7"
  },
  {
   "keyid": "e71a54d543835ba86adad9460379c7641fb8726d164ea766801a1c522aba7ea2",
   "sig": "3045022100c768b2f86da99569019c160a081da54ae36c34c0a3120d3cb69b53b7d113758e02204f671518f617b20d46537fae6c3b63bae8913f4f1962156105cc4f019ac35c6a"
  },
  {
   "keyid": "22f4caec6d8e6f9555af66b3d4c3cb06a3bb23fdc7e39c916c61f462e6f52b06",
   "sig": "3045022100b4434e6995d368d23e74759acd0cb9013c83a5d3511f0f997ec54c456ae4350a022015b0e265d182d2b61dc74e155d98b3c3fbe564ba05286aa14c8df02c9b756516"
  },
  {
   "keyid": "61643838125b440b40db6942f5cb5a31c0dc04368316eb2aaa58b95904a58222",
   "sig": "304502210082c58411d989eb9f861410857d42381590ec9424dbdaa51e78ed13515431904e0220118185da6a6c2947131c17797e2bb7620ce26e5f301d1ceac5f2a7e58f9dcf2e"
  },
  {
   "keyid": "a687e5bf4fab82b0ee58d46e05c9535145a2c9afb458f43d42b45ca0fdce2a70",
   "sig": "3046022100c78513854cae9c32eaa6b88e18912f48006c2757a258f917312caba75948eb9e022100d9e1b4ce0adfe9fd2e2148d7fa27a2f40ba1122bd69da7612d8d1776b013c91d"
  },
  {
   "keyid": "fdfa83a07b5a83589b87ded41f77f39d232ad91f7cce52868dacd06ba089849f",
   "sig": "3045022056483a2d5d9ea9cec6e11eadfb33c484b614298faca15acf1c431b11ed7f734c022100d0c1d726af92a87e4e66459ca5adf38a05b44e1f94318423f954bae8bca5bb2e"
  },
  {
   "keyid": "e2f59acb9488519407e18cbfc9329510be03c04aca9929d2f0301343fec85523",
   "sig": "3046022100d004de88024c32dc5653a9f4843cfc5215427048ad9600d2cf9c969e6edff3d2022100d9ebb798f5fc66af10899dece014a8628ccf3c5402cd4a4270207472f8f6e712"
  },
  {
   "keyid": "3c344aa068fd4cc4e87dc50b612c02431fbc771e95003993683a2b0bf260cf0e",
   "sig": "3046022100b7b09996c45ca2d4b05603e56baefa29718a0b71147cf8c6e66349baa61477df022100c4da80c717b4fa7bba0fd5c72da8a0499358b01358b2309f41d1456ea1e7e1d9"
  },
  {
   "keyid": "ec81669734e017996c5b85f3d02c3de1dd4637a152019fe1af125d2f9368b95e",
   "sig": "3046022100be9782c30744e411a82fa85b5138d601ce148bc19258aec64e7ec24478f38812022100caef63dcaf1a4b9a500d3bd0e3f164ec18f1b63d7a9460d9acab1066db0f016d"
  },
  {
   "keyid": "1e1d65ce98b10addad4764febf7dda2d0436b3d3a3893579c0dddaea20e54849",
   "sig": "30450220746ec3f8534ce55531d0d01ff64964ef440d1e7d2c4c142409b8e9769f1ada6f022100e3b929fcd93ea18feaa0825887a7210489879a66780c07a83f4bd46e2f09ab3b"
  }
 ],
 "signed": {
  "_type": "root",
  "consistent_snapshot": true,
  "expires": "2025-02-19T08:04:32Z",
  "keys": {
   "22f4caec6d8e6f9555af66b3d4c3cb06a3bb23fdc7e39c916c61f462e6f52b06": {
    "keyid_hash_algorithms": [
     "sha256",
     "sha512"
    ],
    "keytype": "ecdsa",
    "keyval": {
     "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEzBzVOmHCPojMVLSI364WiiV8NPrD\n6IgRxVliskz/v+y3JER5mcVGcONliDcWMC5J2lfHmjPNPhb4H7xm8LzfSA==\n-----END PUBLIC KEY-----\n"
    },
    "scheme": "ecdsa-sha2-nistp256",
    "x-tuf-on-ci-keyowner": "@santiagotorres"
   },
   "61643838125b440b40db6942f5cb5a31c0dc04368316eb2aaa58b95904a58222": {
    "keyid_hash_algorithms": [
     "sha256",
     "sha512"
    ],
    "keytype": "ecdsa",
    "keyval": {
     "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEinikSsAQmYkNeH5eYq/CnIzLaacO\nxlSaawQDOwqKy/tCqxq5xxPSJc21K4WIhs9GyOkKfzueY3GILzcMJZ4cWw==\n-----END PUBLIC KEY-----\n"
    },
    "scheme": "ecdsa-sha2-nistp256",
    "x-tuf-on-ci-keyowner": "@bobcallaway"
   },
   "6f260089d5923daf20166ca657c543af618346ab971884a99962b01988bbe0c3": {
    "keyid_hash_algorithms": [
     "sha256",
     "sha512"
    ],
    "keytype": "ecdsa",
    "keyval": {
     "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEy8XKsmhBYDI8Jc0GwzBxeKax0cm5\nSTKEU65HPFunUn41sT8pi0FjM4IkHz/YUmwmLUO0Wt7lxhj6BkLIK4qYAw==\n-----END PUBLIC KEY-----\n"
    },
    "scheme": "ecdsa-sha2-nistp256",
    "x-tuf-on-ci-keyowner": "@dlorenc"
   },
   "7247f0dbad85b147e1863bade761243cc785dcb7aa410e7105dd3d2b61a36d2c": {
    "keyid_hash_algorithms": [
     "sha256",
     "sha512"
    ],
    "keytype": "ecdsa",
    "keyval": {
     "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEWRiGr5+j+3J5SsH+Ztr5nE2H2wO7\nBV+nO3s93gLca18qTOzHY1oWyAGDykMSsGTUBSt9D+An0KfKsD2mfSM42Q==\n-----END PUBLIC KEY-----\n"
    },
    "scheme": "ecdsa-sha2-nistp256",
    "x-tuf-on-ci-online-uri": "gcpkms://projects/sigstore-root-signing/locations/global/keyRings/root/cryptoKeys/timestamp"
   },
   "a687e5bf4fab82b0ee58d46e05c9535145a2c9afb458f43d42b45ca0fdce2a70": {
    "keyid_hash_algorithms": [
     "sha256",
     "sha512"
    ],
    "keytype": "ecdsa",
    "keyval": {
     "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE0ghrh92Lw1Yr3idGV5WqCtMDB8Cx\n+D8hdC4w2ZLNIplVRoVGLskYa3gheMyOjiJ8kPi15aQ2//7P+oj7UvJPGw==\n-----END PUBLIC KEY-----\n"
    },
    "scheme": "ecdsa-sha2-nistp256",
    "x-tuf-on-ci-keyowner": "@joshuagl"
   },
   "e71a54d543835ba86adad9460379c7641fb8726d164ea766801a1c522aba7ea2": {
    "keyid_hash_algorithms": [
     "sha256",
     "sha512"
    ],
    "keytype": "ecdsa",
    "keyval": {
     "public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEEXsz3SZXFb8jMV42j6pJlyjbjR8K\nN3Bwocexq6LMIb5qsWKOQvLN16NUefLc4HswOoumRsVVaajSpQS6fobkRw==\n-----END PUBLIC KEY-----\n"
    },
    "scheme": "ecdsa-sha2-nistp256",
    "x-tuf-on-ci-keyowner": "@mnm678"
   }
  },
  "roles": {
   "root": {
    "keyids": [
     "6f260089d5923daf20166ca657c543af618346ab971884a99962b01988bbe0c3",
     "e71a54d543835ba86adad9460379c7641fb8726d164ea766801a1c522aba7ea2",
     "22f4caec6d8e6f9555af66b3d4c3cb06a3bb23fdc7e39c916c61f462e6f52b06",
     "61643838125b440b40db6942f5cb5a31c0dc04368316eb2aaa58b95904a58222",
     "a687e5bf4fab82b0ee58d46e05c9535145a2c9afb458f43d42b45ca0fdce2a70"
    ],
    "threshold": 3
   },
   "snapshot": {
    "keyids": [
     "7247f0dbad85b147e1863bade761243cc785dcb7aa410e7105dd3d2b61a36d2c"
    ],
    "threshold": 1,
    "x-tuf-on-ci-expiry-period": 3650,
    "x-tuf-on-ci-signing-period": 365
   },
   "targets": {
    "keyids": [
     "6f260089d5923daf20166ca657c543af618346ab971884a99962b01988bbe0c3",
     "e71a54d543835ba86adad9460379c7641fb8726d164ea766801a1c522aba7ea2",
     "22f4caec6d8e6f9555af66b3d4c3cb06a3bb23fdc7e39c916c61f462e6f52b06",
     "61643838125b440b40db6942f5cb5a31c0dc04368316eb2aaa58b95904a58222",
     "a687e5bf4fab82b0ee58d46e05c9535145a2c9afb458f43d42b45ca0fdce2a70"
    ],
    "threshold": 3
   },
   "timestamp": {
    "keyids": [
     "7247f0dbad85b147e1863bade761243cc785dcb7aa410e7105dd3d2b61a36d2c"
    ],
    "threshold": 1,
    "x-tuf-on-ci-expiry-period": 7,
    "x-tuf-on-ci-signing-period": 4
   }
  },
  "spec_version": "1.0",
  "version": 10,
  "x-tuf-on-ci-expiry-period": 182,
  "x-tuf-on-ci-signing-period": 31
 }
}","targets":{"trusted_root.json":"{
  "mediaType": "application/vnd.dev.sigstore.trustedroot+json;version=0.1",
  "tlogs": [
    {
      "baseUrl": "https://rekor.sigstore.dev",
      "hashAlgorithm": "SHA2_256",
      "publicKey": {
        "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE2G2Y+2tabdTV5BcGiBIx0a9fAFwrkBbmLSGtks4L3qX6yYY0zufBnhC8Ur/iy55GhWP/9A/bY2LhC30M9+RYtw==",
        "keyDetails": "PKIX_ECDSA_P256_SHA_256",
        "validFor": {
          "start": "2021-01-12T11:53:27.000Z"
        }
      },
      "logId": {
        "keyId": "wNI9atQGlz+VWfO6LRygH4QUfY/8W4RFwiT5i5WRgB0="
      }
    }
  ],
  "certificateAuthorities": [
    {
      "subject": {
        "organization": "sigstore.dev",
        "commonName": "sigstore"
      },
      "uri": "https://fulcio.sigstore.dev",
      "certChain": {
        "certificates": [
          {
            "rawBytes": "MIIB+DCCAX6gAwIBAgITNVkDZoCiofPDsy7dfm6geLbuhzAKBggqhkjOPQQDAzAqMRUwEwYDVQQKEwxzaWdzdG9yZS5kZXYxETAPBgNVBAMTCHNpZ3N0b3JlMB4XDTIxMDMwNzAzMjAyOVoXDTMxMDIyMzAzMjAyOVowKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTB2MBAGByqGSM49AgEGBSuBBAAiA2IABLSyA7Ii5k+pNO8ZEWY0ylemWDowOkNa3kL+GZE5Z5GWehL9/A9bRNA3RbrsZ5i0JcastaRL7Sp5fp/jD5dxqc/UdTVnlvS16an+2Yfswe/QuLolRUCrcOE2+2iA5+tzd6NmMGQwDgYDVR0PAQH/BAQDAgEGMBIGA1UdEwEB/wQIMAYBAf8CAQEwHQYDVR0OBBYEFMjFHQBBmiQpMlEk6w2uSu1KBtPsMB8GA1UdIwQYMBaAFMjFHQBBmiQpMlEk6w2uSu1KBtPsMAoGCCqGSM49BAMDA2gAMGUCMH8liWJfMui6vXXBhjDgY4MwslmN/TJxVe/83WrFomwmNf056y1X48F9c4m3a3ozXAIxAKjRay5/aj/jsKKGIkmQatjI8uupHr/+CxFvaJWmpYqNkLDGRU+9orzh5hI2RrcuaQ=="
          }
        ]
      },
      "validFor": {
        "start": "2021-03-07T03:20:29.000Z",
        "end": "2022-12-31T23:59:59.999Z"
      }
    },
    {
      "subject": {
        "organization": "sigstore.dev",
        "commonName": "sigstore"
      },
      "uri": "https://fulcio.sigstore.dev",
      "certChain": {
        "certificates": [
          {
            "rawBytes": "MIICGjCCAaGgAwIBAgIUALnViVfnU0brJasmRkHrn/UnfaQwCgYIKoZIzj0EAwMwKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0yMjA0MTMyMDA2MTVaFw0zMTEwMDUxMzU2NThaMDcxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjEeMBwGA1UEAxMVc2lnc3RvcmUtaW50ZXJtZWRpYXRlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE8RVS/ysH+NOvuDZyPIZtilgUF9NlarYpAd9HP1vBBH1U5CV77LSS7s0ZiH4nE7Hv7ptS6LvvR/STk798LVgMzLlJ4HeIfF3tHSaexLcYpSASr1kS0N/RgBJz/9jWCiXno3sweTAOBgNVHQ8BAf8EBAMCAQYwEwYDVR0lBAwwCgYIKwYBBQUHAwMwEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQU39Ppz1YkEZb5qNjpKFWixi4YZD8wHwYDVR0jBBgwFoAUWMAeX5FFpWapesyQoZMi0CrFxfowCgYIKoZIzj0EAwMDZwAwZAIwPCsQK4DYiZYDPIaDi5HFKnfxXx6ASSVmERfsynYBiX2X6SJRnZU84/9DZdnFvvxmAjBOt6QpBlc4J/0DxvkTCqpclvziL6BCCPnjdlIB3Pu3BxsPmygUY7Ii2zbdCdliiow="
          },
          {
            "rawBytes": "MIIB9zCCAXygAwIBAgIUALZNAPFdxHPwjeDloDwyYChAO/4wCgYIKoZIzj0EAwMwKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0yMTEwMDcxMzU2NTlaFw0zMTEwMDUxMzU2NThaMCoxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjERMA8GA1UEAxMIc2lnc3RvcmUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAT7XeFT4rb3PQGwS4IajtLk3/OlnpgangaBclYpsYBr5i+4ynB07ceb3LP0OIOZdxexX69c5iVuyJRQ+Hz05yi+UF3uBWAlHpiS5sh0+H2GHE7SXrk1EC5m1Tr19L9gg92jYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRYwB5fkUWlZql6zJChkyLQKsXF+jAfBgNVHSMEGDAWgBRYwB5fkUWlZql6zJChkyLQKsXF+jAKBggqhkjOPQQDAwNpADBmAjEAj1nHeXZp+13NWBNa+EDsDP8G1WWg1tCMWP/WHPqpaVo0jhsweNFZgSs0eE7wYI4qAjEA2WB9ot98sIkoF3vZYdd3/VtWB5b9TNMea7Ix/stJ5TfcLLeABLE4BNJOsQ4vnBHJ"
          }
        ]
      },
      "validFor": {
        "start": "2022-04-13T20:06:15.000Z"
      }
    }
  ],
  "ctlogs": [
    {
      "baseUrl": "https://ctfe.sigstore.dev/test",
      "hashAlgorithm": "SHA2_256",
      "publicKey": {
        "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEbfwR+RJudXscgRBRpKX1XFDy3PyudDxz/SfnRi1fT8ekpfBd2O1uoz7jr3Z8nKzxA69EUQ+eFCFI3zeubPWU7w==",
        "keyDetails": "PKIX_ECDSA_P256_SHA_256",
        "validFor": {
          "start": "2021-03-14T00:00:00.000Z",
          "end": "2022-10-31T23:59:59.999Z"
        }
      },
      "logId": {
        "keyId": "CGCS8ChS/2hF0dFrJ4ScRWcYrBY9wzjSbea8IgY2b3I="
      }
    },
    {
      "baseUrl": "https://ctfe.sigstore.dev/2022",
      "hashAlgorithm": "SHA2_256",
      "publicKey": {
        "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEiPSlFi0CmFTfEjCUqF9HuCEcYXNKAaYalIJmBZ8yyezPjTqhxrKBpMnaocVtLJBI1eM3uXnQzQGAJdJ4gs9Fyw==",
        "keyDetails": "PKIX_ECDSA_P256_SHA_256",
        "validFor": {
          "start": "2022-10-20T00:00:00.000Z"
        }
      },
      "logId": {
        "keyId": "3T0wasbHETJjGR4cmWc3AqJKXrjePK3/h4pygC8p7o4="
      }
    }
  ],
  "timestampAuthorities": [
    {
      "subject": {
        "organization": "GitHub, Inc.",
        "commonName": "Internal Services Root"
      },
      "certChain": {
        "certificates": [
          {
            "rawBytes": "MIIB3DCCAWKgAwIBAgIUchkNsH36Xa04b1LqIc+qr9DVecMwCgYIKoZIzj0EAwMwMjEVMBMGA1UEChMMR2l0SHViLCBJbmMuMRkwFwYDVQQDExBUU0EgaW50ZXJtZWRpYXRlMB4XDTIzMDQxNDAwMDAwMFoXDTI0MDQxMzAwMDAwMFowMjEVMBMGA1UEChMMR2l0SHViLCBJbmMuMRkwFwYDVQQDExBUU0EgVGltZXN0YW1waW5nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEUD5ZNbSqYMd6r8qpOOEX9ibGnZT9GsuXOhr/f8U9FJugBGExKYp40OULS0erjZW7xV9xV52NnJf5OeDq4e5ZKqNWMFQwDgYDVR0PAQH/BAQDAgeAMBMGA1UdJQQMMAoGCCsGAQUFBwMIMAwGA1UdEwEB/wQCMAAwHwYDVR0jBBgwFoAUaW1RudOgVt0leqY0WKYbuPr47wAwCgYIKoZIzj0EAwMDaAAwZQIwbUH9HvD4ejCZJOWQnqAlkqURllvu9M8+VqLbiRK+zSfZCZwsiljRn8MQQRSkXEE5AjEAg+VxqtojfVfu8DhzzhCx9GKETbJHb19iV72mMKUbDAFmzZ6bQ8b54Zb8tidy5aWe"
          },
          {
            "rawBytes": "MIICEDCCAZWgAwIBAgIUX8ZO5QXP7vN4dMQ5e9sU3nub8OgwCgYIKoZIzj0EAwMwODEVMBMGA1UEChMMR2l0SHViLCBJbmMuMR8wHQYDVQQDExZJbnRlcm5hbCBTZXJ2aWNlcyBSb290MB4XDTIzMDQxNDAwMDAwMFoXDTI4MDQxMjAwMDAwMFowMjEVMBMGA1UEChMMR2l0SHViLCBJbmMuMRkwFwYDVQQDExBUU0EgaW50ZXJtZWRpYXRlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEvMLY/dTVbvIJYANAuszEwJnQE1llftynyMKIMhh48HmqbVr5ygybzsLRLVKbBWOdZ21aeJz+gZiytZetqcyF9WlER5NEMf6JV7ZNojQpxHq4RHGoGSceQv/qvTiZxEDKo2YwZDAOBgNVHQ8BAf8EBAMCAQYwEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQUaW1RudOgVt0leqY0WKYbuPr47wAwHwYDVR0jBBgwFoAU9NYYlobnAG4c0/qjxyH/lq/wz+QwCgYIKoZIzj0EAwMDaQAwZgIxAK1B185ygCrIYFlIs3GjswjnwSMG6LY8woLVdakKDZxVa8f8cqMs1DhcxJ0+09w95QIxAO+tBzZk7vjUJ9iJgD4R6ZWTxQWKqNm74jO99o+o9sv4FI/SZTZTFyMn0IJEHdNmyA=="
          },
          {
            "rawBytes": "MIIB9DCCAXqgAwIBAgIUa/JAkdUjK4JUwsqtaiRJGWhqLSowCgYIKoZIzj0EAwMwODEVMBMGA1UEChMMR2l0SHViLCBJbmMuMR8wHQYDVQQDExZJbnRlcm5hbCBTZXJ2aWNlcyBSb290MB4XDTIzMDQxNDAwMDAwMFoXDTMzMDQxMTAwMDAwMFowODEVMBMGA1UEChMMR2l0SHViLCBJbmMuMR8wHQYDVQQDExZJbnRlcm5hbCBTZXJ2aWNlcyBSb290MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEf9jFAXxz4kx68AHRMOkFBhflDcMTvzaXz4x/FCcXjJ/1qEKon/qPIGnaURskDtyNbNDOpeJTDDFqt48iMPrnzpx6IZwqemfUJN4xBEZfza+pYt/iyod+9tZr20RRWSv/o0UwQzAOBgNVHQ8BAf8EBAMCAQYwEgYDVR0TAQH/BAgwBgEB/wIBAjAdBgNVHQ4EFgQU9NYYlobnAG4c0/qjxyH/lq/wz+QwCgYIKoZIzj0EAwMDaAAwZQIxALZLZ8BgRXzKxLMMN9VIlO+e4hrBnNBgF7tz7Hnrowv2NetZErIACKFymBlvWDvtMAIwZO+ki6ssQ1bsZo98O8mEAf2NZ7iiCgDDU0Vwjeco6zyeh0zBTs9/7gV6AHNQ53xD"
          }
        ]
      },
      "validFor": {
        "start": "2023-04-14T00:00:00.000Z"
      }
    }
  ]
}
","registry.npmjs.org%2Fkeys.json":"ewogICAgImtleXMiOiBbCiAgICAgICAgewogICAgICAgICAgICAia2V5SWQiOiAiU0hBMjU2OmpsM2J3c3d1ODBQampva0NnaDBvMnc1YzJVNExoUUFFNTdnajljejFrekEiLAogICAgICAgICAgICAia2V5VXNhZ2UiOiAibnBtOnNpZ25hdHVyZXMiLAogICAgICAgICAgICAicHVibGljS2V5IjogewogICAgICAgICAgICAgICAgInJhd0J5dGVzIjogIk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRTFPbGIzek1BRkZ4WEtIaUlrUU81Y0ozWWhsNWk2VVBwK0lodXRlQkpidUhjQTVVb2dLbzBFV3RsV3dXNktTYUtvVE5FWUw3SmxDUWlWbmtoQmt0VWdnPT0iLAogICAgICAgICAgICAgICAgImtleURldGFpbHMiOiAiUEtJWF9FQ0RTQV9QMjU2X1NIQV8yNTYiLAogICAgICAgICAgICAgICAgInZhbGlkRm9yIjogewogICAgICAgICAgICAgICAgICAgICJzdGFydCI6ICIxOTk5LTAxLTAxVDAwOjAwOjAwLjAwMFoiCiAgICAgICAgICAgICAgICB9CiAgICAgICAgICAgIH0KICAgICAgICB9LAogICAgICAgIHsKICAgICAgICAgICAgImtleUlkIjogIlNIQTI1NjpqbDNid3N3dTgwUGpqb2tDZ2gwbzJ3NWMyVTRMaFFBRTU3Z2o5Y3oxa3pBIiwKICAgICAgICAgICAgImtleVVzYWdlIjogIm5wbTphdHRlc3RhdGlvbnMiLAogICAgICAgICAgICAicHVibGljS2V5IjogewogICAgICAgICAgICAgICAgInJhd0J5dGVzIjogIk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRTFPbGIzek1BRkZ4WEtIaUlrUU81Y0ozWWhsNWk2VVBwK0lodXRlQkpidUhjQTVVb2dLbzBFV3RsV3dXNktTYUtvVE5FWUw3SmxDUWlWbmtoQmt0VWdnPT0iLAogICAgICAgICAgICAgICAgImtleURldGFpbHMiOiAiUEtJWF9FQ0RTQV9QMjU2X1NIQV8yNTYiLAogICAgICAgICAgICAgICAgInZhbGlkRm9yIjogewogICAgICAgICAgICAgICAgICAgICJzdGFydCI6ICIyMDIyLTEyLTAxVDAwOjAwOjAwLjAwMFoiCiAgICAgICAgICAgICAgICB9CiAgICAgICAgICAgIH0KICAgICAgICB9CiAgICBdCn0K"}}}
+{"https://tuf-repo-cdn.sigstore.dev":{"root.json":"ewogInNpZ25hdHVyZXMiOiBbCiAgewogICAia2V5aWQiOiAiNmYyNjAwODlkNTkyM2RhZjIwMTY2Y2E2NTdjNTQzYWY2MTgzNDZhYjk3MTg4NGE5OTk2MmIwMTk4OGJiZTBjMyIsCiAgICJzaWciOiAiIgogIH0sCiAgewogICAia2V5aWQiOiAiZTcxYTU0ZDU0MzgzNWJhODZhZGFkOTQ2MDM3OWM3NjQxZmI4NzI2ZDE2NGVhNzY2ODAxYTFjNTIyYWJhN2VhMiIsCiAgICJzaWciOiAiMzA0NTAyMjEwMGIwYmNmMTg5Y2UxYjkzZTdkYjk2NDlkNWJlNTEyYTE4ODBjMGUzNTg4NzBlMzkzM2U0MjZjNWFmYjhhNDA2MTAwMjIwNmQyMTRiZDc5YjA5ZjQ1OGNjYzUyMWEyOTBhYTk2MGM0MTcwMTRmYzE2ZTYwNmY4MjA5MWI1ZTMxODE0ODg2YSIKICB9LAogIHsKICAgImtleWlkIjogIjIyZjRjYWVjNmQ4ZTZmOTU1NWFmNjZiM2Q0YzNjYjA2YTNiYjIzZmRjN2UzOWM5MTZjNjFmNDYyZTZmNTJiMDYiLAogICAic2lnIjogIiIKICB9LAogIHsKICAgImtleWlkIjogIjYxNjQzODM4MTI1YjQ0MGI0MGRiNjk0MmY1Y2I1YTMxYzBkYzA0MzY4MzE2ZWIyYWFhNThiOTU5MDRhNTgyMjIiLAogICAic2lnIjogIjMwNDUwMjIxMDBhOWI5ZTI5NGVjMjFiNjJkZmNhNmExNmExOWQwODQxODJjMTI1NzJlMzNkOWM0ZGNhYjUzMTdmYTFlOGE0NTlkMDIyMDY5ZjY4ZTU1ZWExZjk1YzVhMzY3YWFjN2E2MWE2NTc1N2Y5M2RhNWEwMDZhNWY0ZDFjZjk5NWJlODEyZDc2MDIiCiAgfSwKICB7CiAgICJrZXlpZCI6ICJhNjg3ZTViZjRmYWI4MmIwZWU1OGQ0NmUwNWM5NTM1MTQ1YTJjOWFmYjQ1OGY0M2Q0MmI0NWNhMGZkY2UyYTcwIiwKICAgInNpZyI6ICIzMDQ0MDIyMDc4MTE3OGVjMzkxNWNiMTZhY2E3NTdkNDBlMjg0MzVhYzUzNzhkNmI0ODdhY2IxMTFkMWVlYjMzOTM5N2Y3OWEwMjIwNzgxY2NlNDhhZTQ2ZjllNDdiOTdhODQxNGZjZjQ2NmE5ODY3MjZhNTg5NmM3MmEwZTRhYmEzMTYyY2I4MjZkZCIKICB9CiBdLAogInNpZ25lZCI6IHsKICAiX3R5cGUiOiAicm9vdCIsCiAgImNvbnNpc3RlbnRfc25hcHNob3QiOiB0cnVlLAogICJleHBpcmVzIjogIjIwMjUtMDgtMTlUMTQ6MzM6MDlaIiwKICAia2V5cyI6IHsKICAgIjBjODc0MzJjM2JmMDlmZDk5MTg5ZmRjMzJmYTVlYWVkZjRlNGE1ZmFjN2JhYjczZmEwNGEyZTBmYzY0YWY2ZjUiOiB7CiAgICAia2V5aWRfaGFzaF9hbGdvcml0aG1zIjogWwogICAgICJzaGEyNTYiLAogICAgICJzaGE1MTIiCiAgICBdLAogICAgImtleXR5cGUiOiAiZWNkc2EiLAogICAgImtleXZhbCI6IHsKICAgICAicHVibGljIjogIi0tLS0tQkVHSU4gUFVCTElDIEtFWS0tLS0tXG5NRmt3RXdZSEtvWkl6ajBDQVFZSUtvWkl6ajBEQVFjRFFnQUVXUmlHcjUraiszSjVTc0grWnRyNW5FMkgyd083XG5CVituTzNzOTNnTGNhMThxVE96SFkxb1d5QUdEeWtNU3NHVFVCU3Q5RCtBbjBLZktzRDJtZlNNNDJRPT1cbi0tLS0tRU5EIFBVQkxJQyBLRVktLS0tLVxuIgogICAgfSwKICAgICJzY2hlbWUiOiAiZWNkc2Etc2hhMi1uaXN0cDI1NiIsCiAgICAieC10dWYtb24tY2ktb25saW5lLXVyaSI6ICJnY3BrbXM6cHJvamVjdHMvc2lnc3RvcmUtcm9vdC1zaWduaW5nL2xvY2F0aW9ucy9nbG9iYWwva2V5UmluZ3Mvcm9vdC9jcnlwdG9LZXlzL3RpbWVzdGFtcC9jcnlwdG9LZXlWZXJzaW9ucy8xIgogICB9LAogICAiMjJmNGNhZWM2ZDhlNmY5NTU1YWY2NmIzZDRjM2NiMDZhM2JiMjNmZGM3ZTM5YzkxNmM2MWY0NjJlNmY1MmIwNiI6IHsKICAgICJrZXlpZF9oYXNoX2FsZ29yaXRobXMiOiBbCiAgICAgInNoYTI1NiIsCiAgICAgInNoYTUxMiIKICAgIF0sCiAgICAia2V5dHlwZSI6ICJlY2RzYSIsCiAgICAia2V5dmFsIjogewogICAgICJwdWJsaWMiOiAiLS0tLS1CRUdJTiBQVUJMSUMgS0VZLS0tLS1cbk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRXpCelZPbUhDUG9qTVZMU0kzNjRXaWlWOE5QckRcbjZJZ1J4Vmxpc2t6L3YreTNKRVI1bWNWR2NPTmxpRGNXTUM1SjJsZkhtalBOUGhiNEg3eG04THpmU0E9PVxuLS0tLS1FTkQgUFVCTElDIEtFWS0tLS0tXG4iCiAgICB9LAogICAgInNjaGVtZSI6ICJlY2RzYS1zaGEyLW5pc3RwMjU2IiwKICAgICJ4LXR1Zi1vbi1jaS1rZXlvd25lciI6ICJAc2FudGlhZ290b3JyZXMiCiAgIH0sCiAgICI2MTY0MzgzODEyNWI0NDBiNDBkYjY5NDJmNWNiNWEzMWMwZGMwNDM2ODMxNmViMmFhYTU4Yjk1OTA0YTU4MjIyIjogewogICAgImtleWlkX2hhc2hfYWxnb3JpdGhtcyI6IFsKICAgICAic2hhMjU2IiwKICAgICAic2hhNTEyIgogICAgXSwKICAgICJrZXl0eXBlIjogImVjZHNhIiwKICAgICJrZXl2YWwiOiB7CiAgICAgInB1YmxpYyI6ICItLS0tLUJFR0lOIFBVQkxJQyBLRVktLS0tLVxuTUZrd0V3WUhLb1pJemowQ0FRWUlLb1pJemowREFRY0RRZ0FFaW5pa1NzQVFtWWtOZUg1ZVlxL0NuSXpMYWFjT1xueGxTYWF3UURPd3FLeS90Q3F4cTV4eFBTSmMyMUs0V0loczlHeU9rS2Z6dWVZM0dJTHpjTUpaNGNXdz09XG4tLS0tLUVORCBQVUJMSUMgS0VZLS0tLS1cbiIKICAgIH0sCiAgICAic2NoZW1lIjogImVjZHNhLXNoYTItbmlzdHAyNTYiLAogICAgIngtdHVmLW9uLWNpLWtleW93bmVyIjogIkBib2JjYWxsYXdheSIKICAgfSwKICAgIjZmMjYwMDg5ZDU5MjNkYWYyMDE2NmNhNjU3YzU0M2FmNjE
4MzQ2YWI5NzE4ODRhOTk5NjJiMDE5ODhiYmUwYzMiOiB7CiAgICAia2V5aWRfaGFzaF9hbGdvcml0aG1zIjogWwogICAgICJzaGEyNTYiLAogICAgICJzaGE1MTIiCiAgICBdLAogICAgImtleXR5cGUiOiAiZWNkc2EiLAogICAgImtleXZhbCI6IHsKICAgICAicHVibGljIjogIi0tLS0tQkVHSU4gUFVCTElDIEtFWS0tLS0tXG5NRmt3RXdZSEtvWkl6ajBDQVFZSUtvWkl6ajBEQVFjRFFnQUV5OFhLc21oQllESThKYzBHd3pCeGVLYXgwY201XG5TVEtFVTY1SFBGdW5VbjQxc1Q4cGkwRmpNNElrSHovWVVtd21MVU8wV3Q3bHhoajZCa0xJSzRxWUF3PT1cbi0tLS0tRU5EIFBVQkxJQyBLRVktLS0tLVxuIgogICAgfSwKICAgICJzY2hlbWUiOiAiZWNkc2Etc2hhMi1uaXN0cDI1NiIsCiAgICAieC10dWYtb24tY2kta2V5b3duZXIiOiAiQGRsb3JlbmMiCiAgIH0sCiAgICJhNjg3ZTViZjRmYWI4MmIwZWU1OGQ0NmUwNWM5NTM1MTQ1YTJjOWFmYjQ1OGY0M2Q0MmI0NWNhMGZkY2UyYTcwIjogewogICAgImtleWlkX2hhc2hfYWxnb3JpdGhtcyI6IFsKICAgICAic2hhMjU2IiwKICAgICAic2hhNTEyIgogICAgXSwKICAgICJrZXl0eXBlIjogImVjZHNhIiwKICAgICJrZXl2YWwiOiB7CiAgICAgInB1YmxpYyI6ICItLS0tLUJFR0lOIFBVQkxJQyBLRVktLS0tLVxuTUZrd0V3WUhLb1pJemowQ0FRWUlLb1pJemowREFRY0RRZ0FFMGdocmg5Mkx3MVlyM2lkR1Y1V3FDdE1EQjhDeFxuK0Q4aGRDNHcyWkxOSXBsVlJvVkdMc2tZYTNnaGVNeU9qaUo4a1BpMTVhUTIvLzdQK29qN1V2SlBHdz09XG4tLS0tLUVORCBQVUJMSUMgS0VZLS0tLS1cbiIKICAgIH0sCiAgICAic2NoZW1lIjogImVjZHNhLXNoYTItbmlzdHAyNTYiLAogICAgIngtdHVmLW9uLWNpLWtleW93bmVyIjogIkBqb3NodWFnbCIKICAgfSwKICAgImU3MWE1NGQ1NDM4MzViYTg2YWRhZDk0NjAzNzljNzY0MWZiODcyNmQxNjRlYTc2NjgwMWExYzUyMmFiYTdlYTIiOiB7CiAgICAia2V5aWRfaGFzaF9hbGdvcml0aG1zIjogWwogICAgICJzaGEyNTYiLAogICAgICJzaGE1MTIiCiAgICBdLAogICAgImtleXR5cGUiOiAiZWNkc2EiLAogICAgImtleXZhbCI6IHsKICAgICAicHVibGljIjogIi0tLS0tQkVHSU4gUFVCTElDIEtFWS0tLS0tXG5NRmt3RXdZSEtvWkl6ajBDQVFZSUtvWkl6ajBEQVFjRFFnQUVFWHN6M1NaWEZiOGpNVjQyajZwSmx5amJqUjhLXG5OM0J3b2NleHE2TE1JYjVxc1dLT1F2TE4xNk5VZWZMYzRIc3dPb3VtUnNWVmFhalNwUVM2Zm9ia1J3PT1cbi0tLS0tRU5EIFBVQkxJQyBLRVktLS0tLVxuIgogICAgfSwKICAgICJzY2hlbWUiOiAiZWNkc2Etc2hhMi1uaXN0cDI1NiIsCiAgICAieC10dWYtb24tY2kta2V5b3duZXIiOiAiQG1ubTY3OCIKICAgfQogIH0sCiAgInJvbGVzIjogewogICAicm9vdCI6IHsKICAgICJrZXlpZHMiOiBbCiAgICAgIjZmMjYwMDg5ZDU5MjNkYWYyMDE2NmNhNjU3YzU0M2FmNjE4MzQ2YWI5NzE4ODRhOTk5NjJiMDE5ODhiYmUwYzMiLAogICAgICJlNzFhNTRkNTQzODM1YmE4NmFkYWQ5NDYwMzc5Yzc2NDFmYjg3MjZkMTY0ZWE3NjY4MDFhMWM1MjJhYmE3ZWEyIiwKICAgICAiMjJmNGNhZWM2ZDhlNmY5NTU1YWY2NmIzZDRjM2NiMDZhM2JiMjNmZGM3ZTM5YzkxNmM2MWY0NjJlNmY1MmIwNiIsCiAgICAgIjYxNjQzODM4MTI1YjQ0MGI0MGRiNjk0MmY1Y2I1YTMxYzBkYzA0MzY4MzE2ZWIyYWFhNThiOTU5MDRhNTgyMjIiLAogICAgICJhNjg3ZTViZjRmYWI4MmIwZWU1OGQ0NmUwNWM5NTM1MTQ1YTJjOWFmYjQ1OGY0M2Q0MmI0NWNhMGZkY2UyYTcwIgogICAgXSwKICAgICJ0aHJlc2hvbGQiOiAzCiAgIH0sCiAgICJzbmFwc2hvdCI6IHsKICAgICJrZXlpZHMiOiBbCiAgICAgIjBjODc0MzJjM2JmMDlmZDk5MTg5ZmRjMzJmYTVlYWVkZjRlNGE1ZmFjN2JhYjczZmEwNGEyZTBmYzY0YWY2ZjUiCiAgICBdLAogICAgInRocmVzaG9sZCI6IDEsCiAgICAieC10dWYtb24tY2ktZXhwaXJ5LXBlcmlvZCI6IDM2NTAsCiAgICAieC10dWYtb24tY2ktc2lnbmluZy1wZXJpb2QiOiAzNjUKICAgfSwKICAgInRhcmdldHMiOiB7CiAgICAia2V5aWRzIjogWwogICAgICI2ZjI2MDA4OWQ1OTIzZGFmMjAxNjZjYTY1N2M1NDNhZjYxODM0NmFiOTcxODg0YTk5OTYyYjAxOTg4YmJlMGMzIiwKICAgICAiZTcxYTU0ZDU0MzgzNWJhODZhZGFkOTQ2MDM3OWM3NjQxZmI4NzI2ZDE2NGVhNzY2ODAxYTFjNTIyYWJhN2VhMiIsCiAgICAgIjIyZjRjYWVjNmQ4ZTZmOTU1NWFmNjZiM2Q0YzNjYjA2YTNiYjIzZmRjN2UzOWM5MTZjNjFmNDYyZTZmNTJiMDYiLAogICAgICI2MTY0MzgzODEyNWI0NDBiNDBkYjY5NDJmNWNiNWEzMWMwZGMwNDM2ODMxNmViMmFhYTU4Yjk1OTA0YTU4MjIyIiwKICAgICAiYTY4N2U1YmY0ZmFiODJiMGVlNThkNDZlMDVjOTUzNTE0NWEyYzlhZmI0NThmNDNkNDJiNDVjYTBmZGNlMmE3MCIKICAgIF0sCiAgICAidGhyZXNob2xkIjogMwogICB9LAogICAidGltZXN0YW1wIjogewogICAgImtleWlkcyI6IFsKICAgICAiMGM4NzQzMmMzYmYwOWZkOTkxODlmZGMzMmZhNWVhZWRmNGU0YTVmYWM3YmFiNzNmYTA0YTJlMGZjNjRhZjZmNSIKICAgIF0sCiAgICAidGhyZXNob2xkIjogMSwKICAgICJ4LXR1Zi1vbi1jaS1leHBpcnktcGVyaW9kIjogNywKICAgICJ4LXR1Zi1vbi1jaS1zaWduaW5nLXBlcmlvZCI6IDYKICAgfQogIH
0sCiAgInNwZWNfdmVyc2lvbiI6ICIxLjAiLAogICJ2ZXJzaW9uIjogMTIsCiAgIngtdHVmLW9uLWNpLWV4cGlyeS1wZXJpb2QiOiAxOTcsCiAgIngtdHVmLW9uLWNpLXNpZ25pbmctcGVyaW9kIjogNDYKIH0KfQ==","targets":{"trusted_root.json":"ewogICJtZWRpYVR5cGUiOiAiYXBwbGljYXRpb24vdm5kLmRldi5zaWdzdG9yZS50cnVzdGVkcm9vdCtqc29uO3ZlcnNpb249MC4xIiwKICAidGxvZ3MiOiBbCiAgICB7CiAgICAgICJiYXNlVXJsIjogImh0dHBzOi8vcmVrb3Iuc2lnc3RvcmUuZGV2IiwKICAgICAgImhhc2hBbGdvcml0aG0iOiAiU0hBMl8yNTYiLAogICAgICAicHVibGljS2V5IjogewogICAgICAgICJyYXdCeXRlcyI6ICJNRmt3RXdZSEtvWkl6ajBDQVFZSUtvWkl6ajBEQVFjRFFnQUUyRzJZKzJ0YWJkVFY1QmNHaUJJeDBhOWZBRndya0JibUxTR3RrczRMM3FYNnlZWTB6dWZCbmhDOFVyL2l5NTVHaFdQLzlBL2JZMkxoQzMwTTkrUll0dz09IiwKICAgICAgICAia2V5RGV0YWlscyI6ICJQS0lYX0VDRFNBX1AyNTZfU0hBXzI1NiIsCiAgICAgICAgInZhbGlkRm9yIjogewogICAgICAgICAgInN0YXJ0IjogIjIwMjEtMDEtMTJUMTE6NTM6MjcuMDAwWiIKICAgICAgICB9CiAgICAgIH0sCiAgICAgICJsb2dJZCI6IHsKICAgICAgICAia2V5SWQiOiAid05JOWF0UUdseitWV2ZPNkxSeWdINFFVZlkvOFc0UkZ3aVQ1aTVXUmdCMD0iCiAgICAgIH0KICAgIH0KICBdLAogICJjZXJ0aWZpY2F0ZUF1dGhvcml0aWVzIjogWwogICAgewogICAgICAic3ViamVjdCI6IHsKICAgICAgICAib3JnYW5pemF0aW9uIjogInNpZ3N0b3JlLmRldiIsCiAgICAgICAgImNvbW1vbk5hbWUiOiAic2lnc3RvcmUiCiAgICAgIH0sCiAgICAgICJ1cmkiOiAiaHR0cHM6Ly9mdWxjaW8uc2lnc3RvcmUuZGV2IiwKICAgICAgImNlcnRDaGFpbiI6IHsKICAgICAgICAiY2VydGlmaWNhdGVzIjogWwogICAgICAgICAgewogICAgICAgICAgICAicmF3Qnl0ZXMiOiAiTUlJQitEQ0NBWDZnQXdJQkFnSVROVmtEWm9DaW9mUERzeTdkZm02Z2VMYnVoekFLQmdncWhrak9QUVFEQXpBcU1SVXdFd1lEVlFRS0V3eHphV2R6ZEc5eVpTNWtaWFl4RVRBUEJnTlZCQU1UQ0hOcFozTjBiM0psTUI0WERUSXhNRE13TnpBek1qQXlPVm9YRFRNeE1ESXlNekF6TWpBeU9Wb3dLakVWTUJNR0ExVUVDaE1NYzJsbmMzUnZjbVV1WkdWMk1SRXdEd1lEVlFRREV3aHphV2R6ZEc5eVpUQjJNQkFHQnlxR1NNNDlBZ0VHQlN1QkJBQWlBMklBQkxTeUE3SWk1aytwTk84WkVXWTB5bGVtV0Rvd09rTmEza0wrR1pFNVo1R1dlaEw5L0E5YlJOQTNSYnJzWjVpMEpjYXN0YVJMN1NwNWZwL2pENWR4cWMvVWRUVm5sdlMxNmFuKzJZZnN3ZS9RdUxvbFJVQ3JjT0UyKzJpQTUrdHpkNk5tTUdRd0RnWURWUjBQQVFIL0JBUURBZ0VHTUJJR0ExVWRFd0VCL3dRSU1BWUJBZjhDQVFFd0hRWURWUjBPQkJZRUZNakZIUUJCbWlRcE1sRWs2dzJ1U3UxS0J0UHNNQjhHQTFVZEl3UVlNQmFBRk1qRkhRQkJtaVFwTWxFazZ3MnVTdTFLQnRQc01Bb0dDQ3FHU000OUJBTURBMmdBTUdVQ01IOGxpV0pmTXVpNnZYWEJoakRnWTRNd3NsbU4vVEp4VmUvODNXckZvbXdtTmYwNTZ5MVg0OEY5YzRtM2Ezb3pYQUl4QUtqUmF5NS9hai9qc0tLR0lrbVFhdGpJOHV1cEhyLytDeEZ2YUpXbXBZcU5rTERHUlUrOW9yemg1aEkyUnJjdWFRPT0iCiAgICAgICAgICB9CiAgICAgICAgXQogICAgICB9LAogICAgICAidmFsaWRGb3IiOiB7CiAgICAgICAgInN0YXJ0IjogIjIwMjEtMDMtMDdUMDM6MjA6MjkuMDAwWiIsCiAgICAgICAgImVuZCI6ICIyMDIyLTEyLTMxVDIzOjU5OjU5Ljk5OVoiCiAgICAgIH0KICAgIH0sCiAgICB7CiAgICAgICJzdWJqZWN0IjogewogICAgICAgICJvcmdhbml6YXRpb24iOiAic2lnc3RvcmUuZGV2IiwKICAgICAgICAiY29tbW9uTmFtZSI6ICJzaWdzdG9yZSIKICAgICAgfSwKICAgICAgInVyaSI6ICJodHRwczovL2Z1bGNpby5zaWdzdG9yZS5kZXYiLAogICAgICAiY2VydENoYWluIjogewogICAgICAgICJjZXJ0aWZpY2F0ZXMiOiBbCiAgICAgICAgICB7CiAgICAgICAgICAgICJyYXdCeXRlcyI6ICJNSUlDR2pDQ0FhR2dBd0lCQWdJVUFMblZpVmZuVTBickphc21Sa0hybi9VbmZhUXdDZ1lJS29aSXpqMEVBd013S2pFVk1CTUdBMVVFQ2hNTWMybG5jM1J2Y21VdVpHVjJNUkV3RHdZRFZRUURFd2h6YVdkemRHOXlaVEFlRncweU1qQTBNVE15TURBMk1UVmFGdzB6TVRFd01EVXhNelUyTlRoYU1EY3hGVEFUQmdOVkJBb1RESE5wWjNOMGIzSmxMbVJsZGpFZU1Cd0dBMVVFQXhNVmMybG5jM1J2Y21VdGFXNTBaWEp0WldScFlYUmxNSFl3RUFZSEtvWkl6ajBDQVFZRks0RUVBQ0lEWWdBRThSVlMveXNIK05PdnVEWnlQSVp0aWxnVUY5TmxhcllwQWQ5SFAxdkJCSDFVNUNWNzdMU1M3czBaaUg0bkU3SHY3cHRTNkx2dlIvU1RrNzk4TFZnTXpMbEo0SGVJZkYzdEhTYWV4TGNZcFNBU3Ixa1MwTi9SZ0JKei85aldDaVhubzNzd2VUQU9CZ05WSFE4QkFmOEVCQU1DQVFZd0V3WURWUjBsQkF3d0NnWUlLd1lCQlFVSEF3TXdFZ1lEVlIwVEFRSC9CQWd3QmdFQi93SUJBREFkQmdOVkhRNEVGZ1FVMzlQcHoxWWtFWmI1cU5qcEtGV2l4aTRZWkQ4d0h3WURWUjBqQkJnd0ZvQVVXTUFlWDVGRnBXYXBlc3lRb1pNaTBDckZ4Zm93Q2dZSUtvWkl6ajBFQXdNRFp3QXdaQUl3UENzUUs0RFl
pWllEUElhRGk1SEZLbmZ4WHg2QVNTVm1FUmZzeW5ZQmlYMlg2U0pSblpVODQvOURaZG5GdnZ4bUFqQk90NlFwQmxjNEovMER4dmtUQ3FwY2x2emlMNkJDQ1BuamRsSUIzUHUzQnhzUG15Z1VZN0lpMnpiZENkbGlpb3c9IgogICAgICAgICAgfSwKICAgICAgICAgIHsKICAgICAgICAgICAgInJhd0J5dGVzIjogIk1JSUI5ekNDQVh5Z0F3SUJBZ0lVQUxaTkFQRmR4SFB3amVEbG9Ed3lZQ2hBTy80d0NnWUlLb1pJemowRUF3TXdLakVWTUJNR0ExVUVDaE1NYzJsbmMzUnZjbVV1WkdWMk1SRXdEd1lEVlFRREV3aHphV2R6ZEc5eVpUQWVGdzB5TVRFd01EY3hNelUyTlRsYUZ3MHpNVEV3TURVeE16VTJOVGhhTUNveEZUQVRCZ05WQkFvVERITnBaM04wYjNKbExtUmxkakVSTUE4R0ExVUVBeE1JYzJsbmMzUnZjbVV3ZGpBUUJnY3Foa2pPUFFJQkJnVXJnUVFBSWdOaUFBVDdYZUZUNHJiM1BRR3dTNElhanRMazMvT2xucGdhbmdhQmNsWXBzWUJyNWkrNHluQjA3Y2ViM0xQME9JT1pkeGV4WDY5YzVpVnV5SlJRK0h6MDV5aStVRjN1QldBbEhwaVM1c2gwK0gyR0hFN1NYcmsxRUM1bTFUcjE5TDlnZzkyall6QmhNQTRHQTFVZER3RUIvd1FFQXdJQkJqQVBCZ05WSFJNQkFmOEVCVEFEQVFIL01CMEdBMVVkRGdRV0JCUll3QjVma1VXbFpxbDZ6SkNoa3lMUUtzWEYrakFmQmdOVkhTTUVHREFXZ0JSWXdCNWZrVVdsWnFsNnpKQ2hreUxRS3NYRitqQUtCZ2dxaGtqT1BRUURBd05wQURCbUFqRUFqMW5IZVhacCsxM05XQk5hK0VEc0RQOEcxV1dnMXRDTVdQL1dIUHFwYVZvMGpoc3dlTkZaZ1NzMGVFN3dZSTRxQWpFQTJXQjlvdDk4c0lrb0YzdlpZZGQzL1Z0V0I1YjlUTk1lYTdJeC9zdEo1VGZjTExlQUJMRTRCTkpPc1E0dm5CSEoiCiAgICAgICAgICB9CiAgICAgICAgXQogICAgICB9LAogICAgICAidmFsaWRGb3IiOiB7CiAgICAgICAgInN0YXJ0IjogIjIwMjItMDQtMTNUMjA6MDY6MTUuMDAwWiIKICAgICAgfQogICAgfQogIF0sCiAgImN0bG9ncyI6IFsKICAgIHsKICAgICAgImJhc2VVcmwiOiAiaHR0cHM6Ly9jdGZlLnNpZ3N0b3JlLmRldi90ZXN0IiwKICAgICAgImhhc2hBbGdvcml0aG0iOiAiU0hBMl8yNTYiLAogICAgICAicHVibGljS2V5IjogewogICAgICAgICJyYXdCeXRlcyI6ICJNRmt3RXdZSEtvWkl6ajBDQVFZSUtvWkl6ajBEQVFjRFFnQUViZndSK1JKdWRYc2NnUkJScEtYMVhGRHkzUHl1ZER4ei9TZm5SaTFmVDhla3BmQmQyTzF1b3o3anIzWjhuS3p4QTY5RVVRK2VGQ0ZJM3pldWJQV1U3dz09IiwKICAgICAgICAia2V5RGV0YWlscyI6ICJQS0lYX0VDRFNBX1AyNTZfU0hBXzI1NiIsCiAgICAgICAgInZhbGlkRm9yIjogewogICAgICAgICAgInN0YXJ0IjogIjIwMjEtMDMtMTRUMDA6MDA6MDAuMDAwWiIsCiAgICAgICAgICAiZW5kIjogIjIwMjItMTAtMzFUMjM6NTk6NTkuOTk5WiIKICAgICAgICB9CiAgICAgIH0sCiAgICAgICJsb2dJZCI6IHsKICAgICAgICAia2V5SWQiOiAiQ0dDUzhDaFMvMmhGMGRGcko0U2NSV2NZckJZOXd6alNiZWE4SWdZMmIzST0iCiAgICAgIH0KICAgIH0sCiAgICB7CiAgICAgICJiYXNlVXJsIjogImh0dHBzOi8vY3RmZS5zaWdzdG9yZS5kZXYvMjAyMiIsCiAgICAgICJoYXNoQWxnb3JpdGhtIjogIlNIQTJfMjU2IiwKICAgICAgInB1YmxpY0tleSI6IHsKICAgICAgICAicmF3Qnl0ZXMiOiAiTUZrd0V3WUhLb1pJemowQ0FRWUlLb1pJemowREFRY0RRZ0FFaVBTbEZpMENtRlRmRWpDVXFGOUh1Q0VjWVhOS0FhWWFsSUptQlo4eXllelBqVHFoeHJLQnBNbmFvY1Z0TEpCSTFlTTN1WG5RelFHQUpkSjRnczlGeXc9PSIsCiAgICAgICAgImtleURldGFpbHMiOiAiUEtJWF9FQ0RTQV9QMjU2X1NIQV8yNTYiLAogICAgICAgICJ2YWxpZEZvciI6IHsKICAgICAgICAgICJzdGFydCI6ICIyMDIyLTEwLTIwVDAwOjAwOjAwLjAwMFoiCiAgICAgICAgfQogICAgICB9LAogICAgICAibG9nSWQiOiB7CiAgICAgICAgImtleUlkIjogIjNUMHdhc2JIRVRKakdSNGNtV2MzQXFKS1hyamVQSzMvaDRweWdDOHA3bzQ9IgogICAgICB9CiAgICB9CiAgXQp9Cg==","registry.npmjs.org%2Fkeys.json":"ewogICAgImtleXMiOiBbCiAgICAgICAgewogICAgICAgICAgICAia2V5SWQiOiAiU0hBMjU2OmpsM2J3c3d1ODBQampva0NnaDBvMnc1YzJVNExoUUFFNTdnajljejFrekEiLAogICAgICAgICAgICAia2V5VXNhZ2UiOiAibnBtOnNpZ25hdHVyZXMiLAogICAgICAgICAgICAicHVibGljS2V5IjogewogICAgICAgICAgICAgICAgInJhd0J5dGVzIjogIk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRTFPbGIzek1BRkZ4WEtIaUlrUU81Y0ozWWhsNWk2VVBwK0lodXRlQkpidUhjQTVVb2dLbzBFV3RsV3dXNktTYUtvVE5FWUw3SmxDUWlWbmtoQmt0VWdnPT0iLAogICAgICAgICAgICAgICAgImtleURldGFpbHMiOiAiUEtJWF9FQ0RTQV9QMjU2X1NIQV8yNTYiLAogICAgICAgICAgICAgICAgInZhbGlkRm9yIjogewogICAgICAgICAgICAgICAgICAgICJzdGFydCI6ICIxOTk5LTAxLTAxVDAwOjAwOjAwLjAwMFoiLAogICAgICAgICAgICAgICAgICAgICJlbmQiOiAiMjAyNS0wMS0yOVQwMDowMDowMC4wMDBaIgogICAgICAgICAgICAgICAgfQogICAgICAgICAgICB9CiAgICAgICAgfSwKICAgICAgICB7CiAgICAgICAgICAgICJrZXlJZCI6ICJTSEEyNTY6amwzYndzd3U4MFBqam
9rQ2doMG8ydzVjMlU0TGhRQUU1N2dqOWN6MWt6QSIsCiAgICAgICAgICAgICJrZXlVc2FnZSI6ICJucG06YXR0ZXN0YXRpb25zIiwKICAgICAgICAgICAgInB1YmxpY0tleSI6IHsKICAgICAgICAgICAgICAgICJyYXdCeXRlcyI6ICJNRmt3RXdZSEtvWkl6ajBDQVFZSUtvWkl6ajBEQVFjRFFnQUUxT2xiM3pNQUZGeFhLSGlJa1FPNWNKM1lobDVpNlVQcCtJaHV0ZUJKYnVIY0E1VW9nS28wRVd0bFd3VzZLU2FLb1RORVlMN0psQ1FpVm5raEJrdFVnZz09IiwKICAgICAgICAgICAgICAgICJrZXlEZXRhaWxzIjogIlBLSVhfRUNEU0FfUDI1Nl9TSEFfMjU2IiwKICAgICAgICAgICAgICAgICJ2YWxpZEZvciI6IHsKICAgICAgICAgICAgICAgICAgICAic3RhcnQiOiAiMjAyMi0xMi0wMVQwMDowMDowMC4wMDBaIiwKICAgICAgICAgICAgICAgICAgICAiZW5kIjogIjIwMjUtMDEtMjlUMDA6MDA6MDAuMDAwWiIKICAgICAgICAgICAgICAgIH0KICAgICAgICAgICAgfQogICAgICAgIH0sCiAgICAgICAgewogICAgICAgICAgICAia2V5SWQiOiAiU0hBMjU2OkRoUTh3UjVBUEJ2RkhMRi8rVGMrQVl2UE9kVHBjSURxT2h4c0JIUndDN1UiLAogICAgICAgICAgICAia2V5VXNhZ2UiOiAibnBtOnNpZ25hdHVyZXMiLAogICAgICAgICAgICAicHVibGljS2V5IjogewogICAgICAgICAgICAgICAgInJhd0J5dGVzIjogIk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRVk2WWE3VysrN2FVUHp2TVRyZXpINlljeDNjK0hPS1lDY05HeWJKWlNDSnEvZmQ3UWE4dXVBS3RkSWtVUXRRaUVLRVJoQW1FNWxNTUpoUDhPa0RPYTJnPT0iLAogICAgICAgICAgICAgICAgImtleURldGFpbHMiOiAiUEtJWF9FQ0RTQV9QMjU2X1NIQV8yNTYiLAogICAgICAgICAgICAgICAgInZhbGlkRm9yIjogewogICAgICAgICAgICAgICAgICAgICJzdGFydCI6ICIyMDI1LTAxLTEzVDAwOjAwOjAwLjAwMFoiCiAgICAgICAgICAgICAgICB9CiAgICAgICAgICAgIH0KICAgICAgICB9LAogICAgICAgIHsKICAgICAgICAgICAgImtleUlkIjogIlNIQTI1NjpEaFE4d1I1QVBCdkZITEYvK1RjK0FZdlBPZFRwY0lEcU9oeHNCSFJ3QzdVIiwKICAgICAgICAgICAgImtleVVzYWdlIjogIm5wbTphdHRlc3RhdGlvbnMiLAogICAgICAgICAgICAicHVibGljS2V5IjogewogICAgICAgICAgICAgICAgInJhd0J5dGVzIjogIk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRVk2WWE3VysrN2FVUHp2TVRyZXpINlljeDNjK0hPS1lDY05HeWJKWlNDSnEvZmQ3UWE4dXVBS3RkSWtVUXRRaUVLRVJoQW1FNWxNTUpoUDhPa0RPYTJnPT0iLAogICAgICAgICAgICAgICAgImtleURldGFpbHMiOiAiUEtJWF9FQ0RTQV9QMjU2X1NIQV8yNTYiLAogICAgICAgICAgICAgICAgInZhbGlkRm9yIjogewogICAgICAgICAgICAgICAgICAgICJzdGFydCI6ICIyMDI1LTAxLTEzVDAwOjAwOjAwLjAwMFoiCiAgICAgICAgICAgICAgICB9CiAgICAgICAgICAgIH0KICAgICAgICB9CiAgICBdCn0K"}}}
diff --git a/deps/npm/node_modules/abbrev/lib/index.js b/deps/npm/node_modules/abbrev/lib/index.js
index 9f48801f049c9e..f7bee0c6fc7ada 100644
--- a/deps/npm/node_modules/abbrev/lib/index.js
+++ b/deps/npm/node_modules/abbrev/lib/index.js
@@ -1,7 +1,10 @@
module.exports = abbrev
function abbrev (...args) {
- let list = args.length === 1 || Array.isArray(args[0]) ? args[0] : args
+ let list = args
+ if (args.length === 1 && (Array.isArray(args[0]) || typeof args[0] === 'string')) {
+ list = [].concat(args[0])
+ }
for (let i = 0, l = list.length; i < l; i++) {
list[i] = typeof list[i] === 'string' ? list[i] : String(list[i])
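A minimal usage sketch of the behaviour the abbrev 3.0.1 change gives: a lone string argument is now wrapped as a single word, whereas the old one-liner let it fall through and iterate over the string character by character (entry point assumed to be the package's documented `require('abbrev')`):

    const abbrev = require('abbrev')

    abbrev('ruby')            // -> { r: 'ruby', ru: 'ruby', rub: 'ruby', ruby: 'ruby' }
    abbrev(['ruby', 'rules']) // array form is unchanged
    abbrev('ruby', 'rules')   // multiple string arguments are unchanged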
diff --git a/deps/npm/node_modules/abbrev/package.json b/deps/npm/node_modules/abbrev/package.json
index 9dfcc0095d7dc6..077d4bccd0e69e 100644
--- a/deps/npm/node_modules/abbrev/package.json
+++ b/deps/npm/node_modules/abbrev/package.json
@@ -1,6 +1,6 @@
{
"name": "abbrev",
- "version": "3.0.0",
+ "version": "3.0.1",
"description": "Like ruby's abbrev module, but in js",
"author": "GitHub Inc.",
"main": "lib/index.js",
@@ -21,7 +21,7 @@
"license": "ISC",
"devDependencies": {
"@npmcli/eslint-config": "^5.0.0",
- "@npmcli/template-oss": "4.23.3",
+ "@npmcli/template-oss": "4.24.3",
"tap": "^16.3.0"
},
"tap": {
@@ -39,7 +39,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.23.3",
+ "version": "4.24.3",
"publish": true
}
}
diff --git a/deps/npm/node_modules/agent-base/dist/index.js b/deps/npm/node_modules/agent-base/dist/index.js
index 69396356e74db7..c3c4099c73c027 100644
--- a/deps/npm/node_modules/agent-base/dist/index.js
+++ b/deps/npm/node_modules/agent-base/dist/index.js
@@ -133,8 +133,13 @@ class Agent extends http.Agent {
.then((socket) => {
this.decrementSockets(name, fakeSocket);
if (socket instanceof http.Agent) {
- // @ts-expect-error `addRequest()` isn't defined in `@types/node`
- return socket.addRequest(req, connectOpts);
+ try {
+ // @ts-expect-error `addRequest()` isn't defined in `@types/node`
+ return socket.addRequest(req, connectOpts);
+ }
+ catch (err) {
+ return cb(err);
+ }
}
this[INTERNAL].currentSocket = socket;
// @ts-expect-error `createSocket()` isn't defined in `@types/node`
diff --git a/deps/npm/node_modules/agent-base/package.json b/deps/npm/node_modules/agent-base/package.json
index 8e95171707fef1..175ee71fb70eae 100644
--- a/deps/npm/node_modules/agent-base/package.json
+++ b/deps/npm/node_modules/agent-base/package.json
@@ -1,6 +1,6 @@
{
"name": "agent-base",
- "version": "7.1.1",
+ "version": "7.1.3",
"description": "Turn a function into an `http.Agent` instance",
"main": "./dist/index.js",
"types": "./dist/index.d.ts",
@@ -21,9 +21,6 @@
],
"author": "Nathan Rajlich (http://n8.io/)",
"license": "MIT",
- "dependencies": {
- "debug": "^4.3.4"
- },
"devDependencies": {
"@types/debug": "^4.1.7",
"@types/jest": "^29.5.1",
@@ -34,7 +31,7 @@
"jest": "^29.5.0",
"ts-jest": "^29.1.0",
"typescript": "^5.0.4",
- "ws": "^3.3.3",
+ "ws": "^5.2.4",
"tsconfig": "0.0.0"
},
"engines": {
diff --git a/deps/npm/node_modules/aggregate-error/index.js b/deps/npm/node_modules/aggregate-error/index.js
deleted file mode 100644
index ba5bf022116855..00000000000000
--- a/deps/npm/node_modules/aggregate-error/index.js
+++ /dev/null
@@ -1,47 +0,0 @@
-'use strict';
-const indentString = require('indent-string');
-const cleanStack = require('clean-stack');
-
-const cleanInternalStack = stack => stack.replace(/\s+at .*aggregate-error\/index.js:\d+:\d+\)?/g, '');
-
-class AggregateError extends Error {
- constructor(errors) {
- if (!Array.isArray(errors)) {
- throw new TypeError(`Expected input to be an Array, got ${typeof errors}`);
- }
-
- errors = [...errors].map(error => {
- if (error instanceof Error) {
- return error;
- }
-
- if (error !== null && typeof error === 'object') {
- // Handle plain error objects with message property and/or possibly other metadata
- return Object.assign(new Error(error.message), error);
- }
-
- return new Error(error);
- });
-
- let message = errors
- .map(error => {
- // The `stack` property is not standardized, so we can't assume it exists
- return typeof error.stack === 'string' ? cleanInternalStack(cleanStack(error.stack)) : String(error);
- })
- .join('\n');
- message = '\n' + indentString(message, 4);
- super(message);
-
- this.name = 'AggregateError';
-
- Object.defineProperty(this, '_errors', {value: errors});
- }
-
- * [Symbol.iterator]() {
- for (const error of this._errors) {
- yield error;
- }
- }
-}
-
-module.exports = AggregateError;
diff --git a/deps/npm/node_modules/aggregate-error/license b/deps/npm/node_modules/aggregate-error/license
deleted file mode 100644
index e7af2f77107d73..00000000000000
--- a/deps/npm/node_modules/aggregate-error/license
+++ /dev/null
@@ -1,9 +0,0 @@
-MIT License
-
-Copyright (c) Sindre Sorhus (sindresorhus.com)
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/deps/npm/node_modules/aggregate-error/package.json b/deps/npm/node_modules/aggregate-error/package.json
deleted file mode 100644
index 74fcc37611e642..00000000000000
--- a/deps/npm/node_modules/aggregate-error/package.json
+++ /dev/null
@@ -1,41 +0,0 @@
-{
- "name": "aggregate-error",
- "version": "3.1.0",
- "description": "Create an error from multiple errors",
- "license": "MIT",
- "repository": "sindresorhus/aggregate-error",
- "author": {
- "name": "Sindre Sorhus",
- "email": "sindresorhus@gmail.com",
- "url": "sindresorhus.com"
- },
- "engines": {
- "node": ">=8"
- },
- "scripts": {
- "test": "xo && ava && tsd"
- },
- "files": [
- "index.js",
- "index.d.ts"
- ],
- "keywords": [
- "aggregate",
- "error",
- "combine",
- "multiple",
- "many",
- "collection",
- "iterable",
- "iterator"
- ],
- "dependencies": {
- "clean-stack": "^2.0.0",
- "indent-string": "^4.0.0"
- },
- "devDependencies": {
- "ava": "^2.4.0",
- "tsd": "^0.7.1",
- "xo": "^0.25.3"
- }
-}
diff --git a/deps/npm/node_modules/brace-expansion/index.js b/deps/npm/node_modules/brace-expansion/index.js
index 6c06dafcfb6e05..254ca75dd9aba2 100644
--- a/deps/npm/node_modules/brace-expansion/index.js
+++ b/deps/npm/node_modules/brace-expansion/index.js
@@ -116,7 +116,7 @@ function expand(str, isTop) {
var isOptions = m.body.indexOf(',') >= 0;
if (!isSequence && !isOptions) {
// {a},b}
- if (m.post.match(/,.*\}/)) {
+ if (m.post.match(/,(?!,).*\}/)) {
str = m.pre + '{' + m.body + escClose + m.post;
return expand(str);
}
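The tightened regex above adds a negative lookahead after the comma in the `{a},b}` special case, which reads like a hardening fix for pathological inputs; ordinary expansion, sketched below against the package's single-function export, is unaffected:

    const expand = require('brace-expansion')

    expand('file-{a,b}.txt') // -> ['file-a.txt', 'file-b.txt']
    expand('x{1..3}')        // -> ['x1', 'x2', 'x3']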
diff --git a/deps/npm/node_modules/brace-expansion/package.json b/deps/npm/node_modules/brace-expansion/package.json
index 7097d41e39de5d..c7eee34511002a 100644
--- a/deps/npm/node_modules/brace-expansion/package.json
+++ b/deps/npm/node_modules/brace-expansion/package.json
@@ -1,7 +1,7 @@
{
"name": "brace-expansion",
"description": "Brace expansion as known from sh/bash",
- "version": "2.0.1",
+ "version": "2.0.2",
"repository": {
"type": "git",
"url": "git://github.com/juliangruber/brace-expansion.git"
@@ -42,5 +42,8 @@
"iphone/6.0..latest",
"android-browser/4.2..latest"
]
+ },
+ "publishConfig": {
+ "tag": "2.x"
}
}
diff --git a/deps/npm/node_modules/cacache/node_modules/minizlib/LICENSE b/deps/npm/node_modules/cacache/node_modules/minizlib/LICENSE
deleted file mode 100644
index 49f7efe431c9ea..00000000000000
--- a/deps/npm/node_modules/cacache/node_modules/minizlib/LICENSE
+++ /dev/null
@@ -1,26 +0,0 @@
-Minizlib was created by Isaac Z. Schlueter.
-It is a derivative work of the Node.js project.
-
-"""
-Copyright (c) 2017-2023 Isaac Z. Schlueter and Contributors
-Copyright (c) 2017-2023 Node.js contributors. All rights reserved.
-Copyright (c) 2017-2023 Joyent, Inc. and other Node contributors. All rights reserved.
-
-Permission is hereby granted, free of charge, to any person obtaining a
-copy of this software and associated documentation files (the "Software"),
-to deal in the Software without restriction, including without limitation
-the rights to use, copy, modify, merge, publish, distribute, sublicense,
-and/or sell copies of the Software, and to permit persons to whom the
-Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-"""
diff --git a/deps/npm/node_modules/cacache/node_modules/minizlib/dist/commonjs/index.js b/deps/npm/node_modules/cacache/node_modules/minizlib/dist/commonjs/index.js
deleted file mode 100644
index ad65eef0495076..00000000000000
--- a/deps/npm/node_modules/cacache/node_modules/minizlib/dist/commonjs/index.js
+++ /dev/null
@@ -1,352 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.BrotliDecompress = exports.BrotliCompress = exports.Brotli = exports.Unzip = exports.InflateRaw = exports.DeflateRaw = exports.Gunzip = exports.Gzip = exports.Inflate = exports.Deflate = exports.Zlib = exports.ZlibError = exports.constants = void 0;
-const assert_1 = __importDefault(require("assert"));
-const buffer_1 = require("buffer");
-const minipass_1 = require("minipass");
-const zlib_1 = __importDefault(require("zlib"));
-const constants_js_1 = require("./constants.js");
-var constants_js_2 = require("./constants.js");
-Object.defineProperty(exports, "constants", { enumerable: true, get: function () { return constants_js_2.constants; } });
-const OriginalBufferConcat = buffer_1.Buffer.concat;
-const _superWrite = Symbol('_superWrite');
-class ZlibError extends Error {
- code;
- errno;
- constructor(err) {
- super('zlib: ' + err.message);
- this.code = err.code;
- this.errno = err.errno;
- /* c8 ignore next */
- if (!this.code)
- this.code = 'ZLIB_ERROR';
- this.message = 'zlib: ' + err.message;
- Error.captureStackTrace(this, this.constructor);
- }
- get name() {
- return 'ZlibError';
- }
-}
-exports.ZlibError = ZlibError;
-// the Zlib class they all inherit from
-// This thing manages the queue of requests, and returns
-// true or false if there is anything in the queue when
-// you call the .write() method.
-const _flushFlag = Symbol('flushFlag');
-class ZlibBase extends minipass_1.Minipass {
- #sawError = false;
- #ended = false;
- #flushFlag;
- #finishFlushFlag;
- #fullFlushFlag;
- #handle;
- #onError;
- get sawError() {
- return this.#sawError;
- }
- get handle() {
- return this.#handle;
- }
- /* c8 ignore start */
- get flushFlag() {
- return this.#flushFlag;
- }
- /* c8 ignore stop */
- constructor(opts, mode) {
- if (!opts || typeof opts !== 'object')
- throw new TypeError('invalid options for ZlibBase constructor');
- //@ts-ignore
- super(opts);
- /* c8 ignore start */
- this.#flushFlag = opts.flush ?? 0;
- this.#finishFlushFlag = opts.finishFlush ?? 0;
- this.#fullFlushFlag = opts.fullFlushFlag ?? 0;
- /* c8 ignore stop */
- // this will throw if any options are invalid for the class selected
- try {
- // @types/node doesn't know that it exports the classes, but they're there
- //@ts-ignore
- this.#handle = new zlib_1.default[mode](opts);
- }
- catch (er) {
- // make sure that all errors get decorated properly
- throw new ZlibError(er);
- }
- this.#onError = err => {
- // no sense raising multiple errors, since we abort on the first one.
- if (this.#sawError)
- return;
- this.#sawError = true;
- // there is no way to cleanly recover.
- // continuing only obscures problems.
- this.close();
- this.emit('error', err);
- };
- this.#handle?.on('error', er => this.#onError(new ZlibError(er)));
- this.once('end', () => this.close);
- }
- close() {
- if (this.#handle) {
- this.#handle.close();
- this.#handle = undefined;
- this.emit('close');
- }
- }
- reset() {
- if (!this.#sawError) {
- (0, assert_1.default)(this.#handle, 'zlib binding closed');
- //@ts-ignore
- return this.#handle.reset?.();
- }
- }
- flush(flushFlag) {
- if (this.ended)
- return;
- if (typeof flushFlag !== 'number')
- flushFlag = this.#fullFlushFlag;
- this.write(Object.assign(buffer_1.Buffer.alloc(0), { [_flushFlag]: flushFlag }));
- }
- end(chunk, encoding, cb) {
- /* c8 ignore start */
- if (typeof chunk === 'function') {
- cb = chunk;
- encoding = undefined;
- chunk = undefined;
- }
- if (typeof encoding === 'function') {
- cb = encoding;
- encoding = undefined;
- }
- /* c8 ignore stop */
- if (chunk) {
- if (encoding)
- this.write(chunk, encoding);
- else
- this.write(chunk);
- }
- this.flush(this.#finishFlushFlag);
- this.#ended = true;
- return super.end(cb);
- }
- get ended() {
- return this.#ended;
- }
- // overridden in the gzip classes to do portable writes
- [_superWrite](data) {
- return super.write(data);
- }
- write(chunk, encoding, cb) {
- // process the chunk using the sync process
- // then super.write() all the outputted chunks
- if (typeof encoding === 'function')
- (cb = encoding), (encoding = 'utf8');
- if (typeof chunk === 'string')
- chunk = buffer_1.Buffer.from(chunk, encoding);
- if (this.#sawError)
- return;
- (0, assert_1.default)(this.#handle, 'zlib binding closed');
- // _processChunk tries to .close() the native handle after it's done, so we
- // intercept that by temporarily making it a no-op.
- // diving into the node:zlib internals a bit here
- const nativeHandle = this.#handle
- ._handle;
- const originalNativeClose = nativeHandle.close;
- nativeHandle.close = () => { };
- const originalClose = this.#handle.close;
- this.#handle.close = () => { };
- // It also calls `Buffer.concat()` at the end, which may be convenient
- // for some, but which we are not interested in as it slows us down.
- buffer_1.Buffer.concat = args => args;
- let result = undefined;
- try {
- const flushFlag = typeof chunk[_flushFlag] === 'number'
- ? chunk[_flushFlag]
- : this.#flushFlag;
- result = this.#handle._processChunk(chunk, flushFlag);
- // if we don't throw, reset it back how it was
- buffer_1.Buffer.concat = OriginalBufferConcat;
- }
- catch (err) {
- // or if we do, put Buffer.concat() back before we emit error
- // Error events call into user code, which may call Buffer.concat()
- buffer_1.Buffer.concat = OriginalBufferConcat;
- this.#onError(new ZlibError(err));
- }
- finally {
- if (this.#handle) {
- // Core zlib resets `_handle` to null after attempting to close the
- // native handle. Our no-op handler prevented actual closure, but we
- // need to restore the `._handle` property.
- ;
- this.#handle._handle =
- nativeHandle;
- nativeHandle.close = originalNativeClose;
- this.#handle.close = originalClose;
- // `_processChunk()` adds an 'error' listener. If we don't remove it
- // after each call, these handlers start piling up.
- this.#handle.removeAllListeners('error');
- // make sure OUR error listener is still attached tho
- }
- }
- if (this.#handle)
- this.#handle.on('error', er => this.#onError(new ZlibError(er)));
- let writeReturn;
- if (result) {
- if (Array.isArray(result) && result.length > 0) {
- const r = result[0];
- // The first buffer is always `handle._outBuffer`, which would be
- // re-used for later invocations; so, we always have to copy that one.
- writeReturn = this[_superWrite](buffer_1.Buffer.from(r));
- for (let i = 1; i < result.length; i++) {
- writeReturn = this[_superWrite](result[i]);
- }
- }
- else {
- // either a single Buffer or an empty array
- writeReturn = this[_superWrite](buffer_1.Buffer.from(result));
- }
- }
- if (cb)
- cb();
- return writeReturn;
- }
-}
-class Zlib extends ZlibBase {
- #level;
- #strategy;
- constructor(opts, mode) {
- opts = opts || {};
- opts.flush = opts.flush || constants_js_1.constants.Z_NO_FLUSH;
- opts.finishFlush = opts.finishFlush || constants_js_1.constants.Z_FINISH;
- opts.fullFlushFlag = constants_js_1.constants.Z_FULL_FLUSH;
- super(opts, mode);
- this.#level = opts.level;
- this.#strategy = opts.strategy;
- }
- params(level, strategy) {
- if (this.sawError)
- return;
- if (!this.handle)
- throw new Error('cannot switch params when binding is closed');
- // no way to test this without also not supporting params at all
- /* c8 ignore start */
- if (!this.handle.params)
- throw new Error('not supported in this implementation');
- /* c8 ignore stop */
- if (this.#level !== level || this.#strategy !== strategy) {
- this.flush(constants_js_1.constants.Z_SYNC_FLUSH);
- (0, assert_1.default)(this.handle, 'zlib binding closed');
- // .params() calls .flush(), but the latter is always async in the
- // core zlib. We override .flush() temporarily to intercept that and
- // flush synchronously.
- const origFlush = this.handle.flush;
- this.handle.flush = (flushFlag, cb) => {
- /* c8 ignore start */
- if (typeof flushFlag === 'function') {
- cb = flushFlag;
- flushFlag = this.flushFlag;
- }
- /* c8 ignore stop */
- this.flush(flushFlag);
- cb?.();
- };
- try {
- ;
- this.handle.params(level, strategy);
- }
- finally {
- this.handle.flush = origFlush;
- }
- /* c8 ignore start */
- if (this.handle) {
- this.#level = level;
- this.#strategy = strategy;
- }
- /* c8 ignore stop */
- }
- }
-}
-exports.Zlib = Zlib;
-// minimal 2-byte header
-class Deflate extends Zlib {
- constructor(opts) {
- super(opts, 'Deflate');
- }
-}
-exports.Deflate = Deflate;
-class Inflate extends Zlib {
- constructor(opts) {
- super(opts, 'Inflate');
- }
-}
-exports.Inflate = Inflate;
-class Gzip extends Zlib {
- #portable;
- constructor(opts) {
- super(opts, 'Gzip');
- this.#portable = opts && !!opts.portable;
- }
- [_superWrite](data) {
- if (!this.#portable)
- return super[_superWrite](data);
- // we'll always get the header emitted in one first chunk
- // overwrite the OS indicator byte with 0xFF
- this.#portable = false;
- data[9] = 255;
- return super[_superWrite](data);
- }
-}
-exports.Gzip = Gzip;
-class Gunzip extends Zlib {
- constructor(opts) {
- super(opts, 'Gunzip');
- }
-}
-exports.Gunzip = Gunzip;
-// raw - no header
-class DeflateRaw extends Zlib {
- constructor(opts) {
- super(opts, 'DeflateRaw');
- }
-}
-exports.DeflateRaw = DeflateRaw;
-class InflateRaw extends Zlib {
- constructor(opts) {
- super(opts, 'InflateRaw');
- }
-}
-exports.InflateRaw = InflateRaw;
-// auto-detect header.
-class Unzip extends Zlib {
- constructor(opts) {
- super(opts, 'Unzip');
- }
-}
-exports.Unzip = Unzip;
-class Brotli extends ZlibBase {
- constructor(opts, mode) {
- opts = opts || {};
- opts.flush = opts.flush || constants_js_1.constants.BROTLI_OPERATION_PROCESS;
- opts.finishFlush =
- opts.finishFlush || constants_js_1.constants.BROTLI_OPERATION_FINISH;
- opts.fullFlushFlag = constants_js_1.constants.BROTLI_OPERATION_FLUSH;
- super(opts, mode);
- }
-}
-exports.Brotli = Brotli;
-class BrotliCompress extends Brotli {
- constructor(opts) {
- super(opts, 'BrotliCompress');
- }
-}
-exports.BrotliCompress = BrotliCompress;
-class BrotliDecompress extends Brotli {
- constructor(opts) {
- super(opts, 'BrotliDecompress');
- }
-}
-exports.BrotliDecompress = BrotliDecompress;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/cacache/node_modules/minizlib/dist/esm/index.js b/deps/npm/node_modules/cacache/node_modules/minizlib/dist/esm/index.js
deleted file mode 100644
index a6269b505f47cc..00000000000000
--- a/deps/npm/node_modules/cacache/node_modules/minizlib/dist/esm/index.js
+++ /dev/null
@@ -1,333 +0,0 @@
-import assert from 'assert';
-import { Buffer } from 'buffer';
-import { Minipass } from 'minipass';
-import realZlib from 'zlib';
-import { constants } from './constants.js';
-export { constants } from './constants.js';
-const OriginalBufferConcat = Buffer.concat;
-const _superWrite = Symbol('_superWrite');
-export class ZlibError extends Error {
- code;
- errno;
- constructor(err) {
- super('zlib: ' + err.message);
- this.code = err.code;
- this.errno = err.errno;
- /* c8 ignore next */
- if (!this.code)
- this.code = 'ZLIB_ERROR';
- this.message = 'zlib: ' + err.message;
- Error.captureStackTrace(this, this.constructor);
- }
- get name() {
- return 'ZlibError';
- }
-}
-// the Zlib class they all inherit from
-// This thing manages the queue of requests, and returns
-// true or false if there is anything in the queue when
-// you call the .write() method.
-const _flushFlag = Symbol('flushFlag');
-class ZlibBase extends Minipass {
- #sawError = false;
- #ended = false;
- #flushFlag;
- #finishFlushFlag;
- #fullFlushFlag;
- #handle;
- #onError;
- get sawError() {
- return this.#sawError;
- }
- get handle() {
- return this.#handle;
- }
- /* c8 ignore start */
- get flushFlag() {
- return this.#flushFlag;
- }
- /* c8 ignore stop */
- constructor(opts, mode) {
- if (!opts || typeof opts !== 'object')
- throw new TypeError('invalid options for ZlibBase constructor');
- //@ts-ignore
- super(opts);
- /* c8 ignore start */
- this.#flushFlag = opts.flush ?? 0;
- this.#finishFlushFlag = opts.finishFlush ?? 0;
- this.#fullFlushFlag = opts.fullFlushFlag ?? 0;
- /* c8 ignore stop */
- // this will throw if any options are invalid for the class selected
- try {
- // @types/node doesn't know that it exports the classes, but they're there
- //@ts-ignore
- this.#handle = new realZlib[mode](opts);
- }
- catch (er) {
- // make sure that all errors get decorated properly
- throw new ZlibError(er);
- }
- this.#onError = err => {
- // no sense raising multiple errors, since we abort on the first one.
- if (this.#sawError)
- return;
- this.#sawError = true;
- // there is no way to cleanly recover.
- // continuing only obscures problems.
- this.close();
- this.emit('error', err);
- };
- this.#handle?.on('error', er => this.#onError(new ZlibError(er)));
- this.once('end', () => this.close);
- }
- close() {
- if (this.#handle) {
- this.#handle.close();
- this.#handle = undefined;
- this.emit('close');
- }
- }
- reset() {
- if (!this.#sawError) {
- assert(this.#handle, 'zlib binding closed');
- //@ts-ignore
- return this.#handle.reset?.();
- }
- }
- flush(flushFlag) {
- if (this.ended)
- return;
- if (typeof flushFlag !== 'number')
- flushFlag = this.#fullFlushFlag;
- this.write(Object.assign(Buffer.alloc(0), { [_flushFlag]: flushFlag }));
- }
- end(chunk, encoding, cb) {
- /* c8 ignore start */
- if (typeof chunk === 'function') {
- cb = chunk;
- encoding = undefined;
- chunk = undefined;
- }
- if (typeof encoding === 'function') {
- cb = encoding;
- encoding = undefined;
- }
- /* c8 ignore stop */
- if (chunk) {
- if (encoding)
- this.write(chunk, encoding);
- else
- this.write(chunk);
- }
- this.flush(this.#finishFlushFlag);
- this.#ended = true;
- return super.end(cb);
- }
- get ended() {
- return this.#ended;
- }
- // overridden in the gzip classes to do portable writes
- [_superWrite](data) {
- return super.write(data);
- }
- write(chunk, encoding, cb) {
- // process the chunk using the sync process
- // then super.write() all the outputted chunks
- if (typeof encoding === 'function')
- (cb = encoding), (encoding = 'utf8');
- if (typeof chunk === 'string')
- chunk = Buffer.from(chunk, encoding);
- if (this.#sawError)
- return;
- assert(this.#handle, 'zlib binding closed');
- // _processChunk tries to .close() the native handle after it's done, so we
- // intercept that by temporarily making it a no-op.
- // diving into the node:zlib internals a bit here
- const nativeHandle = this.#handle
- ._handle;
- const originalNativeClose = nativeHandle.close;
- nativeHandle.close = () => { };
- const originalClose = this.#handle.close;
- this.#handle.close = () => { };
- // It also calls `Buffer.concat()` at the end, which may be convenient
- // for some, but which we are not interested in as it slows us down.
- Buffer.concat = args => args;
- let result = undefined;
- try {
- const flushFlag = typeof chunk[_flushFlag] === 'number'
- ? chunk[_flushFlag]
- : this.#flushFlag;
- result = this.#handle._processChunk(chunk, flushFlag);
- // if we don't throw, reset it back how it was
- Buffer.concat = OriginalBufferConcat;
- }
- catch (err) {
- // or if we do, put Buffer.concat() back before we emit error
- // Error events call into user code, which may call Buffer.concat()
- Buffer.concat = OriginalBufferConcat;
- this.#onError(new ZlibError(err));
- }
- finally {
- if (this.#handle) {
- // Core zlib resets `_handle` to null after attempting to close the
- // native handle. Our no-op handler prevented actual closure, but we
- // need to restore the `._handle` property.
- ;
- this.#handle._handle =
- nativeHandle;
- nativeHandle.close = originalNativeClose;
- this.#handle.close = originalClose;
- // `_processChunk()` adds an 'error' listener. If we don't remove it
- // after each call, these handlers start piling up.
- this.#handle.removeAllListeners('error');
- // make sure OUR error listener is still attached tho
- }
- }
- if (this.#handle)
- this.#handle.on('error', er => this.#onError(new ZlibError(er)));
- let writeReturn;
- if (result) {
- if (Array.isArray(result) && result.length > 0) {
- const r = result[0];
- // The first buffer is always `handle._outBuffer`, which would be
- // re-used for later invocations; so, we always have to copy that one.
- writeReturn = this[_superWrite](Buffer.from(r));
- for (let i = 1; i < result.length; i++) {
- writeReturn = this[_superWrite](result[i]);
- }
- }
- else {
- // either a single Buffer or an empty array
- writeReturn = this[_superWrite](Buffer.from(result));
- }
- }
- if (cb)
- cb();
- return writeReturn;
- }
-}
-export class Zlib extends ZlibBase {
- #level;
- #strategy;
- constructor(opts, mode) {
- opts = opts || {};
- opts.flush = opts.flush || constants.Z_NO_FLUSH;
- opts.finishFlush = opts.finishFlush || constants.Z_FINISH;
- opts.fullFlushFlag = constants.Z_FULL_FLUSH;
- super(opts, mode);
- this.#level = opts.level;
- this.#strategy = opts.strategy;
- }
- params(level, strategy) {
- if (this.sawError)
- return;
- if (!this.handle)
- throw new Error('cannot switch params when binding is closed');
- // no way to test this without also not supporting params at all
- /* c8 ignore start */
- if (!this.handle.params)
- throw new Error('not supported in this implementation');
- /* c8 ignore stop */
- if (this.#level !== level || this.#strategy !== strategy) {
- this.flush(constants.Z_SYNC_FLUSH);
- assert(this.handle, 'zlib binding closed');
- // .params() calls .flush(), but the latter is always async in the
- // core zlib. We override .flush() temporarily to intercept that and
- // flush synchronously.
- const origFlush = this.handle.flush;
- this.handle.flush = (flushFlag, cb) => {
- /* c8 ignore start */
- if (typeof flushFlag === 'function') {
- cb = flushFlag;
- flushFlag = this.flushFlag;
- }
- /* c8 ignore stop */
- this.flush(flushFlag);
- cb?.();
- };
- try {
- ;
- this.handle.params(level, strategy);
- }
- finally {
- this.handle.flush = origFlush;
- }
- /* c8 ignore start */
- if (this.handle) {
- this.#level = level;
- this.#strategy = strategy;
- }
- /* c8 ignore stop */
- }
- }
-}
-// minimal 2-byte header
-export class Deflate extends Zlib {
- constructor(opts) {
- super(opts, 'Deflate');
- }
-}
-export class Inflate extends Zlib {
- constructor(opts) {
- super(opts, 'Inflate');
- }
-}
-export class Gzip extends Zlib {
- #portable;
- constructor(opts) {
- super(opts, 'Gzip');
- this.#portable = opts && !!opts.portable;
- }
- [_superWrite](data) {
- if (!this.#portable)
- return super[_superWrite](data);
- // we'll always get the header emitted in one first chunk
- // overwrite the OS indicator byte with 0xFF
- this.#portable = false;
- data[9] = 255;
- return super[_superWrite](data);
- }
-}
-export class Gunzip extends Zlib {
- constructor(opts) {
- super(opts, 'Gunzip');
- }
-}
-// raw - no header
-export class DeflateRaw extends Zlib {
- constructor(opts) {
- super(opts, 'DeflateRaw');
- }
-}
-export class InflateRaw extends Zlib {
- constructor(opts) {
- super(opts, 'InflateRaw');
- }
-}
-// auto-detect header.
-export class Unzip extends Zlib {
- constructor(opts) {
- super(opts, 'Unzip');
- }
-}
-export class Brotli extends ZlibBase {
- constructor(opts, mode) {
- opts = opts || {};
- opts.flush = opts.flush || constants.BROTLI_OPERATION_PROCESS;
- opts.finishFlush =
- opts.finishFlush || constants.BROTLI_OPERATION_FINISH;
- opts.fullFlushFlag = constants.BROTLI_OPERATION_FLUSH;
- super(opts, mode);
- }
-}
-export class BrotliCompress extends Brotli {
- constructor(opts) {
- super(opts, 'BrotliCompress');
- }
-}
-export class BrotliDecompress extends Brotli {
- constructor(opts) {
- super(opts, 'BrotliDecompress');
- }
-}
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/cacache/node_modules/minizlib/package.json b/deps/npm/node_modules/cacache/node_modules/minizlib/package.json
deleted file mode 100644
index e94623ff43d353..00000000000000
--- a/deps/npm/node_modules/cacache/node_modules/minizlib/package.json
+++ /dev/null
@@ -1,81 +0,0 @@
-{
- "name": "minizlib",
- "version": "3.0.1",
- "description": "A small fast zlib stream built on [minipass](http://npm.im/minipass) and Node.js's zlib binding.",
- "main": "./dist/commonjs/index.js",
- "dependencies": {
- "minipass": "^7.0.4",
- "rimraf": "^5.0.5"
- },
- "scripts": {
- "prepare": "tshy",
- "pretest": "npm run prepare",
- "test": "tap",
- "preversion": "npm test",
- "postversion": "npm publish",
- "prepublishOnly": "git push origin --follow-tags",
- "format": "prettier --write . --loglevel warn",
- "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
- },
- "repository": {
- "type": "git",
- "url": "git+https://github.com/isaacs/minizlib.git"
- },
- "keywords": [
- "zlib",
- "gzip",
- "gunzip",
- "deflate",
- "inflate",
- "compression",
- "zip",
- "unzip"
- ],
- "author": "Isaac Z. Schlueter (http://blog.izs.me/)",
- "license": "MIT",
- "devDependencies": {
- "@types/node": "^20.11.29",
- "mkdirp": "^3.0.1",
- "tap": "^18.7.1",
- "tshy": "^1.12.0",
- "typedoc": "^0.25.12"
- },
- "files": [
- "dist"
- ],
- "engines": {
- "node": ">= 18"
- },
- "tshy": {
- "exports": {
- "./package.json": "./package.json",
- ".": "./src/index.ts"
- }
- },
- "exports": {
- "./package.json": "./package.json",
- ".": {
- "import": {
- "types": "./dist/esm/index.d.ts",
- "default": "./dist/esm/index.js"
- },
- "require": {
- "types": "./dist/commonjs/index.d.ts",
- "default": "./dist/commonjs/index.js"
- }
- }
- },
- "types": "./dist/commonjs/index.d.ts",
- "type": "module",
- "prettier": {
- "semi": false,
- "printWidth": 75,
- "tabWidth": 2,
- "useTabs": false,
- "singleQuote": true,
- "jsxSingleQuote": false,
- "bracketSameLine": true,
- "arrowParens": "avoid",
- "endOfLine": "lf"
- }
-}
diff --git a/deps/npm/node_modules/cacache/node_modules/p-map/index.js b/deps/npm/node_modules/cacache/node_modules/p-map/index.js
deleted file mode 100644
index 2f7d91ccca4eda..00000000000000
--- a/deps/npm/node_modules/cacache/node_modules/p-map/index.js
+++ /dev/null
@@ -1,269 +0,0 @@
-export default async function pMap(
- iterable,
- mapper,
- {
- concurrency = Number.POSITIVE_INFINITY,
- stopOnError = true,
- signal,
- } = {},
-) {
- return new Promise((resolve, reject_) => {
- if (iterable[Symbol.iterator] === undefined && iterable[Symbol.asyncIterator] === undefined) {
- throw new TypeError(`Expected \`input\` to be either an \`Iterable\` or \`AsyncIterable\`, got (${typeof iterable})`);
- }
-
- if (typeof mapper !== 'function') {
- throw new TypeError('Mapper function is required');
- }
-
- if (!((Number.isSafeInteger(concurrency) && concurrency >= 1) || concurrency === Number.POSITIVE_INFINITY)) {
- throw new TypeError(`Expected \`concurrency\` to be an integer from 1 and up or \`Infinity\`, got \`${concurrency}\` (${typeof concurrency})`);
- }
-
- const result = [];
- const errors = [];
- const skippedIndexesMap = new Map();
- let isRejected = false;
- let isResolved = false;
- let isIterableDone = false;
- let resolvingCount = 0;
- let currentIndex = 0;
- const iterator = iterable[Symbol.iterator] === undefined ? iterable[Symbol.asyncIterator]() : iterable[Symbol.iterator]();
-
- const reject = reason => {
- isRejected = true;
- isResolved = true;
- reject_(reason);
- };
-
- if (signal) {
- if (signal.aborted) {
- reject(signal.reason);
- }
-
- signal.addEventListener('abort', () => {
- reject(signal.reason);
- });
- }
-
- const next = async () => {
- if (isResolved) {
- return;
- }
-
- const nextItem = await iterator.next();
-
- const index = currentIndex;
- currentIndex++;
-
- // Note: `iterator.next()` can be called many times in parallel.
- // This can cause multiple calls to this `next()` function to
- // receive a `nextItem` with `done === true`.
- // The shutdown logic that rejects/resolves must be protected
- // so it runs only one time as the `skippedIndex` logic is
- // non-idempotent.
- if (nextItem.done) {
- isIterableDone = true;
-
- if (resolvingCount === 0 && !isResolved) {
- if (!stopOnError && errors.length > 0) {
- reject(new AggregateError(errors)); // eslint-disable-line unicorn/error-message
- return;
- }
-
- isResolved = true;
-
- if (skippedIndexesMap.size === 0) {
- resolve(result);
- return;
- }
-
- const pureResult = [];
-
- // Support multiple `pMapSkip`'s.
- for (const [index, value] of result.entries()) {
- if (skippedIndexesMap.get(index) === pMapSkip) {
- continue;
- }
-
- pureResult.push(value);
- }
-
- resolve(pureResult);
- }
-
- return;
- }
-
- resolvingCount++;
-
- // Intentionally detached
- (async () => {
- try {
- const element = await nextItem.value;
-
- if (isResolved) {
- return;
- }
-
- const value = await mapper(element, index);
-
- // Use Map to stage the index of the element.
- if (value === pMapSkip) {
- skippedIndexesMap.set(index, value);
- }
-
- result[index] = value;
-
- resolvingCount--;
- await next();
- } catch (error) {
- if (stopOnError) {
- reject(error);
- } else {
- errors.push(error);
- resolvingCount--;
-
- // In that case we can't really continue regardless of `stopOnError` state
- // since an iterable is likely to continue throwing after it throws once.
- // If we continue calling `next()` indefinitely we will likely end up
- // in an infinite loop of failed iteration.
- try {
- await next();
- } catch (error) {
- reject(error);
- }
- }
- }
- })();
- };
-
- // Create the concurrent runners in a detached (non-awaited)
- // promise. We need this so we can await the `next()` calls
- // to stop creating runners before hitting the concurrency limit
- // if the iterable has already been marked as done.
- // NOTE: We *must* do this for async iterators otherwise we'll spin up
- // infinite `next()` calls by default and never start the event loop.
- (async () => {
- for (let index = 0; index < concurrency; index++) {
- try {
- // eslint-disable-next-line no-await-in-loop
- await next();
- } catch (error) {
- reject(error);
- break;
- }
-
- if (isIterableDone || isRejected) {
- break;
- }
- }
- })();
- });
-}
-
-export function pMapIterable(
- iterable,
- mapper,
- {
- concurrency = Number.POSITIVE_INFINITY,
- backpressure = concurrency,
- } = {},
-) {
- if (iterable[Symbol.iterator] === undefined && iterable[Symbol.asyncIterator] === undefined) {
- throw new TypeError(`Expected \`input\` to be either an \`Iterable\` or \`AsyncIterable\`, got (${typeof iterable})`);
- }
-
- if (typeof mapper !== 'function') {
- throw new TypeError('Mapper function is required');
- }
-
- if (!((Number.isSafeInteger(concurrency) && concurrency >= 1) || concurrency === Number.POSITIVE_INFINITY)) {
- throw new TypeError(`Expected \`concurrency\` to be an integer from 1 and up or \`Infinity\`, got \`${concurrency}\` (${typeof concurrency})`);
- }
-
- if (!((Number.isSafeInteger(backpressure) && backpressure >= concurrency) || backpressure === Number.POSITIVE_INFINITY)) {
- throw new TypeError(`Expected \`backpressure\` to be an integer from \`concurrency\` (${concurrency}) and up or \`Infinity\`, got \`${backpressure}\` (${typeof backpressure})`);
- }
-
- return {
- async * [Symbol.asyncIterator]() {
- const iterator = iterable[Symbol.asyncIterator] === undefined ? iterable[Symbol.iterator]() : iterable[Symbol.asyncIterator]();
-
- const promises = [];
- let runningMappersCount = 0;
- let isDone = false;
- let index = 0;
-
- function trySpawn() {
- if (isDone || !(runningMappersCount < concurrency && promises.length < backpressure)) {
- return;
- }
-
- const promise = (async () => {
- const {done, value} = await iterator.next();
-
- if (done) {
- return {done: true};
- }
-
- runningMappersCount++;
-
- // Spawn if still below concurrency and backpressure limit
- trySpawn();
-
- try {
- const returnValue = await mapper(await value, index++);
-
- runningMappersCount--;
-
- if (returnValue === pMapSkip) {
- const index = promises.indexOf(promise);
-
- if (index > 0) {
- promises.splice(index, 1);
- }
- }
-
- // Spawn if still below backpressure limit and just dropped below concurrency limit
- trySpawn();
-
- return {done: false, value: returnValue};
- } catch (error) {
- isDone = true;
- return {error};
- }
- })();
-
- promises.push(promise);
- }
-
- trySpawn();
-
- while (promises.length > 0) {
- const {error, done, value} = await promises[0]; // eslint-disable-line no-await-in-loop
-
- promises.shift();
-
- if (error) {
- throw error;
- }
-
- if (done) {
- return;
- }
-
- // Spawn if just dropped below backpressure limit and below the concurrency limit
- trySpawn();
-
- if (value === pMapSkip) {
- continue;
- }
-
- yield value;
- }
- },
- };
-}
-
-export const pMapSkip = Symbol('skip');
diff --git a/deps/npm/node_modules/cacache/node_modules/p-map/license b/deps/npm/node_modules/cacache/node_modules/p-map/license
deleted file mode 100644
index fa7ceba3eb4a96..00000000000000
--- a/deps/npm/node_modules/cacache/node_modules/p-map/license
+++ /dev/null
@@ -1,9 +0,0 @@
-MIT License
-
-Copyright (c) Sindre Sorhus (https://sindresorhus.com)
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/deps/npm/node_modules/cacache/node_modules/p-map/package.json b/deps/npm/node_modules/cacache/node_modules/p-map/package.json
deleted file mode 100644
index ea58f599f3a035..00000000000000
--- a/deps/npm/node_modules/cacache/node_modules/p-map/package.json
+++ /dev/null
@@ -1,57 +0,0 @@
-{
- "name": "p-map",
- "version": "7.0.2",
- "description": "Map over promises concurrently",
- "license": "MIT",
- "repository": "sindresorhus/p-map",
- "funding": "https://github.com/sponsors/sindresorhus",
- "author": {
- "name": "Sindre Sorhus",
- "email": "sindresorhus@gmail.com",
- "url": "https://sindresorhus.com"
- },
- "type": "module",
- "exports": {
- "types": "./index.d.ts",
- "default": "./index.js"
- },
- "sideEffects": false,
- "engines": {
- "node": ">=18"
- },
- "scripts": {
- "test": "xo && ava && tsd"
- },
- "files": [
- "index.js",
- "index.d.ts"
- ],
- "keywords": [
- "promise",
- "map",
- "resolved",
- "wait",
- "collection",
- "iterable",
- "iterator",
- "race",
- "fulfilled",
- "async",
- "await",
- "promises",
- "concurrently",
- "concurrency",
- "parallel",
- "bluebird"
- ],
- "devDependencies": {
- "ava": "^5.2.0",
- "chalk": "^5.3.0",
- "delay": "^6.0.0",
- "in-range": "^3.0.0",
- "random-int": "^3.0.0",
- "time-span": "^5.1.0",
- "tsd": "^0.29.0",
- "xo": "^0.56.0"
- }
-}
diff --git a/deps/npm/node_modules/chalk/package.json b/deps/npm/node_modules/chalk/package.json
index 3c500105bcbf25..23b4ce33dc6677 100644
--- a/deps/npm/node_modules/chalk/package.json
+++ b/deps/npm/node_modules/chalk/package.json
@@ -1,6 +1,6 @@
{
"name": "chalk",
- "version": "5.3.0",
+ "version": "5.4.1",
"description": "Terminal string styling done right",
"license": "MIT",
"repository": "chalk/chalk",
@@ -16,6 +16,7 @@
}
},
"types": "./source/index.d.ts",
+ "sideEffects": false,
"engines": {
"node": "^12.17.0 || ^14.13 || >=16.0.0"
},
@@ -58,10 +59,9 @@
"log-update": "^5.0.0",
"matcha": "^0.7.0",
"tsd": "^0.19.0",
- "xo": "^0.53.0",
+ "xo": "^0.57.0",
"yoctodelay": "^2.0.0"
},
- "sideEffects": false,
"xo": {
"rules": {
"unicorn/prefer-string-slice": "off",
diff --git a/deps/npm/node_modules/chalk/source/vendor/supports-color/browser.js b/deps/npm/node_modules/chalk/source/vendor/supports-color/browser.js
index 9fa6888f10288e..fbb6ce0fc9ab91 100644
--- a/deps/npm/node_modules/chalk/source/vendor/supports-color/browser.js
+++ b/deps/npm/node_modules/chalk/source/vendor/supports-color/browser.js
@@ -1,14 +1,18 @@
/* eslint-env browser */
const level = (() => {
- if (navigator.userAgentData) {
+ if (!('navigator' in globalThis)) {
+ return 0;
+ }
+
+ if (globalThis.navigator.userAgentData) {
const brand = navigator.userAgentData.brands.find(({brand}) => brand === 'Chromium');
if (brand && brand.version > 93) {
return 3;
}
}
- if (/\b(Chrome|Chromium)\//.test(navigator.userAgent)) {
+ if (/\b(Chrome|Chromium)\//.test(globalThis.navigator.userAgent)) {
return 1;
}
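The new guard makes the vendored browser build degrade to "no color" when no `navigator` global exists, rather than throwing a ReferenceError. A minimal sketch of the same defensive pattern (the `detectFromUserAgent` helper here is purely hypothetical):

    // Hypothetical sketch: only sniff the user agent when a navigator actually exists.
    function browserColorLevel (detectFromUserAgent) {
      if (!('navigator' in globalThis)) {
        return 0 // e.g. a runtime without a navigator global: report no color support
      }
      return detectFromUserAgent(globalThis.navigator)
    }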
diff --git a/deps/npm/node_modules/chalk/source/vendor/supports-color/index.js b/deps/npm/node_modules/chalk/source/vendor/supports-color/index.js
index 4ce0a2da8d2242..1388372674d494 100644
--- a/deps/npm/node_modules/chalk/source/vendor/supports-color/index.js
+++ b/deps/npm/node_modules/chalk/source/vendor/supports-color/index.js
@@ -112,11 +112,11 @@ function _supportsColor(haveStream, {streamIsTTY, sniffFlags = true} = {}) {
}
if ('CI' in env) {
- if ('GITHUB_ACTIONS' in env || 'GITEA_ACTIONS' in env) {
+ if (['GITHUB_ACTIONS', 'GITEA_ACTIONS', 'CIRCLECI'].some(key => key in env)) {
return 3;
}
- if (['TRAVIS', 'CIRCLECI', 'APPVEYOR', 'GITLAB_CI', 'BUILDKITE', 'DRONE'].some(sign => sign in env) || env.CI_NAME === 'codeship') {
+ if (['TRAVIS', 'APPVEYOR', 'GITLAB_CI', 'BUILDKITE', 'DRONE'].some(sign => sign in env) || env.CI_NAME === 'codeship') {
return 1;
}
diff --git a/deps/npm/node_modules/ci-info/index.js b/deps/npm/node_modules/ci-info/index.js
index 9eba6940c4147e..9cd162991a02e0 100644
--- a/deps/npm/node_modules/ci-info/index.js
+++ b/deps/npm/node_modules/ci-info/index.js
@@ -36,7 +36,7 @@ exports.isCI = !!(
env.CI !== 'false' && // Bypass all checks if CI env is explicitly set to 'false'
(env.BUILD_ID || // Jenkins, Cloudbees
env.BUILD_NUMBER || // Jenkins, TeamCity
- env.CI || // Travis CI, CircleCI, Cirrus CI, Gitlab CI, Appveyor, CodeShip, dsari
+ env.CI || // Travis CI, CircleCI, Cirrus CI, Gitlab CI, Appveyor, CodeShip, dsari, Cloudflare Pages
env.CI_APP_ID || // Appflow
env.CI_BUILD_ID || // Appflow
env.CI_BUILD_NUMBER || // Appflow
diff --git a/deps/npm/node_modules/ci-info/package.json b/deps/npm/node_modules/ci-info/package.json
index 156329d2ce379c..fc7e8999ea3e52 100644
--- a/deps/npm/node_modules/ci-info/package.json
+++ b/deps/npm/node_modules/ci-info/package.json
@@ -1,12 +1,13 @@
{
"name": "ci-info",
- "version": "4.1.0",
+ "version": "4.2.0",
"description": "Get details about the current Continuous Integration environment",
"main": "index.js",
"typings": "index.d.ts",
+ "type": "commonjs",
"author": "Thomas Watson Steen (https://twitter.com/wa7son)",
"license": "MIT",
- "repository": "https://github.com/watson/ci-info.git",
+ "repository": "github:watson/ci-info",
"bugs": "https://github.com/watson/ci-info/issues",
"homepage": "https://github.com/watson/ci-info",
"contributors": [
@@ -41,7 +42,8 @@
},
"devDependencies": {
"clear-module": "^4.1.2",
- "husky": "^9.1.6",
+ "husky": "^9.1.7",
+ "publint": "^0.3.8",
"standard": "^17.1.2",
"tape": "^5.9.0"
},
diff --git a/deps/npm/node_modules/ci-info/vendors.json b/deps/npm/node_modules/ci-info/vendors.json
index 64d5924d1a557e..0c47fa99caef2a 100644
--- a/deps/npm/node_modules/ci-info/vendors.json
+++ b/deps/npm/node_modules/ci-info/vendors.json
@@ -85,6 +85,11 @@
"env": "CIRRUS_CI",
"pr": "CIRRUS_PR"
},
+ {
+ "name": "Cloudflare Pages",
+ "constant": "CLOUDFLARE_PAGES",
+ "env": "CF_PAGES"
+ },
{
"name": "Codefresh",
"constant": "CODEFRESH",
diff --git a/deps/npm/node_modules/cidr-regex/package.json b/deps/npm/node_modules/cidr-regex/package.json
index 88b8297b02473e..815837e9a3786a 100644
--- a/deps/npm/node_modules/cidr-regex/package.json
+++ b/deps/npm/node_modules/cidr-regex/package.json
@@ -1,6 +1,6 @@
{
"name": "cidr-regex",
- "version": "4.1.1",
+ "version": "4.1.3",
"description": "Regular expression for matching IP addresses in CIDR notation",
"author": "silverwind ",
"contributors": [
@@ -23,18 +23,17 @@
"ip-regex": "^5.0.0"
},
"devDependencies": {
- "@types/node": "20.12.12",
+ "@types/node": "22.13.4",
"eslint": "8.57.0",
- "eslint-config-silverwind": "85.1.4",
- "eslint-config-silverwind-typescript": "3.2.7",
- "typescript": "5.4.5",
- "typescript-config-silverwind": "4.3.2",
- "updates": "16.1.1",
- "versions": "12.0.2",
- "vite": "5.2.11",
- "vite-config-silverwind": "1.1.2",
- "vite-plugin-dts": "3.9.1",
- "vitest": "1.6.0",
- "vitest-config-silverwind": "9.0.6"
+ "eslint-config-silverwind": "99.0.0",
+ "eslint-config-silverwind-typescript": "9.2.2",
+ "typescript": "5.7.3",
+ "typescript-config-silverwind": "8.0.0",
+ "updates": "16.4.2",
+ "versions": "12.1.3",
+ "vite": "6.1.0",
+ "vite-config-silverwind": "4.0.0",
+ "vitest": "3.0.5",
+ "vitest-config-silverwind": "10.0.0"
}
}
diff --git a/deps/npm/node_modules/clean-stack/index.js b/deps/npm/node_modules/clean-stack/index.js
deleted file mode 100644
index 8c1dcc4cd02a25..00000000000000
--- a/deps/npm/node_modules/clean-stack/index.js
+++ /dev/null
@@ -1,40 +0,0 @@
-'use strict';
-const os = require('os');
-
-const extractPathRegex = /\s+at.*(?:\(|\s)(.*)\)?/;
-const pathRegex = /^(?:(?:(?:node|(?:internal\/[\w/]*|.*node_modules\/(?:babel-polyfill|pirates)\/.*)?\w+)\.js:\d+:\d+)|native)/;
-const homeDir = typeof os.homedir === 'undefined' ? '' : os.homedir();
-
-module.exports = (stack, options) => {
- options = Object.assign({pretty: false}, options);
-
- return stack.replace(/\\/g, '/')
- .split('\n')
- .filter(line => {
- const pathMatches = line.match(extractPathRegex);
- if (pathMatches === null || !pathMatches[1]) {
- return true;
- }
-
- const match = pathMatches[1];
-
- // Electron
- if (
- match.includes('.app/Contents/Resources/electron.asar') ||
- match.includes('.app/Contents/Resources/default_app.asar')
- ) {
- return false;
- }
-
- return !pathRegex.test(match);
- })
- .filter(line => line.trim() !== '')
- .map(line => {
- if (options.pretty) {
- return line.replace(extractPathRegex, (m, p1) => m.replace(p1, p1.replace(homeDir, '~')));
- }
-
- return line;
- })
- .join('\n');
-};
diff --git a/deps/npm/node_modules/clean-stack/package.json b/deps/npm/node_modules/clean-stack/package.json
deleted file mode 100644
index 719fdff55e7b6c..00000000000000
--- a/deps/npm/node_modules/clean-stack/package.json
+++ /dev/null
@@ -1,39 +0,0 @@
-{
- "name": "clean-stack",
- "version": "2.2.0",
- "description": "Clean up error stack traces",
- "license": "MIT",
- "repository": "sindresorhus/clean-stack",
- "author": {
- "name": "Sindre Sorhus",
- "email": "sindresorhus@gmail.com",
- "url": "sindresorhus.com"
- },
- "engines": {
- "node": ">=6"
- },
- "scripts": {
- "test": "xo && ava && tsd"
- },
- "files": [
- "index.js",
- "index.d.ts"
- ],
- "keywords": [
- "clean",
- "stack",
- "trace",
- "traces",
- "error",
- "err",
- "electron"
- ],
- "devDependencies": {
- "ava": "^1.4.1",
- "tsd": "^0.7.2",
- "xo": "^0.24.0"
- },
- "browser": {
- "os": false
- }
-}
diff --git a/deps/npm/node_modules/debug/package.json b/deps/npm/node_modules/debug/package.json
index 2f782eb9aef450..afc2f8b615b222 100644
--- a/deps/npm/node_modules/debug/package.json
+++ b/deps/npm/node_modules/debug/package.json
@@ -1,6 +1,6 @@
{
"name": "debug",
- "version": "4.3.7",
+ "version": "4.4.1",
"repository": {
"type": "git",
"url": "git://github.com/debug-js/debug.git"
@@ -26,7 +26,7 @@
"scripts": {
"lint": "xo",
"test": "npm run test:node && npm run test:browser && npm run lint",
- "test:node": "istanbul cover _mocha -- test.js test.node.js",
+ "test:node": "mocha test.js test.node.js",
"test:browser": "karma start --single-run",
"test:coverage": "cat ./coverage/lcov.info | coveralls"
},
@@ -37,7 +37,6 @@
"brfs": "^2.0.1",
"browserify": "^16.2.3",
"coveralls": "^3.0.2",
- "istanbul": "^0.4.5",
"karma": "^3.1.4",
"karma-browserify": "^6.0.0",
"karma-chrome-launcher": "^2.2.0",
@@ -56,5 +55,10 @@
"browser": "./src/browser.js",
"engines": {
"node": ">=6.0"
+ },
+ "xo": {
+ "rules": {
+ "import/extensions": "off"
+ }
}
}
diff --git a/deps/npm/node_modules/debug/src/browser.js b/deps/npm/node_modules/debug/src/browser.js
index 8d808e5889da5f..5993451b82e6b2 100644
--- a/deps/npm/node_modules/debug/src/browser.js
+++ b/deps/npm/node_modules/debug/src/browser.js
@@ -129,6 +129,7 @@ function useColors() {
// Is webkit? http://stackoverflow.com/a/16459606/376773
// document is undefined in react-native: https://github.com/facebook/react-native/pull/1632
+ // eslint-disable-next-line no-return-assign
return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) ||
// Is firebug? http://stackoverflow.com/a/398120/376773
(typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) ||
@@ -218,7 +219,7 @@ function save(namespaces) {
function load() {
let r;
try {
- r = exports.storage.getItem('debug');
+ r = exports.storage.getItem('debug') || exports.storage.getItem('DEBUG') ;
} catch (error) {
// Swallow
// XXX (@Qix-) should we be logging these?
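Note: with the `load()` fallback above, browser builds of `debug` now honor either storage key. A minimal sketch of flipping it on from a devtools console (assumes the standard `localStorage` API; the key names are the only thing the patch changes):

```js
// Either key now enables output once the page reloads and debug re-reads storage.
localStorage.setItem('DEBUG', 'app:*'); // honored as a fallback after this change
// localStorage.setItem('debug', 'app:*'); // the previously required lowercase key
location.reload();
```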
diff --git a/deps/npm/node_modules/debug/src/common.js b/deps/npm/node_modules/debug/src/common.js
index e3291b20faa1a6..141cb578b77223 100644
--- a/deps/npm/node_modules/debug/src/common.js
+++ b/deps/npm/node_modules/debug/src/common.js
@@ -166,24 +166,62 @@ function setup(env) {
createDebug.names = [];
createDebug.skips = [];
- let i;
- const split = (typeof namespaces === 'string' ? namespaces : '').split(/[\s,]+/);
- const len = split.length;
-
- for (i = 0; i < len; i++) {
- if (!split[i]) {
- // ignore empty strings
- continue;
+ const split = (typeof namespaces === 'string' ? namespaces : '')
+ .trim()
+ .replace(/\s+/g, ',')
+ .split(',')
+ .filter(Boolean);
+
+ for (const ns of split) {
+ if (ns[0] === '-') {
+ createDebug.skips.push(ns.slice(1));
+ } else {
+ createDebug.names.push(ns);
}
+ }
+ }
- namespaces = split[i].replace(/\*/g, '.*?');
-
- if (namespaces[0] === '-') {
- createDebug.skips.push(new RegExp('^' + namespaces.slice(1) + '$'));
+ /**
+ * Checks if the given string matches a namespace template, honoring
+ * asterisks as wildcards.
+ *
+ * @param {String} search
+ * @param {String} template
+ * @return {Boolean}
+ */
+ function matchesTemplate(search, template) {
+ let searchIndex = 0;
+ let templateIndex = 0;
+ let starIndex = -1;
+ let matchIndex = 0;
+
+ while (searchIndex < search.length) {
+ if (templateIndex < template.length && (template[templateIndex] === search[searchIndex] || template[templateIndex] === '*')) {
+ // Match character or proceed with wildcard
+ if (template[templateIndex] === '*') {
+ starIndex = templateIndex;
+ matchIndex = searchIndex;
+ templateIndex++; // Skip the '*'
+ } else {
+ searchIndex++;
+ templateIndex++;
+ }
+ } else if (starIndex !== -1) { // eslint-disable-line no-negated-condition
+ // Backtrack to the last '*' and try to match more characters
+ templateIndex = starIndex + 1;
+ matchIndex++;
+ searchIndex = matchIndex;
} else {
- createDebug.names.push(new RegExp('^' + namespaces + '$'));
+ return false; // No match
}
}
+
+ // Handle trailing '*' in template
+ while (templateIndex < template.length && template[templateIndex] === '*') {
+ templateIndex++;
+ }
+
+ return templateIndex === template.length;
}
/**
@@ -194,8 +232,8 @@ function setup(env) {
*/
function disable() {
const namespaces = [
- ...createDebug.names.map(toNamespace),
- ...createDebug.skips.map(toNamespace).map(namespace => '-' + namespace)
+ ...createDebug.names,
+ ...createDebug.skips.map(namespace => '-' + namespace)
].join(',');
createDebug.enable('');
return namespaces;
@@ -209,21 +247,14 @@ function setup(env) {
* @api public
*/
function enabled(name) {
- if (name[name.length - 1] === '*') {
- return true;
- }
-
- let i;
- let len;
-
- for (i = 0, len = createDebug.skips.length; i < len; i++) {
- if (createDebug.skips[i].test(name)) {
+ for (const skip of createDebug.skips) {
+ if (matchesTemplate(name, skip)) {
return false;
}
}
- for (i = 0, len = createDebug.names.length; i < len; i++) {
- if (createDebug.names[i].test(name)) {
+ for (const ns of createDebug.names) {
+ if (matchesTemplate(name, ns)) {
return true;
}
}
@@ -231,19 +262,6 @@ function setup(env) {
return false;
}
- /**
- * Convert regexp to namespace
- *
- * @param {RegExp} regxep
- * @return {String} namespace
- * @api private
- */
- function toNamespace(regexp) {
- return regexp.toString()
- .substring(2, regexp.toString().length - 2)
- .replace(/\.\*\?$/, '*');
- }
-
/**
* Coerce `val`.
*
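For context, the rewritten `enable()`/`enabled()` path above stores plain namespace strings and matches them with `matchesTemplate()` instead of building regexes. A minimal sketch of the observable behavior (assumes the published `debug` API):

```js
const createDebug = require('debug');

// Enable everything under "app:" except the noisy database logger.
createDebug.enable('app:*,-app:db');

const httpLog = createDebug('app:http');
const dbLog = createDebug('app:db');

console.log(httpLog.enabled); // true  -> "app:http" matches the "app:*" template
console.log(dbLog.enabled);   // false -> excluded by the "-app:db" skip entry

// disable() now returns the stored strings directly (no regex round-trip).
console.log(createDebug.disable()); // "app:*,-app:db"
```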
diff --git a/deps/npm/node_modules/exponential-backoff/LICENSE b/deps/npm/node_modules/exponential-backoff/LICENSE
index 7a4a3ea2424c09..4be46a90670d82 100644
--- a/deps/npm/node_modules/exponential-backoff/LICENSE
+++ b/deps/npm/node_modules/exponential-backoff/LICENSE
@@ -187,7 +187,7 @@
same "printed page" as the copyright notice for easier
identification within third-party archives.
- Copyright [yyyy] [name of copyright owner]
+ Copyright 2019 Coveo Solutions Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -199,4 +199,4 @@
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
- limitations under the License.
\ No newline at end of file
+ limitations under the License.
diff --git a/deps/npm/node_modules/exponential-backoff/dist/backoff.js b/deps/npm/node_modules/exponential-backoff/dist/backoff.js
index a0aa0dc34b6b14..6a1b6bd3835ac9 100644
--- a/deps/npm/node_modules/exponential-backoff/dist/backoff.js
+++ b/deps/npm/node_modules/exponential-backoff/dist/backoff.js
@@ -38,6 +38,12 @@ var __generator = (this && this.__generator) || function (thisArg, body) {
Object.defineProperty(exports, "__esModule", { value: true });
var options_1 = require("./options");
var delay_factory_1 = require("./delay/delay.factory");
+/**
+ * Executes a function with exponential backoff.
+ * @param request the function to be executed
+ * @param options options to customize the backoff behavior
+ * @returns Promise that resolves to the result of the `request` function
+ */
function backOff(request, options) {
if (options === void 0) { options = {}; }
return __awaiter(this, void 0, void 0, function () {
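The JSDoc added above documents the package's single entry point; a short usage sketch (assumes Node 18+ for the global `fetch`, and the real `numOfAttempts`/`startingDelay` options of this package):

```js
const { backOff } = require('exponential-backoff');

async function fetchJsonWithRetry(url) {
  return backOff(
    async () => {
      const res = await fetch(url);
      if (!res.ok) throw new Error(`HTTP ${res.status}`); // throwing triggers a retry
      return res.json();
    },
    {
      numOfAttempts: 5,   // give up after five tries
      startingDelay: 100, // first retry after ~100 ms, doubling by default
    }
  );
}

fetchJsonWithRetry('https://registry.npmjs.org/npm/latest')
  .then(pkg => console.log(pkg.version))
  .catch(err => console.error('all retries failed:', err));
```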
diff --git a/deps/npm/node_modules/exponential-backoff/package.json b/deps/npm/node_modules/exponential-backoff/package.json
index 23232a0df2c572..53fb159f827828 100644
--- a/deps/npm/node_modules/exponential-backoff/package.json
+++ b/deps/npm/node_modules/exponential-backoff/package.json
@@ -1,9 +1,10 @@
{
"name": "exponential-backoff",
- "version": "3.1.1",
+ "version": "3.1.2",
"description": "A utility that allows retrying a function with an exponential delay between attempts.",
"files": [
- "dist/"
+ "dist/",
+ "src/"
],
"main": "dist/backoff.js",
"types": "dist/backoff.d.ts",
@@ -35,7 +36,7 @@
},
"repository": {
"type": "git",
- "url": "git+https://github.com/coveo/exponential-backoff.git"
+ "url": "git+https://github.com/coveooss/exponential-backoff.git"
},
"keywords": [
"exponential",
@@ -45,9 +46,9 @@
"author": "Sami Sayegh",
"license": "Apache-2.0",
"bugs": {
- "url": "https://github.com/coveo/exponential-backoff/issues"
+ "url": "https://github.com/coveooss/exponential-backoff/issues"
},
- "homepage": "https://github.com/coveo/exponential-backoff#readme",
+ "homepage": "https://github.com/coveooss/exponential-backoff#readme",
"devDependencies": {
"@types/jest": "^24.0.18",
"@types/node": "^10.14.21",
diff --git a/deps/npm/node_modules/foreground-child/dist/commonjs/index.js b/deps/npm/node_modules/foreground-child/dist/commonjs/index.js
index 07a01c5830de40..6db65c65dca62d 100644
--- a/deps/npm/node_modules/foreground-child/dist/commonjs/index.js
+++ b/deps/npm/node_modules/foreground-child/dist/commonjs/index.js
@@ -3,7 +3,8 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
-exports.foregroundChild = exports.normalizeFgArgs = void 0;
+exports.normalizeFgArgs = void 0;
+exports.foregroundChild = foregroundChild;
const child_process_1 = require("child_process");
const cross_spawn_1 = __importDefault(require("cross-spawn"));
const signal_exit_1 = require("signal-exit");
@@ -118,6 +119,5 @@ function foregroundChild(...fgArgs) {
}
return child;
}
-exports.foregroundChild = foregroundChild;
const isPromise = (o) => !!o && typeof o === 'object' && typeof o.then === 'function';
//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/foreground-child/package.json b/deps/npm/node_modules/foreground-child/package.json
index 980b7e85d15426..75f5b9969b282b 100644
--- a/deps/npm/node_modules/foreground-child/package.json
+++ b/deps/npm/node_modules/foreground-child/package.json
@@ -1,30 +1,26 @@
{
"name": "foreground-child",
- "version": "3.3.0",
+ "version": "3.3.1",
"description": "Run a child as if it's the foreground process. Give it stdio. Exit when it exits.",
"main": "./dist/commonjs/index.js",
"types": "./dist/commonjs/index.d.ts",
"exports": {
"./watchdog": {
"import": {
- "source": "./src/watchdog.ts",
"types": "./dist/esm/watchdog.d.ts",
"default": "./dist/esm/watchdog.js"
},
"require": {
- "source": "./src/watchdog.ts",
"types": "./dist/commonjs/watchdog.d.ts",
"default": "./dist/commonjs/watchdog.js"
}
},
"./proxy-signals": {
"import": {
- "source": "./src/proxy-signals.ts",
"types": "./dist/esm/proxy-signals.d.ts",
"default": "./dist/esm/proxy-signals.js"
},
"require": {
- "source": "./src/proxy-signals.ts",
"types": "./dist/commonjs/proxy-signals.d.ts",
"default": "./dist/commonjs/proxy-signals.js"
}
@@ -32,12 +28,10 @@
"./package.json": "./package.json",
".": {
"import": {
- "source": "./src/index.ts",
"types": "./dist/esm/index.d.ts",
"default": "./dist/esm/index.js"
},
"require": {
- "source": "./src/index.ts",
"types": "./dist/commonjs/index.d.ts",
"default": "./dist/commonjs/index.js"
}
@@ -50,7 +44,7 @@
"node": ">=14"
},
"dependencies": {
- "cross-spawn": "^7.0.0",
+ "cross-spawn": "^7.0.6",
"signal-exit": "^4.0.1"
},
"scripts": {
@@ -91,8 +85,8 @@
"@types/node": "^18.15.11",
"@types/tap": "^15.0.8",
"prettier": "^3.3.2",
- "tap": "^19.2.5",
- "tshy": "^1.15.1",
+ "tap": "^21.1.0",
+ "tshy": "^3.0.2",
"typedoc": "^0.24.2",
"typescript": "^5.0.2"
},
@@ -107,5 +101,6 @@
".": "./src/index.ts"
}
},
- "type": "module"
+ "type": "module",
+ "module": "./dist/esm/index.js"
}
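For reference, a minimal sketch of the package's main export, whose CommonJS export shape is adjusted above (the cleanup-callback signature shown is my reading of the v3 API):

```js
const { foregroundChild } = require('foreground-child');

// Spawns the child with the parent's stdio, forwards signals, and exits the
// parent with the child's exit code after the optional cleanup callback runs.
foregroundChild('node', ['--version'], async (code, signal) => {
  console.error(`child exited with code=${code}, signal=${signal}`);
});
```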
diff --git a/deps/npm/node_modules/hosted-git-info/lib/index.js b/deps/npm/node_modules/hosted-git-info/lib/index.js
index 0c9d0b08c866b5..2a7100dcee6e78 100644
--- a/deps/npm/node_modules/hosted-git-info/lib/index.js
+++ b/deps/npm/node_modules/hosted-git-info/lib/index.js
@@ -7,6 +7,26 @@ const parseUrl = require('./parse-url.js')
const cache = new LRUCache({ max: 1000 })
+function unknownHostedUrl (url) {
+ try {
+ const {
+ protocol,
+ hostname,
+ pathname,
+ } = new URL(url)
+
+ if (!hostname) {
+ return null
+ }
+
+ const proto = /(?:git\+)http:$/.test(protocol) ? 'http:' : 'https:'
+ const path = pathname.replace(/\.git$/, '')
+ return `${proto}//${hostname}${path}`
+ } catch {
+ return null
+ }
+}
+
class GitHost {
constructor (type, user, auth, project, committish, defaultRepresentation, opts = {}) {
Object.assign(this, GitHost.#gitHosts[type], {
@@ -56,6 +76,34 @@ class GitHost {
return cache.get(key)
}
+ static fromManifest (manifest, opts = {}) {
+ if (!manifest || typeof manifest !== 'object') {
+ return
+ }
+
+ const r = manifest.repository
+ // TODO: look into also checking the `bugs`/`homepage` URLs
+
+ const rurl = r && (
+ typeof r === 'string'
+ ? r
+ : typeof r === 'object' && typeof r.url === 'string'
+ ? r.url
+ : null
+ )
+
+ if (!rurl) {
+ throw new Error('no repository')
+ }
+
+ const info = (rurl && GitHost.fromUrl(rurl.replace(/^git\+/, ''), opts)) || null
+ if (info) {
+ return info
+ }
+ const unk = unknownHostedUrl(rurl)
+ return GitHost.fromUrl(unk, opts) || unk
+ }
+
static parseUrl (url) {
return parseUrl(url)
}
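A short sketch of calling the new `fromManifest()` helper added above; the manifest shapes mirror a package.json `repository` field, and the URLs are illustrative:

```js
const hostedGitInfo = require('hosted-git-info');

// Known host: returns a GitHost instance with the usual helpers.
const info = hostedGitInfo.fromManifest({
  repository: { type: 'git', url: 'git+https://github.com/npm/cli.git' },
});
console.log(info.browse()); // https://github.com/npm/cli

// Unknown host: falls back to the cleaned-up https URL string built by unknownHostedUrl().
const unknown = hostedGitInfo.fromManifest({
  repository: 'git+https://git.example.com/some/project.git',
});
console.log(unknown); // https://git.example.com/some/project
```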
diff --git a/deps/npm/node_modules/hosted-git-info/package.json b/deps/npm/node_modules/hosted-git-info/package.json
index 78356159af7723..a9bb26be4a7044 100644
--- a/deps/npm/node_modules/hosted-git-info/package.json
+++ b/deps/npm/node_modules/hosted-git-info/package.json
@@ -1,6 +1,6 @@
{
"name": "hosted-git-info",
- "version": "8.0.2",
+ "version": "8.1.0",
"description": "Provides metadata and conversions from repository urls for GitHub, Bitbucket and GitLab",
"main": "./lib/index.js",
"repository": {
@@ -35,7 +35,7 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^5.0.0",
- "@npmcli/template-oss": "4.23.4",
+ "@npmcli/template-oss": "4.24.3",
"tap": "^16.0.1"
},
"files": [
@@ -55,7 +55,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.23.4",
+ "version": "4.24.3",
"publish": "true"
}
}
diff --git a/deps/npm/node_modules/http-cache-semantics/index.js b/deps/npm/node_modules/http-cache-semantics/index.js
index 31fba4860024cd..f01e9a16b4781d 100644
--- a/deps/npm/node_modules/http-cache-semantics/index.js
+++ b/deps/npm/node_modules/http-cache-semantics/index.js
@@ -1,5 +1,22 @@
'use strict';
-// rfc7231 6.1
+
+/**
+ * @typedef {Object} HttpRequest
+ * @property {Record<string, string>} headers - Request headers
+ * @property {string} [method] - HTTP method
+ * @property {string} [url] - Request URL
+ */
+
+/**
+ * @typedef {Object} HttpResponse
+ * @property {Record<string, string>} headers - Response headers
+ * @property {number} [status] - HTTP status code
+ */
+
+/**
+ * Set of default cacheable status codes per RFC 7231 section 6.1.
+ * @type {Set<number>}
+ */
const statusCodeCacheableByDefault = new Set([
200,
203,
@@ -15,7 +32,11 @@ const statusCodeCacheableByDefault = new Set([
501,
]);
-// This implementation does not understand partial responses (206)
+/**
+ * Set of HTTP status codes that the cache implementation understands.
+ * Note: This implementation does not understand partial responses (206).
+ * @type {Set<number>}
+ */
const understoodStatuses = new Set([
200,
203,
@@ -33,13 +54,21 @@ const understoodStatuses = new Set([
501,
]);
+/**
+ * Set of HTTP error status codes.
+ * @type {Set<number>}
+ */
const errorStatusCodes = new Set([
500,
502,
- 503,
+ 503,
504,
]);
+/**
+ * Object representing hop-by-hop headers that should be removed.
+ * @type {Record<string, boolean>}
+ */
const hopByHopHeaders = {
date: true, // included, because we add Age update Date
connection: true,
@@ -52,6 +81,10 @@ const hopByHopHeaders = {
upgrade: true,
};
+/**
+ * Headers that are excluded from revalidation update.
+ * @type {Record<string, boolean>}
+ */
const excludedFromRevalidationUpdate = {
// Since the old body is reused, it doesn't make sense to change properties of the body
'content-length': true,
@@ -60,21 +93,37 @@ const excludedFromRevalidationUpdate = {
'content-range': true,
};
+/**
+ * Converts a string to a number or returns zero if the conversion fails.
+ * @param {string} s - The string to convert.
+ * @returns {number} The parsed number or 0.
+ */
function toNumberOrZero(s) {
const n = parseInt(s, 10);
return isFinite(n) ? n : 0;
}
-// RFC 5861
+/**
+ * Determines if the given response is an error response.
+ * Implements RFC 5861 behavior.
+ * @param {HttpResponse|undefined} response - The HTTP response object.
+ * @returns {boolean} true if the response is an error or undefined, false otherwise.
+ */
function isErrorResponse(response) {
// consider undefined response as faulty
- if(!response) {
- return true
+ if (!response) {
+ return true;
}
return errorStatusCodes.has(response.status);
}
+/**
+ * Parses a Cache-Control header string into an object.
+ * @param {string} [header] - The Cache-Control header value.
+ * @returns {Record<string, boolean|string>} An object representing Cache-Control directives.
+ */
function parseCacheControl(header) {
+    /** @type {Record<string, boolean|string>} */
const cc = {};
if (!header) return cc;
@@ -89,6 +138,11 @@ function parseCacheControl(header) {
return cc;
}
+/**
+ * Formats a Cache-Control directives object into a header string.
+ * @param {Record<string, boolean|string>} cc - The Cache-Control directives.
+ * @returns {string|undefined} A formatted Cache-Control header string or undefined if empty.
+ */
function formatCacheControl(cc) {
let parts = [];
for (const k in cc) {
@@ -102,6 +156,17 @@ function formatCacheControl(cc) {
}
module.exports = class CachePolicy {
+ /**
+ * Creates a new CachePolicy instance.
+ * @param {HttpRequest} req - Incoming client request.
+ * @param {HttpResponse} res - Received server response.
+ * @param {Object} [options={}] - Configuration options.
+ * @param {boolean} [options.shared=true] - Is the cache shared (a public proxy)? `false` for personal browser caches.
+ * @param {number} [options.cacheHeuristic=0.1] - Fallback heuristic (age fraction) for cache duration.
+ * @param {number} [options.immutableMinTimeToLive=86400000] - Minimum TTL for immutable responses in milliseconds.
+ * @param {boolean} [options.ignoreCargoCult=false] - Detect nonsense cache headers, and override them.
+ * @param {any} [options._fromObject] - Internal parameter for deserialization. Do not use.
+ */
constructor(
req,
res,
@@ -123,29 +188,44 @@ module.exports = class CachePolicy {
}
this._assertRequestHasHeaders(req);
+ /** @type {number} Timestamp when the response was received */
this._responseTime = this.now();
+ /** @type {boolean} Indicates if the cache is shared */
this._isShared = shared !== false;
+ /** @type {boolean} Indicates if legacy cargo cult directives should be ignored */
+ this._ignoreCargoCult = !!ignoreCargoCult;
+ /** @type {number} Heuristic cache fraction */
this._cacheHeuristic =
undefined !== cacheHeuristic ? cacheHeuristic : 0.1; // 10% matches IE
+ /** @type {number} Minimum TTL for immutable responses in ms */
this._immutableMinTtl =
undefined !== immutableMinTimeToLive
? immutableMinTimeToLive
: 24 * 3600 * 1000;
+ /** @type {number} HTTP status code */
this._status = 'status' in res ? res.status : 200;
+        /** @type {Record<string, string>} Response headers */
this._resHeaders = res.headers;
+        /** @type {Record<string, boolean|string>} Parsed Cache-Control directives from response */
this._rescc = parseCacheControl(res.headers['cache-control']);
+ /** @type {string} HTTP method (e.g., GET, POST) */
this._method = 'method' in req ? req.method : 'GET';
+ /** @type {string} Request URL */
this._url = req.url;
+ /** @type {string} Host header from the request */
this._host = req.headers.host;
+ /** @type {boolean} Whether the request does not include an Authorization header */
this._noAuthorization = !req.headers.authorization;
+        /** @type {Record<string, string>|null} Request headers used for Vary matching */
this._reqHeaders = res.headers.vary ? req.headers : null; // Don't keep all request headers if they won't be used
+        /** @type {Record<string, boolean|string>} Parsed Cache-Control directives from request */
this._reqcc = parseCacheControl(req.headers['cache-control']);
// Assume that if someone uses legacy, non-standard uncecessary options they don't understand caching,
// so there's no point stricly adhering to the blindly copy&pasted directives.
if (
- ignoreCargoCult &&
+ this._ignoreCargoCult &&
'pre-check' in this._rescc &&
'post-check' in this._rescc
) {
@@ -171,10 +251,18 @@ module.exports = class CachePolicy {
}
}
+ /**
+ * You can monkey-patch it for testing.
+ * @returns {number} Current time in milliseconds.
+ */
now() {
return Date.now();
}
+ /**
+ * Determines if the response is storable in a cache.
+ * @returns {boolean} `false` if can never be cached.
+ */
storable() {
// The "no-store" request directive indicates that a cache MUST NOT store any part of either this request or any response to it.
return !!(
@@ -208,62 +296,160 @@ module.exports = class CachePolicy {
);
}
+ /**
+ * @returns {boolean} true if expiration is explicitly defined.
+ */
_hasExplicitExpiration() {
// 4.2.1 Calculating Freshness Lifetime
- return (
+ return !!(
(this._isShared && this._rescc['s-maxage']) ||
this._rescc['max-age'] ||
this._resHeaders.expires
);
}
+ /**
+ * @param {HttpRequest} req - a request
+ * @throws {Error} if the headers are missing.
+ */
_assertRequestHasHeaders(req) {
if (!req || !req.headers) {
throw Error('Request headers missing');
}
}
+ /**
+ * Checks if the request matches the cache and can be satisfied from the cache immediately,
+ * without having to make a request to the server.
+ *
+ * This doesn't support `stale-while-revalidate`. See `evaluateRequest()` for a more complete solution.
+ *
+ * @param {HttpRequest} req - The new incoming HTTP request.
+     * @returns {boolean} `true` if the cached response used to construct this cache policy satisfies the request without revalidation.
+ */
satisfiesWithoutRevalidation(req) {
+ const result = this.evaluateRequest(req);
+ return !result.revalidation;
+ }
+
+ /**
+     * @param {{headers: Record<string, string>, synchronous: boolean}|undefined} revalidation - Revalidation information, if any.
+     * @returns {{response: {headers: Record<string, string>}, revalidation: {headers: Record<string, string>, synchronous: boolean}|undefined}} An object with a cached response headers and revalidation info.
+ */
+ _evaluateRequestHitResult(revalidation) {
+ return {
+ response: {
+ headers: this.responseHeaders(),
+ },
+ revalidation,
+ };
+ }
+
+ /**
+ * @param {HttpRequest} request - new incoming
+ * @param {boolean} synchronous - whether revalidation must be synchronous (not s-w-r).
+     * @returns {{headers: Record<string, string>, synchronous: boolean}} An object with revalidation headers and a synchronous flag.
+ */
+ _evaluateRequestRevalidation(request, synchronous) {
+ return {
+ synchronous,
+ headers: this.revalidationHeaders(request),
+ };
+ }
+
+ /**
+ * @param {HttpRequest} request - new incoming
+     * @returns {{response: undefined, revalidation: {headers: Record<string, string>, synchronous: boolean}}} An object indicating no cached response and revalidation details.
+ */
+ _evaluateRequestMissResult(request) {
+ return {
+ response: undefined,
+ revalidation: this._evaluateRequestRevalidation(request, true),
+ };
+ }
+
+ /**
+ * Checks if the given request matches this cache entry, and how the cache can be used to satisfy it. Returns an object with:
+ *
+ * ```
+ * {
+ * // If defined, you must send a request to the server.
+ * revalidation: {
+ * headers: {}, // HTTP headers to use when sending the revalidation response
+ * // If true, you MUST wait for a response from the server before using the cache
+ * // If false, this is stale-while-revalidate. The cache is stale, but you can use it while you update it asynchronously.
+ * synchronous: bool,
+ * },
+ * // If defined, you can use this cached response.
+ * response: {
+ * headers: {}, // Updated cached HTTP headers you must use when responding to the client
+ * },
+ * }
+ * ```
+ * @param {HttpRequest} req - new incoming HTTP request
+     * @returns {{response: {headers: Record<string, string>}|undefined, revalidation: {headers: Record<string, string>, synchronous: boolean}|undefined}} An object containing keys:
+     * - revalidation: { headers: Record<string, string>, synchronous: boolean } Set if you should send this to the origin server
+     * - response: { headers: Record<string, string> } Set if you can respond to the client with these cached headers
+ */
+ evaluateRequest(req) {
this._assertRequestHasHeaders(req);
+ // In all circumstances, a cache MUST NOT ignore the must-revalidate directive
+ if (this._rescc['must-revalidate']) {
+ return this._evaluateRequestMissResult(req);
+ }
+
+ if (!this._requestMatches(req, false)) {
+ return this._evaluateRequestMissResult(req);
+ }
+
// When presented with a request, a cache MUST NOT reuse a stored response, unless:
// the presented request does not contain the no-cache pragma (Section 5.4), nor the no-cache cache directive,
// unless the stored response is successfully validated (Section 4.3), and
const requestCC = parseCacheControl(req.headers['cache-control']);
+
if (requestCC['no-cache'] || /no-cache/.test(req.headers.pragma)) {
- return false;
+ return this._evaluateRequestMissResult(req);
}
- if (requestCC['max-age'] && this.age() > requestCC['max-age']) {
- return false;
+ if (requestCC['max-age'] && this.age() > toNumberOrZero(requestCC['max-age'])) {
+ return this._evaluateRequestMissResult(req);
}
- if (
- requestCC['min-fresh'] &&
- this.timeToLive() < 1000 * requestCC['min-fresh']
- ) {
- return false;
+ if (requestCC['min-fresh'] && this.maxAge() - this.age() < toNumberOrZero(requestCC['min-fresh'])) {
+ return this._evaluateRequestMissResult(req);
}
// the stored response is either:
// fresh, or allowed to be served stale
if (this.stale()) {
- const allowsStale =
- requestCC['max-stale'] &&
- !this._rescc['must-revalidate'] &&
- (true === requestCC['max-stale'] ||
- requestCC['max-stale'] > this.age() - this.maxAge());
- if (!allowsStale) {
- return false;
+ // If a value is present, then the client is willing to accept a response that has
+ // exceeded its freshness lifetime by no more than the specified number of seconds
+ const allowsStaleWithoutRevalidation = 'max-stale' in requestCC &&
+ (true === requestCC['max-stale'] || requestCC['max-stale'] > this.age() - this.maxAge());
+
+ if (allowsStaleWithoutRevalidation) {
+ return this._evaluateRequestHitResult(undefined);
+ }
+
+ if (this.useStaleWhileRevalidate()) {
+ return this._evaluateRequestHitResult(this._evaluateRequestRevalidation(req, false));
}
+
+ return this._evaluateRequestMissResult(req);
}
- return this._requestMatches(req, false);
+ return this._evaluateRequestHitResult(undefined);
}
+ /**
+ * @param {HttpRequest} req - check if this is for the same cache entry
+ * @param {boolean} allowHeadMethod - allow a HEAD method to match.
+ * @returns {boolean} `true` if the request matches.
+ */
_requestMatches(req, allowHeadMethod) {
// The presented effective request URI and that of the stored response match, and
- return (
+ return !!(
(!this._url || this._url === req.url) &&
this._host === req.headers.host &&
// the request method associated with the stored response allows it to be used for the presented request, and
@@ -275,15 +461,24 @@ module.exports = class CachePolicy {
);
}
+ /**
+ * Determines whether storing authenticated responses is allowed.
+ * @returns {boolean} `true` if allowed.
+ */
_allowsStoringAuthenticated() {
- // following Cache-Control response directives (Section 5.2.2) have such an effect: must-revalidate, public, and s-maxage.
- return (
+ // following Cache-Control response directives (Section 5.2.2) have such an effect: must-revalidate, public, and s-maxage.
+ return !!(
this._rescc['must-revalidate'] ||
this._rescc.public ||
this._rescc['s-maxage']
);
}
+ /**
+ * Checks whether the Vary header in the response matches the new request.
+ * @param {HttpRequest} req - incoming HTTP request
+ * @returns {boolean} `true` if the vary headers match.
+ */
_varyMatches(req) {
if (!this._resHeaders.vary) {
return true;
@@ -304,7 +499,13 @@ module.exports = class CachePolicy {
return true;
}
+ /**
+ * Creates a copy of the given headers without any hop-by-hop headers.
+     * @param {Record<string, string>} inHeaders - old headers from the cached response
+     * @returns {Record<string, string>} A new headers object without hop-by-hop headers.
+ */
_copyWithoutHopByHopHeaders(inHeaders) {
+        /** @type {Record<string, string>} */
const headers = {};
for (const name in inHeaders) {
if (hopByHopHeaders[name]) continue;
@@ -330,6 +531,11 @@ module.exports = class CachePolicy {
return headers;
}
+ /**
+ * Returns the response headers adjusted for serving the cached response.
+ * Removes hop-by-hop headers and updates the Age and Date headers.
+     * @returns {Record<string, string>} The adjusted response headers.
+ */
responseHeaders() {
const headers = this._copyWithoutHopByHopHeaders(this._resHeaders);
const age = this.age();
@@ -351,8 +557,8 @@ module.exports = class CachePolicy {
}
/**
- * Value of the Date response header or current time if Date was invalid
- * @return timestamp
+ * Returns the Date header value from the response or the current time if invalid.
+ * @returns {number} Timestamp (in milliseconds) representing the Date header or response time.
*/
date() {
const serverDate = Date.parse(this._resHeaders.date);
@@ -365,8 +571,7 @@ module.exports = class CachePolicy {
/**
* Value of the Age header, in seconds, updated for the current time.
* May be fractional.
- *
- * @return Number
+ * @returns {number} The age in seconds.
*/
age() {
let age = this._ageValue();
@@ -375,16 +580,21 @@ module.exports = class CachePolicy {
return age + residentTime;
}
+ /**
+ * @returns {number} The Age header value as a number.
+ */
_ageValue() {
return toNumberOrZero(this._resHeaders.age);
}
/**
- * Value of applicable max-age (or heuristic equivalent) in seconds. This counts since response's `Date`.
+ * Possibly outdated value of applicable max-age (or heuristic equivalent) in seconds.
+ * This counts since response's `Date`.
*
* For an up-to-date value, see `timeToLive()`.
*
- * @return Number
+ * Returns the maximum age (freshness lifetime) of the response in seconds.
+ * @returns {number} The max-age value in seconds.
*/
maxAge() {
if (!this.storable() || this._rescc['no-cache']) {
@@ -446,29 +656,57 @@ module.exports = class CachePolicy {
return defaultMinTtl;
}
+ /**
+ * Remaining time this cache entry may be useful for, in *milliseconds*.
+ * You can use this as an expiration time for your cache storage.
+ *
+ * Prefer this method over `maxAge()`, because it includes other factors like `age` and `stale-while-revalidate`.
+ * @returns {number} Time-to-live in milliseconds.
+ */
timeToLive() {
const age = this.maxAge() - this.age();
const staleIfErrorAge = age + toNumberOrZero(this._rescc['stale-if-error']);
const staleWhileRevalidateAge = age + toNumberOrZero(this._rescc['stale-while-revalidate']);
- return Math.max(0, age, staleIfErrorAge, staleWhileRevalidateAge) * 1000;
+ return Math.round(Math.max(0, age, staleIfErrorAge, staleWhileRevalidateAge) * 1000);
}
+ /**
+ * If true, this cache entry is past its expiration date.
+ * Note that stale cache may be useful sometimes, see `evaluateRequest()`.
+ * @returns {boolean} `false` doesn't mean it's fresh nor usable
+ */
stale() {
return this.maxAge() <= this.age();
}
+ /**
+ * @returns {boolean} `true` if `stale-if-error` condition allows use of a stale response.
+ */
_useStaleIfError() {
return this.maxAge() + toNumberOrZero(this._rescc['stale-if-error']) > this.age();
}
+ /** See `evaluateRequest()` for a more complete solution
+ * @returns {boolean} `true` if `stale-while-revalidate` is currently allowed.
+ */
useStaleWhileRevalidate() {
- return this.maxAge() + toNumberOrZero(this._rescc['stale-while-revalidate']) > this.age();
+ const swr = toNumberOrZero(this._rescc['stale-while-revalidate']);
+ return swr > 0 && this.maxAge() + swr > this.age();
}
+ /**
+ * Creates a `CachePolicy` instance from a serialized object.
+ * @param {Object} obj - The serialized object.
+ * @returns {CachePolicy} A new CachePolicy instance.
+ */
static fromObject(obj) {
return new this(undefined, undefined, { _fromObject: obj });
}
+ /**
+ * @param {any} obj - The serialized object.
+ * @throws {Error} If already initialized or if the object is invalid.
+ */
_fromObject(obj) {
if (this._responseTime) throw Error('Reinitialized');
if (!obj || obj.v !== 1) throw Error('Invalid serialization');
@@ -478,6 +716,7 @@ module.exports = class CachePolicy {
this._cacheHeuristic = obj.ch;
this._immutableMinTtl =
obj.imm !== undefined ? obj.imm : 24 * 3600 * 1000;
+ this._ignoreCargoCult = !!obj.icc;
this._status = obj.st;
this._resHeaders = obj.resh;
this._rescc = obj.rescc;
@@ -489,6 +728,10 @@ module.exports = class CachePolicy {
this._reqcc = obj.reqcc;
}
+ /**
+ * Serializes the `CachePolicy` instance into a JSON-serializable object.
+ * @returns {Object} The serialized object.
+ */
toObject() {
return {
v: 1,
@@ -496,6 +739,7 @@ module.exports = class CachePolicy {
sh: this._isShared,
ch: this._cacheHeuristic,
imm: this._immutableMinTtl,
+ icc: this._ignoreCargoCult,
st: this._status,
resh: this._resHeaders,
rescc: this._rescc,
@@ -514,6 +758,8 @@ module.exports = class CachePolicy {
*
* Hop by hop headers are always stripped.
* Revalidation headers may be added or removed, depending on request.
+ * @param {HttpRequest} incomingReq - The incoming HTTP request.
+     * @returns {Record<string, string>} The headers for the revalidation request.
*/
revalidationHeaders(incomingReq) {
this._assertRequestHasHeaders(incomingReq);
@@ -578,17 +824,22 @@ module.exports = class CachePolicy {
* Returns {policy, modified} where modified is a boolean indicating
* whether the response body has been modified, and old cached body can't be used.
*
- * @return {Object} {policy: CachePolicy, modified: Boolean}
+ * @param {HttpRequest} request - The latest HTTP request asking for the cached entry.
+ * @param {HttpResponse} response - The latest revalidation HTTP response from the origin server.
+ * @returns {{policy: CachePolicy, modified: boolean, matches: boolean}} The updated policy and modification status.
+ * @throws {Error} If the response headers are missing.
*/
revalidatedPolicy(request, response) {
this._assertRequestHasHeaders(request);
- if(this._useStaleIfError() && isErrorResponse(response)) { // I consider the revalidation request unsuccessful
+
+ if (this._useStaleIfError() && isErrorResponse(response)) {
return {
- modified: false,
- matches: false,
- policy: this,
+ policy: this,
+ modified: false,
+ matches: true,
};
}
+
if (!response || !response.headers) {
throw Error('Response headers missing');
}
@@ -635,9 +886,16 @@ module.exports = class CachePolicy {
}
}
+ const optionsCopy = {
+ shared: this._isShared,
+ cacheHeuristic: this._cacheHeuristic,
+ immutableMinTimeToLive: this._immutableMinTtl,
+ ignoreCargoCult: this._ignoreCargoCult,
+ };
+
if (!matches) {
return {
- policy: new this.constructor(request, response),
+ policy: new this.constructor(request, response, optionsCopy),
// Client receiving 304 without body, even if it's invalid/mismatched has no option
// but to reuse a cached body. We don't have a good way to tell clients to do
// error recovery in such case.
@@ -662,11 +920,7 @@ module.exports = class CachePolicy {
headers,
});
return {
- policy: new this.constructor(request, newResponse, {
- shared: this._isShared,
- cacheHeuristic: this._cacheHeuristic,
- immutableMinTimeToLive: this._immutableMinTtl,
- }),
+ policy: new this.constructor(request, newResponse, optionsCopy),
modified: false,
matches: true,
};
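To make the documented `evaluateRequest()` contract concrete, a minimal sketch (the request/response objects are illustrative):

```js
const CachePolicy = require('http-cache-semantics');

const req = { method: 'GET', url: '/data', headers: { host: 'example.com' } };
const res = {
  status: 200,
  headers: { 'cache-control': 'max-age=60, stale-while-revalidate=120' },
};

const policy = new CachePolicy(req, res, { shared: false });
console.log(policy.storable());   // true
console.log(policy.timeToLive()); // 180000 (max-age plus stale-while-revalidate, in ms)

const result = policy.evaluateRequest(req);
if (result.response) {
  // Serve the cached body with the refreshed headers.
  console.log('cache hit', result.response.headers);
  if (result.revalidation && !result.revalidation.synchronous) {
    // stale-while-revalidate: refresh in the background with these headers.
    console.log('background revalidation', result.revalidation.headers);
  }
} else {
  // Miss or must-revalidate: contact the origin with the suggested conditional headers.
  console.log('revalidate first', result.revalidation.headers);
}
```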
diff --git a/deps/npm/node_modules/http-cache-semantics/package.json b/deps/npm/node_modules/http-cache-semantics/package.json
index defbb045a63832..d45dfa1274e0da 100644
--- a/deps/npm/node_modules/http-cache-semantics/package.json
+++ b/deps/npm/node_modules/http-cache-semantics/package.json
@@ -1,18 +1,22 @@
{
"name": "http-cache-semantics",
- "version": "4.1.1",
+ "version": "4.2.0",
"description": "Parses Cache-Control and other headers. Helps building correct HTTP caches and proxies",
- "repository": "https://github.com/kornelski/http-cache-semantics.git",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/kornelski/http-cache-semantics.git"
+ },
"main": "index.js",
+ "types": "index.js",
"scripts": {
"test": "mocha"
},
"files": [
"index.js"
],
- "author": "Kornel Lesiński (https://kornel.ski/)",
+ "author": "Kornel Lesiński (https://kornel.ski/)",
"license": "BSD-2-Clause",
"devDependencies": {
- "mocha": "^10.0"
+ "mocha": "^11.0"
}
}
diff --git a/deps/npm/node_modules/https-proxy-agent/dist/index.js b/deps/npm/node_modules/https-proxy-agent/dist/index.js
index 0c91722035f07e..1857f464724e20 100644
--- a/deps/npm/node_modules/https-proxy-agent/dist/index.js
+++ b/deps/npm/node_modules/https-proxy-agent/dist/index.js
@@ -35,6 +35,17 @@ const agent_base_1 = require("agent-base");
const url_1 = require("url");
const parse_proxy_response_1 = require("./parse-proxy-response");
const debug = (0, debug_1.default)('https-proxy-agent');
+const setServernameFromNonIpHost = (options) => {
+ if (options.servername === undefined &&
+ options.host &&
+ !net.isIP(options.host)) {
+ return {
+ ...options,
+ servername: options.host,
+ };
+ }
+ return options;
+};
/**
* The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to
* the specified "HTTP(s) proxy server" in order to proxy HTTPS requests.
@@ -82,11 +93,7 @@ class HttpsProxyAgent extends agent_base_1.Agent {
let socket;
if (proxy.protocol === 'https:') {
debug('Creating `tls.Socket`: %o', this.connectOpts);
- const servername = this.connectOpts.servername || this.connectOpts.host;
- socket = tls.connect({
- ...this.connectOpts,
- servername,
- });
+ socket = tls.connect(setServernameFromNonIpHost(this.connectOpts));
}
else {
debug('Creating `net.Socket`: %o', this.connectOpts);
@@ -122,11 +129,9 @@ class HttpsProxyAgent extends agent_base_1.Agent {
// The proxy is connecting to a TLS server, so upgrade
// this socket connection to a TLS connection.
debug('Upgrading socket connection to TLS');
- const servername = opts.servername || opts.host;
return tls.connect({
- ...omit(opts, 'host', 'path', 'port'),
+ ...omit(setServernameFromNonIpHost(opts), 'host', 'path', 'port'),
socket,
- servername,
});
}
return socket;
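For completeness, a minimal usage sketch of the agent itself; the SNI change above happens inside `connect()`, so callers are unaffected (the proxy URL is a placeholder):

```js
const https = require('https');
const { HttpsProxyAgent } = require('https-proxy-agent');

// servername is now only derived from non-IP hosts, so TLS certificate checks
// keep working when the target (or proxy) is addressed by a raw IP.
const agent = new HttpsProxyAgent('http://proxy.example.com:8080');

https.get('https://registry.npmjs.org/', { agent }, (res) => {
  console.log('status:', res.statusCode);
  res.resume();
});
```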
diff --git a/deps/npm/node_modules/https-proxy-agent/package.json b/deps/npm/node_modules/https-proxy-agent/package.json
index 3c793b769dc5d9..51b7e1175ff51b 100644
--- a/deps/npm/node_modules/https-proxy-agent/package.json
+++ b/deps/npm/node_modules/https-proxy-agent/package.json
@@ -1,6 +1,6 @@
{
"name": "https-proxy-agent",
- "version": "7.0.5",
+ "version": "7.0.6",
"description": "An HTTP(s) proxy `http.Agent` implementation for HTTPS",
"main": "./dist/index.js",
"types": "./dist/index.d.ts",
@@ -21,7 +21,7 @@
"author": "Nathan Rajlich (http://n8.io/)",
"license": "MIT",
"dependencies": {
- "agent-base": "^7.0.2",
+ "agent-base": "^7.1.2",
"debug": "4"
},
"devDependencies": {
diff --git a/deps/npm/node_modules/indent-string/index.js b/deps/npm/node_modules/indent-string/index.js
deleted file mode 100644
index e1ab804f2fd8a1..00000000000000
--- a/deps/npm/node_modules/indent-string/index.js
+++ /dev/null
@@ -1,35 +0,0 @@
-'use strict';
-
-module.exports = (string, count = 1, options) => {
- options = {
- indent: ' ',
- includeEmptyLines: false,
- ...options
- };
-
- if (typeof string !== 'string') {
- throw new TypeError(
- `Expected \`input\` to be a \`string\`, got \`${typeof string}\``
- );
- }
-
- if (typeof count !== 'number') {
- throw new TypeError(
- `Expected \`count\` to be a \`number\`, got \`${typeof count}\``
- );
- }
-
- if (typeof options.indent !== 'string') {
- throw new TypeError(
- `Expected \`options.indent\` to be a \`string\`, got \`${typeof options.indent}\``
- );
- }
-
- if (count === 0) {
- return string;
- }
-
- const regex = options.includeEmptyLines ? /^/gm : /^(?!\s*$)/gm;
-
- return string.replace(regex, options.indent.repeat(count));
-};
diff --git a/deps/npm/node_modules/indent-string/license b/deps/npm/node_modules/indent-string/license
deleted file mode 100644
index e7af2f77107d73..00000000000000
--- a/deps/npm/node_modules/indent-string/license
+++ /dev/null
@@ -1,9 +0,0 @@
-MIT License
-
-Copyright (c) Sindre Sorhus (sindresorhus.com)
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/deps/npm/node_modules/indent-string/package.json b/deps/npm/node_modules/indent-string/package.json
deleted file mode 100644
index 497bb83bbd9b7f..00000000000000
--- a/deps/npm/node_modules/indent-string/package.json
+++ /dev/null
@@ -1,37 +0,0 @@
-{
- "name": "indent-string",
- "version": "4.0.0",
- "description": "Indent each line in a string",
- "license": "MIT",
- "repository": "sindresorhus/indent-string",
- "author": {
- "name": "Sindre Sorhus",
- "email": "sindresorhus@gmail.com",
- "url": "sindresorhus.com"
- },
- "engines": {
- "node": ">=8"
- },
- "scripts": {
- "test": "xo && ava && tsd"
- },
- "files": [
- "index.js",
- "index.d.ts"
- ],
- "keywords": [
- "indent",
- "string",
- "pad",
- "align",
- "line",
- "text",
- "each",
- "every"
- ],
- "devDependencies": {
- "ava": "^1.4.1",
- "tsd": "^0.7.2",
- "xo": "^0.24.0"
- }
-}
diff --git a/deps/npm/node_modules/is-cidr/LICENSE b/deps/npm/node_modules/is-cidr/LICENSE
new file mode 100644
index 00000000000000..9669c20f85511d
--- /dev/null
+++ b/deps/npm/node_modules/is-cidr/LICENSE
@@ -0,0 +1,22 @@
+Copyright (c) silverwind
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/deps/npm/node_modules/is-cidr/package.json b/deps/npm/node_modules/is-cidr/package.json
index 4b0e95b9c78c7a..2e512b947e7f1d 100644
--- a/deps/npm/node_modules/is-cidr/package.json
+++ b/deps/npm/node_modules/is-cidr/package.json
@@ -1,6 +1,6 @@
{
"name": "is-cidr",
- "version": "5.1.0",
+ "version": "5.1.1",
"description": "Check if a string is an IP address in CIDR notation",
"author": "silverwind ",
"contributors": [
@@ -23,18 +23,17 @@
"cidr-regex": "^4.1.1"
},
"devDependencies": {
- "@types/node": "20.12.12",
+ "@types/node": "22.13.4",
"eslint": "8.57.0",
- "eslint-config-silverwind": "85.1.4",
- "eslint-config-silverwind-typescript": "3.2.7",
- "typescript": "5.4.5",
- "typescript-config-silverwind": "4.3.2",
- "updates": "16.1.1",
- "versions": "12.0.2",
- "vite": "5.2.11",
- "vite-config-silverwind": "1.1.2",
- "vite-plugin-dts": "3.9.1",
- "vitest": "1.6.0",
- "vitest-config-silverwind": "9.0.6"
+ "eslint-config-silverwind": "99.0.0",
+ "eslint-config-silverwind-typescript": "9.2.2",
+ "typescript": "5.7.3",
+ "typescript-config-silverwind": "7.0.0",
+ "updates": "16.4.2",
+ "versions": "12.1.3",
+ "vite": "6.1.0",
+ "vite-config-silverwind": "4.0.0",
+ "vitest": "3.0.5",
+ "vitest-config-silverwind": "10.0.0"
}
}
diff --git a/deps/npm/node_modules/libnpmaccess/package.json b/deps/npm/node_modules/libnpmaccess/package.json
index 0022437adadc60..c13cea28bce064 100644
--- a/deps/npm/node_modules/libnpmaccess/package.json
+++ b/deps/npm/node_modules/libnpmaccess/package.json
@@ -18,7 +18,7 @@
"devDependencies": {
"@npmcli/eslint-config": "^5.0.1",
"@npmcli/mock-registry": "^1.0.0",
- "@npmcli/template-oss": "4.23.3",
+ "@npmcli/template-oss": "4.24.4",
"nock": "^13.3.3",
"tap": "^16.3.8"
},
@@ -42,7 +42,7 @@
],
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.23.3",
+ "version": "4.24.4",
"content": "../../scripts/template-oss/index.js"
},
"tap": {
diff --git a/deps/npm/node_modules/libnpmdiff/package.json b/deps/npm/node_modules/libnpmdiff/package.json
index ccb499e78ff66d..9e2fff29442a1e 100644
--- a/deps/npm/node_modules/libnpmdiff/package.json
+++ b/deps/npm/node_modules/libnpmdiff/package.json
@@ -1,6 +1,6 @@
{
"name": "libnpmdiff",
- "version": "7.0.0",
+ "version": "7.0.1",
"description": "The registry diff",
"repository": {
"type": "git",
@@ -43,11 +43,11 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^5.0.1",
- "@npmcli/template-oss": "4.23.3",
+ "@npmcli/template-oss": "4.24.4",
"tap": "^16.3.8"
},
"dependencies": {
- "@npmcli/arborist": "^8.0.0",
+ "@npmcli/arborist": "^8.0.1",
"@npmcli/installed-package-contents": "^3.0.0",
"binary-extensions": "^2.3.0",
"diff": "^5.1.0",
@@ -58,7 +58,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.23.3",
+ "version": "4.24.4",
"content": "../../scripts/template-oss/index.js"
},
"tap": {
diff --git a/deps/npm/node_modules/libnpmexec/package.json b/deps/npm/node_modules/libnpmexec/package.json
index 497b971a0841bc..f987e492b212f2 100644
--- a/deps/npm/node_modules/libnpmexec/package.json
+++ b/deps/npm/node_modules/libnpmexec/package.json
@@ -1,6 +1,6 @@
{
"name": "libnpmexec",
- "version": "9.0.0",
+ "version": "9.0.1",
"files": [
"bin/",
"lib/"
@@ -52,7 +52,7 @@
"devDependencies": {
"@npmcli/eslint-config": "^5.0.1",
"@npmcli/mock-registry": "^1.0.0",
- "@npmcli/template-oss": "4.23.3",
+ "@npmcli/template-oss": "4.24.4",
"bin-links": "^5.0.0",
"chalk": "^5.2.0",
"just-extend": "^6.2.0",
@@ -60,7 +60,7 @@
"tap": "^16.3.8"
},
"dependencies": {
- "@npmcli/arborist": "^8.0.0",
+ "@npmcli/arborist": "^8.0.1",
"@npmcli/run-script": "^9.0.1",
"ci-info": "^4.0.0",
"npm-package-arg": "^12.0.0",
@@ -73,7 +73,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.23.3",
+ "version": "4.24.4",
"content": "../../scripts/template-oss/index.js"
}
}
diff --git a/deps/npm/node_modules/libnpmfund/package.json b/deps/npm/node_modules/libnpmfund/package.json
index 07c1e33f2a7c38..44599e839ef4a2 100644
--- a/deps/npm/node_modules/libnpmfund/package.json
+++ b/deps/npm/node_modules/libnpmfund/package.json
@@ -1,6 +1,6 @@
{
"name": "libnpmfund",
- "version": "6.0.0",
+ "version": "6.0.1",
"main": "lib/index.js",
"files": [
"bin/",
@@ -42,18 +42,18 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^5.0.1",
- "@npmcli/template-oss": "4.23.3",
+ "@npmcli/template-oss": "4.24.4",
"tap": "^16.3.8"
},
"dependencies": {
- "@npmcli/arborist": "^8.0.0"
+ "@npmcli/arborist": "^8.0.1"
},
"engines": {
"node": "^18.17.0 || >=20.5.0"
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.23.3",
+ "version": "4.24.4",
"content": "../../scripts/template-oss/index.js"
},
"tap": {
diff --git a/deps/npm/node_modules/libnpmhook/package.json b/deps/npm/node_modules/libnpmhook/package.json
index 09157ab08cb209..1b707ed9c37b8f 100644
--- a/deps/npm/node_modules/libnpmhook/package.json
+++ b/deps/npm/node_modules/libnpmhook/package.json
@@ -36,7 +36,7 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^5.0.1",
- "@npmcli/template-oss": "4.23.3",
+ "@npmcli/template-oss": "4.24.4",
"nock": "^13.3.3",
"tap": "^16.3.8"
},
@@ -45,7 +45,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.23.3",
+ "version": "4.24.4",
"content": "../../scripts/template-oss/index.js"
},
"tap": {
diff --git a/deps/npm/node_modules/libnpmorg/package.json b/deps/npm/node_modules/libnpmorg/package.json
index 38800308a31a4e..55d2802ffbb518 100644
--- a/deps/npm/node_modules/libnpmorg/package.json
+++ b/deps/npm/node_modules/libnpmorg/package.json
@@ -29,7 +29,7 @@
],
"devDependencies": {
"@npmcli/eslint-config": "^5.0.1",
- "@npmcli/template-oss": "4.23.3",
+ "@npmcli/template-oss": "4.24.4",
"minipass": "^7.1.1",
"nock": "^13.3.3",
"tap": "^16.3.8"
@@ -50,7 +50,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.23.3",
+ "version": "4.24.4",
"content": "../../scripts/template-oss/index.js"
},
"tap": {
diff --git a/deps/npm/node_modules/libnpmpack/package.json b/deps/npm/node_modules/libnpmpack/package.json
index 35d12425b02ff7..3d29025d85066d 100644
--- a/deps/npm/node_modules/libnpmpack/package.json
+++ b/deps/npm/node_modules/libnpmpack/package.json
@@ -1,6 +1,6 @@
{
"name": "libnpmpack",
- "version": "8.0.0",
+ "version": "8.0.1",
"description": "Programmatic API for the bits behind npm pack",
"author": "GitHub Inc.",
"main": "lib/index.js",
@@ -24,7 +24,7 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^5.0.1",
- "@npmcli/template-oss": "4.23.3",
+ "@npmcli/template-oss": "4.24.4",
"nock": "^13.3.3",
"spawk": "^1.7.1",
"tap": "^16.3.8"
@@ -37,7 +37,7 @@
"bugs": "https://github.com/npm/libnpmpack/issues",
"homepage": "https://npmjs.com/package/libnpmpack",
"dependencies": {
- "@npmcli/arborist": "^8.0.0",
+ "@npmcli/arborist": "^8.0.1",
"@npmcli/run-script": "^9.0.1",
"npm-package-arg": "^12.0.0",
"pacote": "^19.0.0"
@@ -47,7 +47,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.23.3",
+ "version": "4.24.4",
"content": "../../scripts/template-oss/index.js"
},
"tap": {
diff --git a/deps/npm/node_modules/libnpmpublish/package.json b/deps/npm/node_modules/libnpmpublish/package.json
index 594f5041480b4a..1118679c90828d 100644
--- a/deps/npm/node_modules/libnpmpublish/package.json
+++ b/deps/npm/node_modules/libnpmpublish/package.json
@@ -27,7 +27,7 @@
"@npmcli/eslint-config": "^5.0.1",
"@npmcli/mock-globals": "^1.0.0",
"@npmcli/mock-registry": "^1.0.0",
- "@npmcli/template-oss": "4.23.3",
+ "@npmcli/template-oss": "4.24.4",
"nock": "^13.3.3",
"tap": "^16.3.8"
},
@@ -53,7 +53,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.23.3",
+ "version": "4.24.4",
"content": "../../scripts/template-oss/index.js"
},
"tap": {
diff --git a/deps/npm/node_modules/libnpmsearch/package.json b/deps/npm/node_modules/libnpmsearch/package.json
index a5d2ae424913ef..66c9cd289261fd 100644
--- a/deps/npm/node_modules/libnpmsearch/package.json
+++ b/deps/npm/node_modules/libnpmsearch/package.json
@@ -27,7 +27,7 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^5.0.1",
- "@npmcli/template-oss": "4.23.3",
+ "@npmcli/template-oss": "4.24.4",
"nock": "^13.3.3",
"tap": "^16.3.8"
},
@@ -46,7 +46,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.23.3",
+ "version": "4.24.4",
"content": "../../scripts/template-oss/index.js"
},
"tap": {
diff --git a/deps/npm/node_modules/libnpmteam/package.json b/deps/npm/node_modules/libnpmteam/package.json
index fd8f69669f15cf..f5ca76c6b1a164 100644
--- a/deps/npm/node_modules/libnpmteam/package.json
+++ b/deps/npm/node_modules/libnpmteam/package.json
@@ -17,7 +17,7 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^5.0.1",
- "@npmcli/template-oss": "4.23.3",
+ "@npmcli/template-oss": "4.24.4",
"nock": "^13.3.3",
"tap": "^16.3.8"
},
@@ -40,7 +40,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.23.3",
+ "version": "4.24.4",
"content": "../../scripts/template-oss/index.js"
},
"tap": {
diff --git a/deps/npm/node_modules/libnpmversion/package.json b/deps/npm/node_modules/libnpmversion/package.json
index cdc9e7bbdf718a..7c99fa9c5c0708 100644
--- a/deps/npm/node_modules/libnpmversion/package.json
+++ b/deps/npm/node_modules/libnpmversion/package.json
@@ -33,7 +33,7 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^5.0.1",
- "@npmcli/template-oss": "4.23.3",
+ "@npmcli/template-oss": "4.24.4",
"require-inject": "^1.4.4",
"tap": "^16.3.8"
},
@@ -49,7 +49,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.23.3",
+ "version": "4.24.4",
"content": "../../scripts/template-oss/index.js"
}
}
diff --git a/deps/npm/node_modules/minipass-fetch/lib/index.js b/deps/npm/node_modules/minipass-fetch/lib/index.js
index da402161670e65..f0f4bb66dbb673 100644
--- a/deps/npm/node_modules/minipass-fetch/lib/index.js
+++ b/deps/npm/node_modules/minipass-fetch/lib/index.js
@@ -318,8 +318,7 @@ const fetch = async (url, opts) => {
if (codings === 'deflate' || codings === 'x-deflate') {
// handle the infamous raw deflate response from old servers
// a hack for old IIS and Apache servers
- const raw = res.pipe(new Minipass())
- raw.once('data', chunk => {
+ res.once('data', chunk => {
// see http://stackoverflow.com/questions/37519828
const decoder = (chunk[0] & 0x0F) === 0x08
? new zlib.Inflate()
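The hunk above drops the extra `Minipass` tee and sniffs the first byte of the response directly. A standalone illustration of that byte check (plain Node `zlib`, not minipass-fetch internals):

```js
const zlib = require('zlib');

function inflaterFor(firstChunk) {
  // A zlib-wrapped deflate stream starts with a CMF byte whose low nibble is 8
  // (CM = deflate, e.g. 0x78); old IIS/Apache servers send raw deflate with no header.
  return (firstChunk[0] & 0x0f) === 0x08 ? zlib.createInflate() : zlib.createInflateRaw();
}

const wrapped = zlib.deflateSync(Buffer.from('hello'));
const raw = zlib.deflateRawSync(Buffer.from('hello'));

console.log(inflaterFor(wrapped).constructor.name); // Inflate
console.log(inflaterFor(raw).constructor.name);     // InflateRaw
```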
diff --git a/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/commonjs/constants.js b/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/commonjs/constants.js
deleted file mode 100644
index dfc2c1957bfc99..00000000000000
--- a/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/commonjs/constants.js
+++ /dev/null
@@ -1,123 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.constants = void 0;
-// Update with any zlib constants that are added or changed in the future.
-// Node v6 didn't export this, so we just hard code the version and rely
-// on all the other hard-coded values from zlib v4736. When node v6
-// support drops, we can just export the realZlibConstants object.
-const zlib_1 = __importDefault(require("zlib"));
-/* c8 ignore start */
-const realZlibConstants = zlib_1.default.constants || { ZLIB_VERNUM: 4736 };
-/* c8 ignore stop */
-exports.constants = Object.freeze(Object.assign(Object.create(null), {
- Z_NO_FLUSH: 0,
- Z_PARTIAL_FLUSH: 1,
- Z_SYNC_FLUSH: 2,
- Z_FULL_FLUSH: 3,
- Z_FINISH: 4,
- Z_BLOCK: 5,
- Z_OK: 0,
- Z_STREAM_END: 1,
- Z_NEED_DICT: 2,
- Z_ERRNO: -1,
- Z_STREAM_ERROR: -2,
- Z_DATA_ERROR: -3,
- Z_MEM_ERROR: -4,
- Z_BUF_ERROR: -5,
- Z_VERSION_ERROR: -6,
- Z_NO_COMPRESSION: 0,
- Z_BEST_SPEED: 1,
- Z_BEST_COMPRESSION: 9,
- Z_DEFAULT_COMPRESSION: -1,
- Z_FILTERED: 1,
- Z_HUFFMAN_ONLY: 2,
- Z_RLE: 3,
- Z_FIXED: 4,
- Z_DEFAULT_STRATEGY: 0,
- DEFLATE: 1,
- INFLATE: 2,
- GZIP: 3,
- GUNZIP: 4,
- DEFLATERAW: 5,
- INFLATERAW: 6,
- UNZIP: 7,
- BROTLI_DECODE: 8,
- BROTLI_ENCODE: 9,
- Z_MIN_WINDOWBITS: 8,
- Z_MAX_WINDOWBITS: 15,
- Z_DEFAULT_WINDOWBITS: 15,
- Z_MIN_CHUNK: 64,
- Z_MAX_CHUNK: Infinity,
- Z_DEFAULT_CHUNK: 16384,
- Z_MIN_MEMLEVEL: 1,
- Z_MAX_MEMLEVEL: 9,
- Z_DEFAULT_MEMLEVEL: 8,
- Z_MIN_LEVEL: -1,
- Z_MAX_LEVEL: 9,
- Z_DEFAULT_LEVEL: -1,
- BROTLI_OPERATION_PROCESS: 0,
- BROTLI_OPERATION_FLUSH: 1,
- BROTLI_OPERATION_FINISH: 2,
- BROTLI_OPERATION_EMIT_METADATA: 3,
- BROTLI_MODE_GENERIC: 0,
- BROTLI_MODE_TEXT: 1,
- BROTLI_MODE_FONT: 2,
- BROTLI_DEFAULT_MODE: 0,
- BROTLI_MIN_QUALITY: 0,
- BROTLI_MAX_QUALITY: 11,
- BROTLI_DEFAULT_QUALITY: 11,
- BROTLI_MIN_WINDOW_BITS: 10,
- BROTLI_MAX_WINDOW_BITS: 24,
- BROTLI_LARGE_MAX_WINDOW_BITS: 30,
- BROTLI_DEFAULT_WINDOW: 22,
- BROTLI_MIN_INPUT_BLOCK_BITS: 16,
- BROTLI_MAX_INPUT_BLOCK_BITS: 24,
- BROTLI_PARAM_MODE: 0,
- BROTLI_PARAM_QUALITY: 1,
- BROTLI_PARAM_LGWIN: 2,
- BROTLI_PARAM_LGBLOCK: 3,
- BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4,
- BROTLI_PARAM_SIZE_HINT: 5,
- BROTLI_PARAM_LARGE_WINDOW: 6,
- BROTLI_PARAM_NPOSTFIX: 7,
- BROTLI_PARAM_NDIRECT: 8,
- BROTLI_DECODER_RESULT_ERROR: 0,
- BROTLI_DECODER_RESULT_SUCCESS: 1,
- BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2,
- BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3,
- BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0,
- BROTLI_DECODER_PARAM_LARGE_WINDOW: 1,
- BROTLI_DECODER_NO_ERROR: 0,
- BROTLI_DECODER_SUCCESS: 1,
- BROTLI_DECODER_NEEDS_MORE_INPUT: 2,
- BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3,
- BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1,
- BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2,
- BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3,
- BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4,
- BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5,
- BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6,
- BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7,
- BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8,
- BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9,
- BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10,
- BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11,
- BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12,
- BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13,
- BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14,
- BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15,
- BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16,
- BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19,
- BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20,
- BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21,
- BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22,
- BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25,
- BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26,
- BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27,
- BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30,
- BROTLI_DECODER_ERROR_UNREACHABLE: -31,
-}, realZlibConstants));
-//# sourceMappingURL=constants.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/commonjs/package.json b/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/commonjs/package.json
deleted file mode 100644
index 5bbefffbabee39..00000000000000
--- a/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/commonjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
- "type": "commonjs"
-}
diff --git a/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/esm/constants.js b/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/esm/constants.js
deleted file mode 100644
index 7faf40be5068d0..00000000000000
--- a/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/esm/constants.js
+++ /dev/null
@@ -1,117 +0,0 @@
-// Update with any zlib constants that are added or changed in the future.
-// Node v6 didn't export this, so we just hard code the version and rely
-// on all the other hard-coded values from zlib v4736. When node v6
-// support drops, we can just export the realZlibConstants object.
-import realZlib from 'zlib';
-/* c8 ignore start */
-const realZlibConstants = realZlib.constants || { ZLIB_VERNUM: 4736 };
-/* c8 ignore stop */
-export const constants = Object.freeze(Object.assign(Object.create(null), {
- Z_NO_FLUSH: 0,
- Z_PARTIAL_FLUSH: 1,
- Z_SYNC_FLUSH: 2,
- Z_FULL_FLUSH: 3,
- Z_FINISH: 4,
- Z_BLOCK: 5,
- Z_OK: 0,
- Z_STREAM_END: 1,
- Z_NEED_DICT: 2,
- Z_ERRNO: -1,
- Z_STREAM_ERROR: -2,
- Z_DATA_ERROR: -3,
- Z_MEM_ERROR: -4,
- Z_BUF_ERROR: -5,
- Z_VERSION_ERROR: -6,
- Z_NO_COMPRESSION: 0,
- Z_BEST_SPEED: 1,
- Z_BEST_COMPRESSION: 9,
- Z_DEFAULT_COMPRESSION: -1,
- Z_FILTERED: 1,
- Z_HUFFMAN_ONLY: 2,
- Z_RLE: 3,
- Z_FIXED: 4,
- Z_DEFAULT_STRATEGY: 0,
- DEFLATE: 1,
- INFLATE: 2,
- GZIP: 3,
- GUNZIP: 4,
- DEFLATERAW: 5,
- INFLATERAW: 6,
- UNZIP: 7,
- BROTLI_DECODE: 8,
- BROTLI_ENCODE: 9,
- Z_MIN_WINDOWBITS: 8,
- Z_MAX_WINDOWBITS: 15,
- Z_DEFAULT_WINDOWBITS: 15,
- Z_MIN_CHUNK: 64,
- Z_MAX_CHUNK: Infinity,
- Z_DEFAULT_CHUNK: 16384,
- Z_MIN_MEMLEVEL: 1,
- Z_MAX_MEMLEVEL: 9,
- Z_DEFAULT_MEMLEVEL: 8,
- Z_MIN_LEVEL: -1,
- Z_MAX_LEVEL: 9,
- Z_DEFAULT_LEVEL: -1,
- BROTLI_OPERATION_PROCESS: 0,
- BROTLI_OPERATION_FLUSH: 1,
- BROTLI_OPERATION_FINISH: 2,
- BROTLI_OPERATION_EMIT_METADATA: 3,
- BROTLI_MODE_GENERIC: 0,
- BROTLI_MODE_TEXT: 1,
- BROTLI_MODE_FONT: 2,
- BROTLI_DEFAULT_MODE: 0,
- BROTLI_MIN_QUALITY: 0,
- BROTLI_MAX_QUALITY: 11,
- BROTLI_DEFAULT_QUALITY: 11,
- BROTLI_MIN_WINDOW_BITS: 10,
- BROTLI_MAX_WINDOW_BITS: 24,
- BROTLI_LARGE_MAX_WINDOW_BITS: 30,
- BROTLI_DEFAULT_WINDOW: 22,
- BROTLI_MIN_INPUT_BLOCK_BITS: 16,
- BROTLI_MAX_INPUT_BLOCK_BITS: 24,
- BROTLI_PARAM_MODE: 0,
- BROTLI_PARAM_QUALITY: 1,
- BROTLI_PARAM_LGWIN: 2,
- BROTLI_PARAM_LGBLOCK: 3,
- BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4,
- BROTLI_PARAM_SIZE_HINT: 5,
- BROTLI_PARAM_LARGE_WINDOW: 6,
- BROTLI_PARAM_NPOSTFIX: 7,
- BROTLI_PARAM_NDIRECT: 8,
- BROTLI_DECODER_RESULT_ERROR: 0,
- BROTLI_DECODER_RESULT_SUCCESS: 1,
- BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2,
- BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3,
- BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0,
- BROTLI_DECODER_PARAM_LARGE_WINDOW: 1,
- BROTLI_DECODER_NO_ERROR: 0,
- BROTLI_DECODER_SUCCESS: 1,
- BROTLI_DECODER_NEEDS_MORE_INPUT: 2,
- BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3,
- BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1,
- BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2,
- BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3,
- BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4,
- BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5,
- BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6,
- BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7,
- BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8,
- BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9,
- BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10,
- BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11,
- BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12,
- BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13,
- BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14,
- BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15,
- BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16,
- BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19,
- BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20,
- BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21,
- BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22,
- BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25,
- BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26,
- BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27,
- BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30,
- BROTLI_DECODER_ERROR_UNREACHABLE: -31,
-}, realZlibConstants));
-//# sourceMappingURL=constants.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/esm/index.js b/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/esm/index.js
deleted file mode 100644
index a6269b505f47cc..00000000000000
--- a/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/esm/index.js
+++ /dev/null
@@ -1,333 +0,0 @@
-import assert from 'assert';
-import { Buffer } from 'buffer';
-import { Minipass } from 'minipass';
-import realZlib from 'zlib';
-import { constants } from './constants.js';
-export { constants } from './constants.js';
-const OriginalBufferConcat = Buffer.concat;
-const _superWrite = Symbol('_superWrite');
-export class ZlibError extends Error {
- code;
- errno;
- constructor(err) {
- super('zlib: ' + err.message);
- this.code = err.code;
- this.errno = err.errno;
- /* c8 ignore next */
- if (!this.code)
- this.code = 'ZLIB_ERROR';
- this.message = 'zlib: ' + err.message;
- Error.captureStackTrace(this, this.constructor);
- }
- get name() {
- return 'ZlibError';
- }
-}
-// the Zlib class they all inherit from
-// This thing manages the queue of requests, and returns
-// true or false if there is anything in the queue when
-// you call the .write() method.
-const _flushFlag = Symbol('flushFlag');
-class ZlibBase extends Minipass {
- #sawError = false;
- #ended = false;
- #flushFlag;
- #finishFlushFlag;
- #fullFlushFlag;
- #handle;
- #onError;
- get sawError() {
- return this.#sawError;
- }
- get handle() {
- return this.#handle;
- }
- /* c8 ignore start */
- get flushFlag() {
- return this.#flushFlag;
- }
- /* c8 ignore stop */
- constructor(opts, mode) {
- if (!opts || typeof opts !== 'object')
- throw new TypeError('invalid options for ZlibBase constructor');
- //@ts-ignore
- super(opts);
- /* c8 ignore start */
- this.#flushFlag = opts.flush ?? 0;
- this.#finishFlushFlag = opts.finishFlush ?? 0;
- this.#fullFlushFlag = opts.fullFlushFlag ?? 0;
- /* c8 ignore stop */
- // this will throw if any options are invalid for the class selected
- try {
- // @types/node doesn't know that it exports the classes, but they're there
- //@ts-ignore
- this.#handle = new realZlib[mode](opts);
- }
- catch (er) {
- // make sure that all errors get decorated properly
- throw new ZlibError(er);
- }
- this.#onError = err => {
- // no sense raising multiple errors, since we abort on the first one.
- if (this.#sawError)
- return;
- this.#sawError = true;
- // there is no way to cleanly recover.
- // continuing only obscures problems.
- this.close();
- this.emit('error', err);
- };
- this.#handle?.on('error', er => this.#onError(new ZlibError(er)));
- this.once('end', () => this.close);
- }
- close() {
- if (this.#handle) {
- this.#handle.close();
- this.#handle = undefined;
- this.emit('close');
- }
- }
- reset() {
- if (!this.#sawError) {
- assert(this.#handle, 'zlib binding closed');
- //@ts-ignore
- return this.#handle.reset?.();
- }
- }
- flush(flushFlag) {
- if (this.ended)
- return;
- if (typeof flushFlag !== 'number')
- flushFlag = this.#fullFlushFlag;
- this.write(Object.assign(Buffer.alloc(0), { [_flushFlag]: flushFlag }));
- }
- end(chunk, encoding, cb) {
- /* c8 ignore start */
- if (typeof chunk === 'function') {
- cb = chunk;
- encoding = undefined;
- chunk = undefined;
- }
- if (typeof encoding === 'function') {
- cb = encoding;
- encoding = undefined;
- }
- /* c8 ignore stop */
- if (chunk) {
- if (encoding)
- this.write(chunk, encoding);
- else
- this.write(chunk);
- }
- this.flush(this.#finishFlushFlag);
- this.#ended = true;
- return super.end(cb);
- }
- get ended() {
- return this.#ended;
- }
- // overridden in the gzip classes to do portable writes
- [_superWrite](data) {
- return super.write(data);
- }
- write(chunk, encoding, cb) {
- // process the chunk using the sync process
- // then super.write() all the outputted chunks
- if (typeof encoding === 'function')
- (cb = encoding), (encoding = 'utf8');
- if (typeof chunk === 'string')
- chunk = Buffer.from(chunk, encoding);
- if (this.#sawError)
- return;
- assert(this.#handle, 'zlib binding closed');
- // _processChunk tries to .close() the native handle after it's done, so we
- // intercept that by temporarily making it a no-op.
- // diving into the node:zlib internals a bit here
- const nativeHandle = this.#handle
- ._handle;
- const originalNativeClose = nativeHandle.close;
- nativeHandle.close = () => { };
- const originalClose = this.#handle.close;
- this.#handle.close = () => { };
- // It also calls `Buffer.concat()` at the end, which may be convenient
- // for some, but which we are not interested in as it slows us down.
- Buffer.concat = args => args;
- let result = undefined;
- try {
- const flushFlag = typeof chunk[_flushFlag] === 'number'
- ? chunk[_flushFlag]
- : this.#flushFlag;
- result = this.#handle._processChunk(chunk, flushFlag);
- // if we don't throw, reset it back how it was
- Buffer.concat = OriginalBufferConcat;
- }
- catch (err) {
- // or if we do, put Buffer.concat() back before we emit error
- // Error events call into user code, which may call Buffer.concat()
- Buffer.concat = OriginalBufferConcat;
- this.#onError(new ZlibError(err));
- }
- finally {
- if (this.#handle) {
- // Core zlib resets `_handle` to null after attempting to close the
- // native handle. Our no-op handler prevented actual closure, but we
- // need to restore the `._handle` property.
- ;
- this.#handle._handle =
- nativeHandle;
- nativeHandle.close = originalNativeClose;
- this.#handle.close = originalClose;
- // `_processChunk()` adds an 'error' listener. If we don't remove it
- // after each call, these handlers start piling up.
- this.#handle.removeAllListeners('error');
- // make sure OUR error listener is still attached tho
- }
- }
- if (this.#handle)
- this.#handle.on('error', er => this.#onError(new ZlibError(er)));
- let writeReturn;
- if (result) {
- if (Array.isArray(result) && result.length > 0) {
- const r = result[0];
- // The first buffer is always `handle._outBuffer`, which would be
- // re-used for later invocations; so, we always have to copy that one.
- writeReturn = this[_superWrite](Buffer.from(r));
- for (let i = 1; i < result.length; i++) {
- writeReturn = this[_superWrite](result[i]);
- }
- }
- else {
- // either a single Buffer or an empty array
- writeReturn = this[_superWrite](Buffer.from(result));
- }
- }
- if (cb)
- cb();
- return writeReturn;
- }
-}
-export class Zlib extends ZlibBase {
- #level;
- #strategy;
- constructor(opts, mode) {
- opts = opts || {};
- opts.flush = opts.flush || constants.Z_NO_FLUSH;
- opts.finishFlush = opts.finishFlush || constants.Z_FINISH;
- opts.fullFlushFlag = constants.Z_FULL_FLUSH;
- super(opts, mode);
- this.#level = opts.level;
- this.#strategy = opts.strategy;
- }
- params(level, strategy) {
- if (this.sawError)
- return;
- if (!this.handle)
- throw new Error('cannot switch params when binding is closed');
- // no way to test this without also not supporting params at all
- /* c8 ignore start */
- if (!this.handle.params)
- throw new Error('not supported in this implementation');
- /* c8 ignore stop */
- if (this.#level !== level || this.#strategy !== strategy) {
- this.flush(constants.Z_SYNC_FLUSH);
- assert(this.handle, 'zlib binding closed');
- // .params() calls .flush(), but the latter is always async in the
- // core zlib. We override .flush() temporarily to intercept that and
- // flush synchronously.
- const origFlush = this.handle.flush;
- this.handle.flush = (flushFlag, cb) => {
- /* c8 ignore start */
- if (typeof flushFlag === 'function') {
- cb = flushFlag;
- flushFlag = this.flushFlag;
- }
- /* c8 ignore stop */
- this.flush(flushFlag);
- cb?.();
- };
- try {
- ;
- this.handle.params(level, strategy);
- }
- finally {
- this.handle.flush = origFlush;
- }
- /* c8 ignore start */
- if (this.handle) {
- this.#level = level;
- this.#strategy = strategy;
- }
- /* c8 ignore stop */
- }
- }
-}
-// minimal 2-byte header
-export class Deflate extends Zlib {
- constructor(opts) {
- super(opts, 'Deflate');
- }
-}
-export class Inflate extends Zlib {
- constructor(opts) {
- super(opts, 'Inflate');
- }
-}
-export class Gzip extends Zlib {
- #portable;
- constructor(opts) {
- super(opts, 'Gzip');
- this.#portable = opts && !!opts.portable;
- }
- [_superWrite](data) {
- if (!this.#portable)
- return super[_superWrite](data);
- // we'll always get the header emitted in one first chunk
- // overwrite the OS indicator byte with 0xFF
- this.#portable = false;
- data[9] = 255;
- return super[_superWrite](data);
- }
-}
-export class Gunzip extends Zlib {
- constructor(opts) {
- super(opts, 'Gunzip');
- }
-}
-// raw - no header
-export class DeflateRaw extends Zlib {
- constructor(opts) {
- super(opts, 'DeflateRaw');
- }
-}
-export class InflateRaw extends Zlib {
- constructor(opts) {
- super(opts, 'InflateRaw');
- }
-}
-// auto-detect header.
-export class Unzip extends Zlib {
- constructor(opts) {
- super(opts, 'Unzip');
- }
-}
-export class Brotli extends ZlibBase {
- constructor(opts, mode) {
- opts = opts || {};
- opts.flush = opts.flush || constants.BROTLI_OPERATION_PROCESS;
- opts.finishFlush =
- opts.finishFlush || constants.BROTLI_OPERATION_FINISH;
- opts.fullFlushFlag = constants.BROTLI_OPERATION_FLUSH;
- super(opts, mode);
- }
-}
-export class BrotliCompress extends Brotli {
- constructor(opts) {
- super(opts, 'BrotliCompress');
- }
-}
-export class BrotliDecompress extends Brotli {
- constructor(opts) {
- super(opts, 'BrotliDecompress');
- }
-}
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/esm/package.json b/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/esm/package.json
deleted file mode 100644
index 3dbc1ca591c055..00000000000000
--- a/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/esm/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
- "type": "module"
-}
diff --git a/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/package.json b/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/package.json
deleted file mode 100644
index e94623ff43d353..00000000000000
--- a/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/package.json
+++ /dev/null
@@ -1,81 +0,0 @@
-{
- "name": "minizlib",
- "version": "3.0.1",
- "description": "A small fast zlib stream built on [minipass](http://npm.im/minipass) and Node.js's zlib binding.",
- "main": "./dist/commonjs/index.js",
- "dependencies": {
- "minipass": "^7.0.4",
- "rimraf": "^5.0.5"
- },
- "scripts": {
- "prepare": "tshy",
- "pretest": "npm run prepare",
- "test": "tap",
- "preversion": "npm test",
- "postversion": "npm publish",
- "prepublishOnly": "git push origin --follow-tags",
- "format": "prettier --write . --loglevel warn",
- "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
- },
- "repository": {
- "type": "git",
- "url": "git+https://github.com/isaacs/minizlib.git"
- },
- "keywords": [
- "zlib",
- "gzip",
- "gunzip",
- "deflate",
- "inflate",
- "compression",
- "zip",
- "unzip"
- ],
- "author": "Isaac Z. Schlueter (http://blog.izs.me/)",
- "license": "MIT",
- "devDependencies": {
- "@types/node": "^20.11.29",
- "mkdirp": "^3.0.1",
- "tap": "^18.7.1",
- "tshy": "^1.12.0",
- "typedoc": "^0.25.12"
- },
- "files": [
- "dist"
- ],
- "engines": {
- "node": ">= 18"
- },
- "tshy": {
- "exports": {
- "./package.json": "./package.json",
- ".": "./src/index.ts"
- }
- },
- "exports": {
- "./package.json": "./package.json",
- ".": {
- "import": {
- "types": "./dist/esm/index.d.ts",
- "default": "./dist/esm/index.js"
- },
- "require": {
- "types": "./dist/commonjs/index.d.ts",
- "default": "./dist/commonjs/index.js"
- }
- }
- },
- "types": "./dist/commonjs/index.d.ts",
- "type": "module",
- "prettier": {
- "semi": false,
- "printWidth": 75,
- "tabWidth": 2,
- "useTabs": false,
- "singleQuote": true,
- "jsxSingleQuote": false,
- "bracketSameLine": true,
- "arrowParens": "avoid",
- "endOfLine": "lf"
- }
-}
diff --git a/deps/npm/node_modules/minipass-fetch/package.json b/deps/npm/node_modules/minipass-fetch/package.json
index 6e248345980387..eb8a4d4fac40d4 100644
--- a/deps/npm/node_modules/minipass-fetch/package.json
+++ b/deps/npm/node_modules/minipass-fetch/package.json
@@ -1,6 +1,6 @@
{
"name": "minipass-fetch",
- "version": "4.0.0",
+ "version": "4.0.1",
"description": "An implementation of window.fetch in Node.js using Minipass streams",
"license": "MIT",
"main": "lib/index.js",
diff --git a/deps/npm/node_modules/minizlib/LICENSE b/deps/npm/node_modules/minizlib/LICENSE
index ffce7383f53e7f..49f7efe431c9ea 100644
--- a/deps/npm/node_modules/minizlib/LICENSE
+++ b/deps/npm/node_modules/minizlib/LICENSE
@@ -2,9 +2,9 @@ Minizlib was created by Isaac Z. Schlueter.
It is a derivative work of the Node.js project.
"""
-Copyright Isaac Z. Schlueter and Contributors
-Copyright Node.js contributors. All rights reserved.
-Copyright Joyent, Inc. and other Node contributors. All rights reserved.
+Copyright (c) 2017-2023 Isaac Z. Schlueter and Contributors
+Copyright (c) 2017-2023 Node.js contributors. All rights reserved.
+Copyright (c) 2017-2023 Joyent, Inc. and other Node contributors. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
diff --git a/deps/npm/node_modules/cacache/node_modules/minizlib/dist/commonjs/constants.js b/deps/npm/node_modules/minizlib/dist/commonjs/constants.js
similarity index 100%
rename from deps/npm/node_modules/cacache/node_modules/minizlib/dist/commonjs/constants.js
rename to deps/npm/node_modules/minizlib/dist/commonjs/constants.js
diff --git a/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/commonjs/index.js b/deps/npm/node_modules/minizlib/dist/commonjs/index.js
similarity index 86%
rename from deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/commonjs/index.js
rename to deps/npm/node_modules/minizlib/dist/commonjs/index.js
index ad65eef0495076..b4906d27833720 100644
--- a/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/commonjs/index.js
+++ b/deps/npm/node_modules/minizlib/dist/commonjs/index.js
@@ -1,4 +1,37 @@
"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || (function () {
+ var ownKeys = function(o) {
+ ownKeys = Object.getOwnPropertyNames || function (o) {
+ var ar = [];
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+ return ar;
+ };
+ return ownKeys(o);
+ };
+ return function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+ __setModuleDefault(result, mod);
+ return result;
+ };
+})();
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
@@ -7,11 +40,18 @@ exports.BrotliDecompress = exports.BrotliCompress = exports.Brotli = exports.Unz
const assert_1 = __importDefault(require("assert"));
const buffer_1 = require("buffer");
const minipass_1 = require("minipass");
-const zlib_1 = __importDefault(require("zlib"));
+const realZlib = __importStar(require("zlib"));
const constants_js_1 = require("./constants.js");
var constants_js_2 = require("./constants.js");
Object.defineProperty(exports, "constants", { enumerable: true, get: function () { return constants_js_2.constants; } });
const OriginalBufferConcat = buffer_1.Buffer.concat;
+const desc = Object.getOwnPropertyDescriptor(buffer_1.Buffer, 'concat');
+const noop = (args) => args;
+const passthroughBufferConcat = desc?.writable === true || desc?.set !== undefined
+ ? (makeNoOp) => {
+ buffer_1.Buffer.concat = makeNoOp ? noop : OriginalBufferConcat;
+ }
+ : (_) => { };
const _superWrite = Symbol('_superWrite');
class ZlibError extends Error {
code;
@@ -69,7 +109,7 @@ class ZlibBase extends minipass_1.Minipass {
try {
// @types/node doesn't know that it exports the classes, but they're there
//@ts-ignore
- this.#handle = new zlib_1.default[mode](opts);
+ this.#handle = new realZlib[mode](opts);
}
catch (er) {
// make sure that all errors get decorated properly
@@ -159,7 +199,7 @@ class ZlibBase extends minipass_1.Minipass {
this.#handle.close = () => { };
// It also calls `Buffer.concat()` at the end, which may be convenient
// for some, but which we are not interested in as it slows us down.
- buffer_1.Buffer.concat = args => args;
+ passthroughBufferConcat(true);
let result = undefined;
try {
const flushFlag = typeof chunk[_flushFlag] === 'number'
@@ -167,12 +207,12 @@ class ZlibBase extends minipass_1.Minipass {
: this.#flushFlag;
result = this.#handle._processChunk(chunk, flushFlag);
// if we don't throw, reset it back how it was
- buffer_1.Buffer.concat = OriginalBufferConcat;
+ passthroughBufferConcat(false);
}
catch (err) {
// or if we do, put Buffer.concat() back before we emit error
// Error events call into user code, which may call Buffer.concat()
- buffer_1.Buffer.concat = OriginalBufferConcat;
+ passthroughBufferConcat(false);
this.#onError(new ZlibError(err));
}
finally {
diff --git a/deps/npm/node_modules/cacache/node_modules/minizlib/dist/commonjs/package.json b/deps/npm/node_modules/minizlib/dist/commonjs/package.json
similarity index 100%
rename from deps/npm/node_modules/cacache/node_modules/minizlib/dist/commonjs/package.json
rename to deps/npm/node_modules/minizlib/dist/commonjs/package.json
diff --git a/deps/npm/node_modules/cacache/node_modules/minizlib/dist/esm/constants.js b/deps/npm/node_modules/minizlib/dist/esm/constants.js
similarity index 100%
rename from deps/npm/node_modules/cacache/node_modules/minizlib/dist/esm/constants.js
rename to deps/npm/node_modules/minizlib/dist/esm/constants.js
diff --git a/deps/npm/node_modules/node-gyp/node_modules/minizlib/dist/esm/index.js b/deps/npm/node_modules/minizlib/dist/esm/index.js
similarity index 96%
rename from deps/npm/node_modules/node-gyp/node_modules/minizlib/dist/esm/index.js
rename to deps/npm/node_modules/minizlib/dist/esm/index.js
index a6269b505f47cc..f33586a8ab0ec1 100644
--- a/deps/npm/node_modules/node-gyp/node_modules/minizlib/dist/esm/index.js
+++ b/deps/npm/node_modules/minizlib/dist/esm/index.js
@@ -1,10 +1,17 @@
import assert from 'assert';
import { Buffer } from 'buffer';
import { Minipass } from 'minipass';
-import realZlib from 'zlib';
+import * as realZlib from 'zlib';
import { constants } from './constants.js';
export { constants } from './constants.js';
const OriginalBufferConcat = Buffer.concat;
+const desc = Object.getOwnPropertyDescriptor(Buffer, 'concat');
+const noop = (args) => args;
+const passthroughBufferConcat = desc?.writable === true || desc?.set !== undefined
+ ? (makeNoOp) => {
+ Buffer.concat = makeNoOp ? noop : OriginalBufferConcat;
+ }
+ : (_) => { };
const _superWrite = Symbol('_superWrite');
export class ZlibError extends Error {
code;
@@ -151,7 +158,7 @@ class ZlibBase extends Minipass {
this.#handle.close = () => { };
// It also calls `Buffer.concat()` at the end, which may be convenient
// for some, but which we are not interested in as it slows us down.
- Buffer.concat = args => args;
+ passthroughBufferConcat(true);
let result = undefined;
try {
const flushFlag = typeof chunk[_flushFlag] === 'number'
@@ -159,12 +166,12 @@ class ZlibBase extends Minipass {
: this.#flushFlag;
result = this.#handle._processChunk(chunk, flushFlag);
// if we don't throw, reset it back how it was
- Buffer.concat = OriginalBufferConcat;
+ passthroughBufferConcat(false);
}
catch (err) {
// or if we do, put Buffer.concat() back before we emit error
// Error events call into user code, which may call Buffer.concat()
- Buffer.concat = OriginalBufferConcat;
+ passthroughBufferConcat(false);
this.#onError(new ZlibError(err));
}
finally {
diff --git a/deps/npm/node_modules/cacache/node_modules/minizlib/dist/esm/package.json b/deps/npm/node_modules/minizlib/dist/esm/package.json
similarity index 100%
rename from deps/npm/node_modules/cacache/node_modules/minizlib/dist/esm/package.json
rename to deps/npm/node_modules/minizlib/dist/esm/package.json
diff --git a/deps/npm/node_modules/minizlib/package.json b/deps/npm/node_modules/minizlib/package.json
index 98825a549f3fdc..43cb855e15a5d8 100644
--- a/deps/npm/node_modules/minizlib/package.json
+++ b/deps/npm/node_modules/minizlib/package.json
@@ -1,17 +1,20 @@
{
"name": "minizlib",
- "version": "2.1.2",
+ "version": "3.0.2",
"description": "A small fast zlib stream built on [minipass](http://npm.im/minipass) and Node.js's zlib binding.",
- "main": "index.js",
+ "main": "./dist/commonjs/index.js",
"dependencies": {
- "minipass": "^3.0.0",
- "yallist": "^4.0.0"
+ "minipass": "^7.1.2"
},
"scripts": {
- "test": "tap test/*.js --100 -J",
+ "prepare": "tshy",
+ "pretest": "npm run prepare",
+ "test": "tap",
"preversion": "npm test",
"postversion": "npm publish",
- "postpublish": "git push origin --all; git push origin --tags"
+ "prepublishOnly": "git push origin --follow-tags",
+ "format": "prettier --write . --loglevel warn",
+ "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
},
"repository": {
"type": "git",
@@ -30,13 +33,48 @@
"author": "Isaac Z. Schlueter (http://blog.izs.me/)",
"license": "MIT",
"devDependencies": {
- "tap": "^14.6.9"
+ "@types/node": "^22.13.14",
+ "tap": "^21.1.0",
+ "tshy": "^3.0.2",
+ "typedoc": "^0.28.1"
},
"files": [
- "index.js",
- "constants.js"
+ "dist"
],
"engines": {
- "node": ">= 8"
- }
+ "node": ">= 18"
+ },
+ "tshy": {
+ "exports": {
+ "./package.json": "./package.json",
+ ".": "./src/index.ts"
+ }
+ },
+ "exports": {
+ "./package.json": "./package.json",
+ ".": {
+ "import": {
+ "types": "./dist/esm/index.d.ts",
+ "default": "./dist/esm/index.js"
+ },
+ "require": {
+ "types": "./dist/commonjs/index.d.ts",
+ "default": "./dist/commonjs/index.js"
+ }
+ }
+ },
+ "types": "./dist/commonjs/index.d.ts",
+ "type": "module",
+ "prettier": {
+ "semi": false,
+ "printWidth": 75,
+ "tabWidth": 2,
+ "useTabs": false,
+ "singleQuote": true,
+ "jsxSingleQuote": false,
+ "bracketSameLine": true,
+ "arrowParens": "avoid",
+ "endOfLine": "lf"
+ },
+ "module": "./dist/esm/index.js"
}
diff --git a/deps/npm/node_modules/node-gyp/.release-please-manifest.json b/deps/npm/node_modules/node-gyp/.release-please-manifest.json
index 26a3463a2e0bb3..f098464b1facdb 100644
--- a/deps/npm/node_modules/node-gyp/.release-please-manifest.json
+++ b/deps/npm/node_modules/node-gyp/.release-please-manifest.json
@@ -1,3 +1,3 @@
{
- ".": "11.0.0"
+ ".": "11.2.0"
}
diff --git a/deps/npm/node_modules/node-gyp/CHANGELOG.md b/deps/npm/node_modules/node-gyp/CHANGELOG.md
index 8374a920b7caaa..e206e5d9f3e517 100644
--- a/deps/npm/node_modules/node-gyp/CHANGELOG.md
+++ b/deps/npm/node_modules/node-gyp/CHANGELOG.md
@@ -1,5 +1,55 @@
# Changelog
+## [11.2.0](https://github.com/nodejs/node-gyp/compare/v11.1.0...v11.2.0) (2025-04-01)
+
+
+### Features
+
+* update gyp-next to v0.20.0 ([#3149](https://github.com/nodejs/node-gyp/issues/3149)) ([80e9c79](https://github.com/nodejs/node-gyp/commit/80e9c795a739c490cfbc85633e63022b36a7c70d))
+
+
+### Bug Fixes
+
+* disable msbuild.exe nodeReuse ([#3112](https://github.com/nodejs/node-gyp/issues/3112)) ([0cf16d2](https://github.com/nodejs/node-gyp/commit/0cf16d29fe604266fb47325496287a63075ea532))
+* use maxRetries on fs.rm calls ([#3113](https://github.com/nodejs/node-gyp/issues/3113)) ([a2772a7](https://github.com/nodejs/node-gyp/commit/a2772a76709f939af1e80dd8fe766ca2143aa5bf))
+
+
+### Tests
+
+* fix wasm test on Windows ([#3145](https://github.com/nodejs/node-gyp/issues/3145)) ([ee1d6fd](https://github.com/nodejs/node-gyp/commit/ee1d6fd8d83c9dd3eae7df7ec533bb6b39e1a812))
+* use maxRetries with tests too ([#3150](https://github.com/nodejs/node-gyp/issues/3150)) ([0ccbe7e](https://github.com/nodejs/node-gyp/commit/0ccbe7e90afb096b46a7818ba127a4871237952e))
+
+
+### Doc
+
+* add ffi-napi to docs/README.md ([#3138](https://github.com/nodejs/node-gyp/issues/3138)) ([4885110](https://github.com/nodejs/node-gyp/commit/48851107ad8c5d2cf18a55e8bd2764f5938e7102))
+
+
+### Miscellaneous
+
+* switch to tinyglobby ([#3133](https://github.com/nodejs/node-gyp/issues/3133)) ([c3b3ab0](https://github.com/nodejs/node-gyp/commit/c3b3ab06ee0f092cd5c0646120d57e56d41b79fc))
+* update tinyglobby ([#3136](https://github.com/nodejs/node-gyp/issues/3136)) ([b21cf87](https://github.com/nodejs/node-gyp/commit/b21cf874f58883f3fd4dd07bec3b584fb07e831d))
+
+## [11.1.0](https://github.com/nodejs/node-gyp/compare/v11.0.0...v11.1.0) (2025-02-10)
+
+
+### Features
+
+* update gyp-next to v0.19.1 ([#3122](https://github.com/nodejs/node-gyp/issues/3122)) ([504250e](https://github.com/nodejs/node-gyp/commit/504250e5e3e27c6ef6dcfcaa744b36e1a99c1be8))
+
+
+### Bug Fixes
+
+* Find VC.Tools.ARM64 on arm64 machine ([#3075](https://github.com/nodejs/node-gyp/issues/3075)) ([b899fae](https://github.com/nodejs/node-gyp/commit/b899faed56270d3d8496da7576b5750b264c2c21))
+* try libnode.dll first in load_exe_hook ([#2834](https://github.com/nodejs/node-gyp/issues/2834)) ([b9d10a5](https://github.com/nodejs/node-gyp/commit/b9d10a5a37081e2a731937e43eca52c83609e7f5))
+
+
+### Miscellaneous
+
+* add gyp-next updater ([#3105](https://github.com/nodejs/node-gyp/issues/3105)) ([e3f9a77](https://github.com/nodejs/node-gyp/commit/e3f9a7756f65a7f4e50799017b3dc51d5bc195b2))
+* Test on Ubuntu-24.04-arm and Node.js v23 ([#3121](https://github.com/nodejs/node-gyp/issues/3121)) ([2530f51](https://github.com/nodejs/node-gyp/commit/2530f51cec3ba595184e5bcb7fe1245e240beb59))
+* Use astral-sh/ruff-action@v3 to run the Python linter ([#3114](https://github.com/nodejs/node-gyp/issues/3114)) ([94448fc](https://github.com/nodejs/node-gyp/commit/94448fcd9f090814bce1c4361471dae199dc2e82))
+
## [11.0.0](https://github.com/nodejs/node-gyp/compare/v10.3.1...v11.0.0) (2024-12-03)
diff --git a/deps/npm/node_modules/node-gyp/gyp/.release-please-manifest.json b/deps/npm/node_modules/node-gyp/gyp/.release-please-manifest.json
index cbd0ca0683d981..589cd4553e1bde 100644
--- a/deps/npm/node_modules/node-gyp/gyp/.release-please-manifest.json
+++ b/deps/npm/node_modules/node-gyp/gyp/.release-please-manifest.json
@@ -1,3 +1,3 @@
{
- ".": "0.18.1"
+ ".": "0.20.0"
}
diff --git a/deps/npm/node_modules/node-gyp/gyp/docs/Hacking.md b/deps/npm/node_modules/node-gyp/gyp/docs/Hacking.md
index 89b3b8bea923ec..156d485b5b82d1 100644
--- a/deps/npm/node_modules/node-gyp/gyp/docs/Hacking.md
+++ b/deps/npm/node_modules/node-gyp/gyp/docs/Hacking.md
@@ -24,7 +24,7 @@ to make sure your changes aren't breaking anything important.
You run the test driver with e.g.
``` sh
-$ python -m pip install --upgrade pip setuptools
+$ python -m pip install --upgrade pip
$ pip install --editable ".[dev]"
$ python -m pytest
```
@@ -34,7 +34,7 @@ See [Testing](Testing.md) for more details on the test framework.
Note that it can be handy to look at the project files output by the tests
to diagnose problems. The easiest way to do that is by kindly asking the
test driver to leave the temporary directories it creates in-place.
-This is done by setting the enviroment variable "PRESERVE", e.g.
+This is done by setting the environment variable "PRESERVE", e.g.
```
set PRESERVE=all # On Windows
diff --git a/deps/npm/node_modules/node-gyp/gyp/docs/InputFormatReference.md b/deps/npm/node_modules/node-gyp/gyp/docs/InputFormatReference.md
index 2b2c180f4443c5..4b114f2debca45 100644
--- a/deps/npm/node_modules/node-gyp/gyp/docs/InputFormatReference.md
+++ b/deps/npm/node_modules/node-gyp/gyp/docs/InputFormatReference.md
@@ -194,6 +194,7 @@ lists associated with the following keys, are treated as pathnames:
* include\_dirs
* inputs
* libraries
+ * library\_dirs
* outputs
* sources
* mac\_bundle\_resources
@@ -231,7 +232,8 @@ Source dictionary from `../build/common.gypi`:
```
{
'include_dirs': ['include'], # Treated as relative to ../build
- 'libraries': ['-lz'], # Not treated as a pathname, begins with a dash
+ 'library_dirs': ['lib'], # Treated as relative to ../build
+ 'libraries': ['-lz'], # Not treated as a pathname, begins with a dash
'defines': ['NDEBUG'], # defines does not contain pathnames
}
```
@@ -250,6 +252,7 @@ Merged dictionary:
{
'sources': ['string_util.cc'],
'include_dirs': ['../build/include'],
+ 'library_dirs': ['../build/lib'],
'libraries': ['-lz'],
'defines': ['NDEBUG'],
}
diff --git a/deps/npm/node_modules/node-gyp/gyp/docs/LanguageSpecification.md b/deps/npm/node_modules/node-gyp/gyp/docs/LanguageSpecification.md
index 178b8c83169919..f8fff097ab73f3 100644
--- a/deps/npm/node_modules/node-gyp/gyp/docs/LanguageSpecification.md
+++ b/deps/npm/node_modules/node-gyp/gyp/docs/LanguageSpecification.md
@@ -157,7 +157,7 @@ have structural meaning for target definitions:
| `all_dependent_settings` | A dictionary of settings to be applied to all dependents of the target, transitively. This includes direct dependents and the entire set of their dependents, and so on. This section may contain anything found within a `target` dictionary, except `configurations`, `target_name`, and `type` sections. Compare `direct_dependent_settings` and `link_settings`. |
| `configurations` | A list of dictionaries defining build configurations for the target. See the "Configurations" section below. |
| `copies` | A list of copy actions to perform. See the "Copies" section below. |
-| `defines` | A list of preprocesor definitions to be passed on the command line to the C/C++ compiler (via `-D` or `/D` options). |
+| `defines` | A list of preprocessor definitions to be passed on the command line to the C/C++ compiler (via `-D` or `/D` options). |
| `dependencies` | A list of targets on which this target depends. Targets in other `.gyp` files are specified as `../path/to/other.gyp:target_we_want`. |
| `direct_dependent_settings` | A dictionary of settings to be applied to other targets that depend on this target. These settings will only be applied to direct dependents. This section may contain anything found within a `target` dictionary, except `configurations`, `target_name`, and `type` sections. Compare with `all_dependent_settings` and `link_settings`. |
| `include_dirs` | A list of include directories to be passed on the command line to the C/C++ compiler (via `-I` or `/I` options). |
@@ -208,8 +208,8 @@ Configuration dictionaries may also contain these elements:
Conditionals may appear within any dictionary in a `.gyp` file. There
are two tpes of conditionals, which differ only in the timing of their
-processing. `conditons` sections are processed shortly after loading
-`.gyp` files, and `target_conditons` sections are processed after all
+processing. `conditions` sections are processed shortly after loading
+`.gyp` files, and `target_conditions` sections are processed after all
dependencies have been computed.
A conditional section is introduced with a `conditions` or
diff --git a/deps/npm/node_modules/node-gyp/gyp/docs/Testing.md b/deps/npm/node_modules/node-gyp/gyp/docs/Testing.md
index baeb65f9441c79..a52031e88819a6 100644
--- a/deps/npm/node_modules/node-gyp/gyp/docs/Testing.md
+++ b/deps/npm/node_modules/node-gyp/gyp/docs/Testing.md
@@ -392,7 +392,7 @@ fails the test if it does.
Verifies that the output string contains all of the "lines" in the specified
list of lines. In practice, the lines can be any substring and need not be
-`\n`-terminaed lines per se. If any line is missing, the test fails.
+`\n`-terminated lines per se. If any line is missing, the test fails.
```
test.must_not_contain_any_lines(output, lines)
@@ -400,7 +400,7 @@ list of lines. In practice, the lines can be any substring and need not be
Verifies that the output string does _not_ contain any of the "lines" in the
specified list of lines. In practice, the lines can be any substring and need
-not be `\n`-terminaed lines per se. If any line exists in the output string,
+not be `\n`-terminated lines per se. If any line exists in the output string,
the test fails.
```
@@ -409,7 +409,7 @@ the test fails.
Verifies that the output string contains at least one of the "lines" in the
specified list of lines. In practice, the lines can be any substring and need
-not be `\n`-terminaed lines per se. If none of the specified lines is present,
+not be `\n`-terminated lines per se. If none of the specified lines is present,
the test fails.
### Reading file contents
diff --git a/deps/npm/node_modules/node-gyp/gyp/docs/UserDocumentation.md b/deps/npm/node_modules/node-gyp/gyp/docs/UserDocumentation.md
index 808f37a1a9361c..b9d412e1c847ba 100644
--- a/deps/npm/node_modules/node-gyp/gyp/docs/UserDocumentation.md
+++ b/deps/npm/node_modules/node-gyp/gyp/docs/UserDocumentation.md
@@ -104,7 +104,7 @@ describing all the information necessary to build the target.
`'conditions'`: A list of condition specifications that can modify the
contents of the items in the global dictionary defined by this `.gyp`
-file based on the values of different variablwes. As implied by the
+file based on the values of different variables. As implied by the
above example, the most common use of a `conditions` section in the
top-level dictionary is to add platform-specific targets to the
`targets` list.
@@ -375,7 +375,7 @@ If your platform-specific file does not contain a
already in the `conditions` for the target), and you can't change the
file name, there are two patterns that can be used.
-**Prefererred**: Add the file to the `sources` list of the appropriate
+**Preferred**: Add the file to the `sources` list of the appropriate
dictionary within the `targets` list. Add an appropriate `conditions`
section to exclude the specific files name:
@@ -807,7 +807,7 @@ directory:
```
Adding a library often involves updating multiple `.gyp` files, adding
-the target to the approprate `.gyp` file (possibly a newly-added `.gyp`
+the target to the appropriate `.gyp` file (possibly a newly-added `.gyp`
file), and updating targets in the other `.gyp` files that depend on
(link with) the new library.
@@ -858,7 +858,7 @@ because of those settings' being listed in the
`direct_dependent_settings` block.
Note that these settings will likely need to be replicated in the
-settings for the library target itsef, so that the library will build
+settings for the library target itself, so that the library will build
with the same options. This does not prevent the target from defining
additional options for its "internal" use when compiling its own source
files. (In the above example, these are the `LOCAL_DEFINE_FOR_LIBBAR`
diff --git a/deps/npm/node_modules/node-gyp/gyp/gyp_main.py b/deps/npm/node_modules/node-gyp/gyp/gyp_main.py
index f23dcdf882d1b0..bf169874851463 100755
--- a/deps/npm/node_modules/node-gyp/gyp/gyp_main.py
+++ b/deps/npm/node_modules/node-gyp/gyp/gyp_main.py
@@ -5,8 +5,8 @@
# found in the LICENSE file.
import os
-import sys
import subprocess
+import sys
def IsCygwin():
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py
index 629f3f61b4819d..339d27d4029fcf 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py
@@ -4,7 +4,7 @@
"""Visual Studio project reader/writer."""
-import gyp.easy_xml as easy_xml
+from gyp import easy_xml
# ------------------------------------------------------------------------------
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py
index ac87f572b240de..fea6e672865bfe 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py
@@ -171,7 +171,7 @@ def ValidateMSBuild(self, value):
int(value, self._msbuild_base)
def ConvertToMSBuild(self, value):
- msbuild_format = (self._msbuild_base == 10) and "%d" or "0x%04x"
+ msbuild_format = ((self._msbuild_base == 10) and "%d") or "0x%04x"
return msbuild_format % int(value)
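
The hunk above (and the matching one in MSVSVersion.py further down) only adds parentheses around the old pre-ternary `and/or` selection idiom; behaviour is unchanged because the selected format string is always truthy. A minimal sketch, outside the patch, of the idiom and its one pitfall:

```python
# Not from the patch: the "cond and A or B" selection idiom that the
# parenthesised change above makes explicit.
def pick(flag: bool) -> str:
    # Equivalent to: "%d" if flag else "0x%04x", because "%d" is truthy.
    return (flag and "%d") or "0x%04x"

# The classic pitfall: if the "true" branch is falsy, the idiom silently
# falls through to the "false" branch.
def broken(flag: bool) -> str:
    return (flag and "") or "default"  # returns "default" even when flag is True

assert pick(True) == "%d"
assert pick(False) == "0x%04x"
assert broken(True) == "default"
```
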
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py
index 6ca09687ad7f13..0504728d994ca8 100755
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py
@@ -7,10 +7,10 @@
"""Unit tests for the MSVSSettings.py file."""
import unittest
-import gyp.MSVSSettings as MSVSSettings
-
from io import StringIO
+from gyp import MSVSSettings
+
class TestSequenceFunctions(unittest.TestCase):
def setUp(self):
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py
index 2e5c811bdde322..901ba84588589b 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py
@@ -4,7 +4,7 @@
"""Visual Studio project reader/writer."""
-import gyp.easy_xml as easy_xml
+from gyp import easy_xml
class Writer:
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py
index e580c00fb76d3e..23d3e16953c43a 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py
@@ -8,8 +8,7 @@
import re
import socket # for gethostname
-import gyp.easy_xml as easy_xml
-
+from gyp import easy_xml
# ------------------------------------------------------------------------------
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py
index 36bb782bd319a2..27647f11d07467 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py
@@ -7,7 +7,6 @@
import copy
import os
-
# A dictionary mapping supported target types to extensions.
TARGET_TYPE_EXT = {
"executable": "exe",
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py
index 8d7f21e82dd2f8..93f48bc05c8dc5 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py
@@ -5,11 +5,11 @@
"""Handle version information related to Visual Stuio."""
import errno
+import glob
import os
import re
import subprocess
import sys
-import glob
def JoinPath(*args):
@@ -69,7 +69,7 @@ def UsesVcxproj(self):
def ProjectExtension(self):
"""Returns the file extension for the project."""
- return self.uses_vcxproj and ".vcxproj" or ".vcproj"
+ return (self.uses_vcxproj and ".vcxproj") or ".vcproj"
def Path(self):
"""Returns the path to Visual Studio installation."""
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/__init__.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/__init__.py
index d6cc01307d997c..77800661a48c0e 100755
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/__init__.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/__init__.py
@@ -4,17 +4,18 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+from __future__ import annotations
-import copy
-import gyp.input
import argparse
+import copy
import os.path
import re
import shlex
import sys
import traceback
-from gyp.common import GypError
+import gyp.input
+from gyp.common import GypError
# Default debug modes for GYP
debug = {}
@@ -24,6 +25,18 @@
DEBUG_VARIABLES = "variables"
DEBUG_INCLUDES = "includes"
+def EscapeForCString(string: bytes | str) -> str:
+ if isinstance(string, str):
+ string = string.encode(encoding='utf8')
+
+ backslash_or_double_quote = {ord('\\'), ord('"')}
+ result = ''
+ for char in string:
+ if char in backslash_or_double_quote or not 32 <= char < 127:
+ result += '\\%03o' % char
+ else:
+ result += chr(char)
+ return result
def DebugOutput(mode, message, *args):
if "all" in gyp.debug or mode in gyp.debug:
@@ -106,18 +119,19 @@ def Load(
output_dir = params["options"].generator_output or params["options"].toplevel_dir
if default_variables["GENERATOR"] == "ninja":
- default_variables.setdefault(
- "PRODUCT_DIR_ABS",
- os.path.join(
- output_dir, "out", default_variables.get("build_type", "default")
- ),
+ product_dir_abs = os.path.join(
+ output_dir, "out", default_variables.get("build_type", "default")
)
else:
- default_variables.setdefault(
- "PRODUCT_DIR_ABS",
- os.path.join(output_dir, default_variables["CONFIGURATION_NAME"]),
+ product_dir_abs = os.path.join(
+ output_dir, default_variables["CONFIGURATION_NAME"]
)
+ default_variables.setdefault("PRODUCT_DIR_ABS", product_dir_abs)
+ default_variables.setdefault(
+ "PRODUCT_DIR_ABS_CSTR", EscapeForCString(product_dir_abs)
+ )
+
# Give the generator the opportunity to set additional variables based on
# the params it will receive in the output phase.
if getattr(generator, "CalculateVariables", None):
@@ -192,8 +206,7 @@ def NameValueListToDict(name_value_list):
def ShlexEnv(env_name):
- flags = os.environ.get(env_name, [])
- if flags:
+ if flags := os.environ.get(env_name) or []:
flags = shlex.split(flags)
return flags
@@ -253,7 +266,7 @@ def Noop(value):
for name, metadata in options._regeneration_metadata.items():
opt = metadata["opt"]
value = getattr(options, name)
- value_predicate = metadata["type"] == "path" and FixPath or Noop
+ value_predicate = (metadata["type"] == "path" and FixPath) or Noop
action = metadata["action"]
env_name = metadata["env_name"]
if action == "append":
@@ -348,7 +361,7 @@ def gyp_main(args):
action="store",
env_name="GYP_CONFIG_DIR",
default=None,
- help="The location for configuration files like " "include.gypi.",
+ help="The location for configuration files like include.gypi.",
)
parser.add_argument(
"-d",
@@ -512,19 +525,18 @@ def gyp_main(args):
# If no format was given on the command line, then check the env variable.
generate_formats = []
if options.use_environment:
- generate_formats = os.environ.get("GYP_GENERATORS", [])
+ generate_formats = os.environ.get("GYP_GENERATORS") or []
if generate_formats:
generate_formats = re.split(r"[\s,]", generate_formats)
if generate_formats:
options.formats = generate_formats
+ # Nothing in the variable, default based on platform.
+ elif sys.platform == "darwin":
+ options.formats = ["xcode"]
+ elif sys.platform in ("win32", "cygwin"):
+ options.formats = ["msvs"]
else:
- # Nothing in the variable, default based on platform.
- if sys.platform == "darwin":
- options.formats = ["xcode"]
- elif sys.platform in ("win32", "cygwin"):
- options.formats = ["msvs"]
- else:
- options.formats = ["make"]
+ options.formats = ["make"]
if not options.generator_output and options.use_environment:
g_o = os.environ.get("GYP_GENERATOR_OUTPUT")
@@ -683,7 +695,7 @@ def main(args):
return 1
-# NOTE: setuptools generated console_scripts calls function with no arguments
+# NOTE: console_scripts calls this function with no arguments
def script_main():
return main(sys.argv[1:])
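
For illustration only: the new EscapeForCString helper added above feeds the PRODUCT_DIR_ABS_CSTR default variable by octal-escaping quotes, backslashes, and any byte outside printable ASCII, so the path can be embedded in a C string literal. A standalone restatement of that logic with an invented sample path:

```python
# Illustrative sketch of the escaping behaviour of EscapeForCString; the
# function name and sample input below are not part of the patch.
def escape_for_c_string(string) -> str:
    if isinstance(string, str):
        string = string.encode("utf8")
    special = {ord("\\"), ord('"')}
    out = ""
    for byte in string:
        if byte in special or not 32 <= byte < 127:
            out += "\\%03o" % byte  # octal escape for quotes, backslashes, non-printables
        else:
            out += chr(byte)
    return out

# A path such as C:\out\"Début" becomes a literal safe to paste into C source:
print(escape_for_c_string('C:\\out\\"Début"'))
# -> C:\134out\134\042D\303\251but\042
```
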
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/common.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/common.py
index 762ae021090cac..fbf1024fc38319 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/common.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/common.py
@@ -6,11 +6,10 @@
import filecmp
import os.path
import re
-import tempfile
-import sys
-import subprocess
import shlex
-
+import subprocess
+import sys
+import tempfile
from collections.abc import MutableSet
@@ -35,7 +34,6 @@ class GypError(Exception):
to the user. The main entry point will catch and display this.
"""
- pass
def ExceptionAppend(e, msg):
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/common_test.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/common_test.py
index b6c4cccc1ac5ca..bd7172afaf3697 100755
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/common_test.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/common_test.py
@@ -6,11 +6,13 @@
"""Unit tests for the common.py file."""
-import gyp.common
-import unittest
-import sys
import os
-from unittest.mock import patch, MagicMock
+import sys
+import unittest
+from unittest.mock import MagicMock, patch
+
+import gyp.common
+
class TestTopologicallySorted(unittest.TestCase):
def test_Valid(self):
@@ -109,14 +111,14 @@ def mock_run(env, defines_stdout, expected_cmd):
return [defines, flavor]
[defines1, _] = mock_run({}, "", [])
- assert {} == defines1
+ assert defines1 == {}
[defines2, flavor2] = mock_run(
{ "CC_target": "/opt/wasi-sdk/bin/clang" },
"#define __wasm__ 1\n#define __wasi__ 1\n",
["/opt/wasi-sdk/bin/clang"]
)
- assert { "__wasm__": "1", "__wasi__": "1" } == defines2
+ assert defines2 == { "__wasm__": "1", "__wasi__": "1" }
assert flavor2 == "wasi"
[defines3, flavor3] = mock_run(
@@ -124,7 +126,7 @@ def mock_run(env, defines_stdout, expected_cmd):
"#define __wasm__ 1\n",
["/opt/wasi-sdk/bin/clang", "--target=wasm32"]
)
- assert { "__wasm__": "1" } == defines3
+ assert defines3 == { "__wasm__": "1" }
assert flavor3 == "wasm"
[defines4, flavor4] = mock_run(
@@ -132,7 +134,7 @@ def mock_run(env, defines_stdout, expected_cmd):
"#define __EMSCRIPTEN__ 1\n",
["/emsdk/upstream/emscripten/emcc"]
)
- assert { "__EMSCRIPTEN__": "1" } == defines4
+ assert defines4 == { "__EMSCRIPTEN__": "1" }
assert flavor4 == "emscripten"
# Test path which include white space
@@ -149,11 +151,11 @@ def mock_run(env, defines_stdout, expected_cmd):
"-pthread"
]
)
- assert {
+ assert defines5 == {
"__wasm__": "1",
"__wasi__": "1",
"_REENTRANT": "1"
- } == defines5
+ }
assert flavor5 == "wasi"
original_sep = os.sep
@@ -164,7 +166,7 @@ def mock_run(env, defines_stdout, expected_cmd):
["C:/Program Files/wasi-sdk/clang.exe"]
)
os.sep = original_sep
- assert { "__wasm__": "1", "__wasi__": "1" } == defines6
+ assert defines6 == { "__wasm__": "1", "__wasi__": "1" }
assert flavor6 == "wasi"
if __name__ == "__main__":
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py
index 02567b251446d7..e4d2f82b687418 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py
@@ -2,10 +2,10 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-import sys
-import re
-import os
import locale
+import os
+import re
+import sys
from functools import reduce
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py
index 2d9b15210dc126..bb97b802c59551 100755
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py
@@ -6,11 +6,11 @@
""" Unit tests for the easy_xml.py file. """
-import gyp.easy_xml as easy_xml
import unittest
-
from io import StringIO
+from gyp import easy_xml
+
class TestSequenceFunctions(unittest.TestCase):
def setUp(self):
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py
index 1334f2fca9967c..cb18742cd8df6d 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py
@@ -63,11 +63,12 @@
"""
-import gyp.common
import json
import os
import posixpath
+import gyp.common
+
debug = False
found_dependency_string = "Found dependency"
@@ -157,7 +158,7 @@ def _AddSources(sources, base_path, base_path_components, result):
and tracked in some other means."""
# NOTE: gyp paths are always posix style.
for source in sources:
- if not len(source) or source.startswith("!!!") or source.startswith("$"):
+ if not len(source) or source.startswith(("!!!", "$")):
continue
# variable expansion may lead to //.
org_source = source
@@ -699,7 +700,7 @@ def find_matching_test_target_names(self):
) & set(self._root_targets)
if matching_test_targets_contains_all:
# Remove any of the targets for all that were not explicitly supplied,
- # 'all' is subsequentely added to the matching names below.
+ # 'all' is subsequently added to the matching names below.
matching_test_targets = list(
set(matching_test_targets) & set(test_targets_no_all)
)
@@ -747,7 +748,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
if not config.files:
raise Exception(
- "Must specify files to analyze via config_path generator " "flag"
+ "Must specify files to analyze via config_path generator flag"
)
toplevel_dir = _ToGypPath(os.path.abspath(params["options"].toplevel_dir))
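
As an aside, the _AddSources tweak above relies on str.startswith accepting a tuple of prefixes, so a single call covers both the "!!!" and "$" cases; a tiny sketch (the helper name and sample paths are invented):

```python
# Not part of the patch: tuple form of str.startswith as used in _AddSources.
def is_skippable(source: str) -> bool:
    # One call replaces source.startswith("!!!") or source.startswith("$").
    return not source or source.startswith(("!!!", "$"))

assert is_skippable("$(builddir)/gen.cc")
assert is_skippable("!!!placeholder")
assert not is_skippable("src/foo.cc")
```
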
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py
index 2a63f412dbc836..5ebe58bb556d80 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py
@@ -15,13 +15,14 @@
# Try to avoid setting global variables where possible.
-import gyp
-import gyp.common
-import gyp.generator.make as make # Reuse global functions from make backend.
import os
import re
import subprocess
+import gyp
+import gyp.common
+from gyp.generator import make # Reuse global functions from make backend.
+
generator_default_variables = {
"OS": "android",
"EXECUTABLE_PREFIX": "",
@@ -177,7 +178,7 @@ def Write(
self.WriteLn("LOCAL_MULTILIB := $(GYP_HOST_MULTILIB)")
elif sdk_version > 0:
self.WriteLn(
- "LOCAL_MODULE_TARGET_ARCH := " "$(TARGET_$(GYP_VAR_PREFIX)ARCH)"
+ "LOCAL_MODULE_TARGET_ARCH := $(TARGET_$(GYP_VAR_PREFIX)ARCH)"
)
self.WriteLn("LOCAL_SDK_VERSION := %s" % sdk_version)
@@ -587,11 +588,10 @@ def WriteSources(self, spec, configs, extra_sources):
local_files = []
for source in sources:
(root, ext) = os.path.splitext(source)
- if "$(gyp_shared_intermediate_dir)" in source:
- extra_sources.append(source)
- elif "$(gyp_intermediate_dir)" in source:
- extra_sources.append(source)
- elif IsCPPExtension(ext) and ext != local_cpp_extension:
+ if ("$(gyp_shared_intermediate_dir)" in source
+ or "$(gyp_intermediate_dir)" in source
+ or (IsCPPExtension(ext) and ext != local_cpp_extension)
+ ):
extra_sources.append(source)
else:
local_files.append(os.path.normpath(os.path.join(self.path, source)))
@@ -730,19 +730,18 @@ def ComputeOutput(self, spec):
path = "$($(GYP_HOST_VAR_PREFIX)HOST_OUT_INTERMEDIATE_LIBRARIES)"
else:
path = "$($(GYP_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)"
+ # Other targets just get built into their intermediate dir.
+ elif self.toolset == "host":
+ path = (
+ "$(call intermediates-dir-for,%s,%s,true,,"
+ "$(GYP_HOST_VAR_PREFIX))"
+ % (self.android_class, self.android_module)
+ )
else:
- # Other targets just get built into their intermediate dir.
- if self.toolset == "host":
- path = (
- "$(call intermediates-dir-for,%s,%s,true,,"
- "$(GYP_HOST_VAR_PREFIX))"
- % (self.android_class, self.android_module)
- )
- else:
- path = (
- f"$(call intermediates-dir-for,{self.android_class},"
- f"{self.android_module},,,$(GYP_VAR_PREFIX))"
- )
+ path = (
+ f"$(call intermediates-dir-for,{self.android_class},"
+ f"{self.android_module},,,$(GYP_VAR_PREFIX))"
+ )
assert spec.get("product_dir") is None # TODO: not supported?
return os.path.join(path, self.ComputeOutputBasename(spec))
@@ -769,7 +768,7 @@ def ExtractIncludesFromCFlags(self, cflags):
Args:
cflags: A list of compiler flags, which may be mixed with "-I.."
Returns:
- A tuple of lists: (clean_clfags, include_paths). "-I.." is trimmed.
+ A tuple of lists: (clean_cflags, include_paths). "-I.." is trimmed.
"""
clean_cflags = []
include_paths = []
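
The WriteSources change above merges three branches that all ended in the same `extra_sources.append(source)` into one `or` condition. A toy re-creation of that reshaping, with hypothetical helper and file names standing in for the real gyp ones:

```python
import os

def is_cpp_extension(ext):  # stand-in for gyp's IsCPPExtension
    return ext in {".cc", ".cpp", ".cxx"}

def split_sources(sources, local_cpp_extension=".cpp"):
    """Generated files and C++ files with a mismatched extension become
    'extra' sources; everything else stays local, as in the merged branch."""
    extra_sources, local_files = [], []
    for source in sources:
        ext = os.path.splitext(source)[1]
        if ("$(gyp_shared_intermediate_dir)" in source
                or "$(gyp_intermediate_dir)" in source
                or (is_cpp_extension(ext) and ext != local_cpp_extension)):
            extra_sources.append(source)
        else:
            local_files.append(source)
    return extra_sources, local_files

print(split_sources(["$(gyp_intermediate_dir)/gen.cc", "a.cc", "b.cpp"]))
# (['$(gyp_intermediate_dir)/gen.cc', 'a.cc'], ['b.cpp'])
```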
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py
index 320a891aa8adc9..e69103e1b9ba3f 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py
@@ -33,6 +33,7 @@
import os
import signal
import subprocess
+
import gyp.common
import gyp.xcode_emulation
@@ -251,7 +252,7 @@ def WriteActions(target_name, actions, extra_sources, extra_deps, path_to_gyp, o
target_name: the name of the CMake target being generated.
actions: the Gyp 'actions' dict for this target.
      extra_sources: [(<cmake_src>, <src>)] to append with generated source files.
-      extra_deps: [<cmake_taget>] to append with generated targets.
+      extra_deps: [<cmake_target>] to append with generated targets.
path_to_gyp: relative path from CMakeLists.txt being generated to
the Gyp file in which the target being generated is defined.
"""
@@ -340,7 +341,7 @@ def WriteRules(target_name, rules, extra_sources, extra_deps, path_to_gyp, outpu
target_name: the name of the CMake target being generated.
actions: the Gyp 'actions' dict for this target.
      extra_sources: [(<cmake_src>, <src>)] to append with generated source files.
-      extra_deps: [<cmake_taget>] to append with generated targets.
+      extra_deps: [<cmake_target>] to append with generated targets.
path_to_gyp: relative path from CMakeLists.txt being generated to
the Gyp file in which the target being generated is defined.
"""
@@ -457,7 +458,7 @@ def WriteCopies(target_name, copies, extra_deps, path_to_gyp, output):
Args:
target_name: the name of the CMake target being generated.
actions: the Gyp 'actions' dict for this target.
-      extra_deps: [<cmake_taget>] to append with generated targets.
+      extra_deps: [<cmake_target>] to append with generated targets.
path_to_gyp: relative path from CMakeLists.txt being generated to
the Gyp file in which the target being generated is defined.
"""
@@ -603,7 +604,7 @@ class CMakeNamer:
"""
def __init__(self, target_list):
- self.cmake_target_base_names_conficting = set()
+ self.cmake_target_base_names_conflicting = set()
cmake_target_base_names_seen = set()
for qualified_target in target_list:
@@ -612,11 +613,11 @@ def __init__(self, target_list):
if cmake_target_base_name not in cmake_target_base_names_seen:
cmake_target_base_names_seen.add(cmake_target_base_name)
else:
- self.cmake_target_base_names_conficting.add(cmake_target_base_name)
+ self.cmake_target_base_names_conflicting.add(cmake_target_base_name)
def CreateCMakeTargetName(self, qualified_target):
base_name = CreateCMakeTargetBaseName(qualified_target)
- if base_name in self.cmake_target_base_names_conficting:
+ if base_name in self.cmake_target_base_names_conflicting:
return CreateCMakeTargetFullName(qualified_target)
return base_name
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/compile_commands_json.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/compile_commands_json.py
index 5d7f14da9699da..bebb1303154e16 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/compile_commands_json.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/compile_commands_json.py
@@ -2,11 +2,12 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-import gyp.common
-import gyp.xcode_emulation
import json
import os
+import gyp.common
+import gyp.xcode_emulation
+
generator_additional_non_configuration_keys = []
generator_additional_path_sections = []
generator_extra_sources_for_rules = []
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py
index 99d5c1fd69db36..e41c72d71070aa 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py
@@ -3,11 +3,12 @@
# found in the LICENSE file.
+import json
import os
+
import gyp
import gyp.common
import gyp.msvs_emulation
-import json
generator_supports_multiple_toolsets = True
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py
index 52aeae6050990b..ed6daa91bac3e7 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py
@@ -17,14 +17,15 @@
This generator has no automated tests, so expect it to be broken.
"""
-from xml.sax.saxutils import escape
import os.path
+import shlex
import subprocess
+import xml.etree.ElementTree as ET
+from xml.sax.saxutils import escape
+
import gyp
import gyp.common
import gyp.msvs_emulation
-import shlex
-import xml.etree.ElementTree as ET
generator_wants_static_library_dependencies_adjusted = False
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py
index 4171704c47a4b6..a0aa6d9245c811 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py
@@ -31,9 +31,9 @@
"""
-import gyp.common
import pprint
+import gyp.common
# These variables should just be spit back out as variable references.
_generator_identity_variables = [
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py
index 8dfb1f1645f77c..36a05deb7eb8b9 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py
@@ -17,7 +17,6 @@
import code
import sys
-
# All of this stuff about generator variables was lovingly ripped from gypd.py.
# That module has a much better description of what's going on and why.
_generator_identity_variables = [
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py
index 392d900914dea9..e860479069abaa 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py
@@ -22,17 +22,17 @@
# the side to keep the files readable.
+import hashlib
import os
import re
import subprocess
import sys
+
import gyp
import gyp.common
import gyp.xcode_emulation
from gyp.common import GetEnvironFallback
-import hashlib
-
generator_default_variables = {
"EXECUTABLE_PREFIX": "",
"EXECUTABLE_SUFFIX": "",
@@ -208,7 +208,7 @@ def CalculateGeneratorInputInfo(params):
LINK_COMMANDS_MAC = """\
quiet_cmd_alink = LIBTOOL-STATIC $@
-cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
+cmd_alink = rm -f $@ && %(python)s gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %%.o,$^)
quiet_cmd_link = LINK($(TOOLSET)) $@
cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
@@ -218,7 +218,7 @@ def CalculateGeneratorInputInfo(params):
quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-""" # noqa: E501
+""" % {'python': sys.executable} # noqa: E501
LINK_COMMANDS_ANDROID = """\
quiet_cmd_alink = AR($(TOOLSET)) $@
@@ -609,14 +609,14 @@ def CalculateGeneratorInputInfo(params):
# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd
# already.
quiet_cmd_mac_tool = MACTOOL $(4) $<
-cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
+cmd_mac_tool = %(python)s gyp-mac-tool $(4) $< "$@"
quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
-cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
+cmd_mac_package_framework = %(python)s gyp-mac-tool package-framework "$@" $(4)
quiet_cmd_infoplist = INFOPLIST $@
cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
-""" # noqa: E501
+""" % {'python': sys.executable} # noqa: E501
def WriteRootHeaderSuffixRules(writer):
@@ -788,7 +788,7 @@ def __init__(self, generator_flags, flavor):
self.suffix_rules_objdir2 = {}
# Generate suffix rules for all compilable extensions.
- for ext in COMPILABLE_EXTENSIONS:
+ for ext, value in COMPILABLE_EXTENSIONS.items():
# Suffix rules for source folder.
self.suffix_rules_srcdir.update(
{
@@ -797,7 +797,7 @@ def __init__(self, generator_flags, flavor):
$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(srcdir)/%%%s FORCE_DO_CMD
\t@$(call do_cmd,%s,1)
"""
- % (ext, COMPILABLE_EXTENSIONS[ext])
+ % (ext, value)
)
}
)
@@ -810,7 +810,7 @@ def __init__(self, generator_flags, flavor):
$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj).$(TOOLSET)/%%%s FORCE_DO_CMD
\t@$(call do_cmd,%s,1)
"""
- % (ext, COMPILABLE_EXTENSIONS[ext])
+ % (ext, value)
)
}
)
@@ -821,7 +821,7 @@ def __init__(self, generator_flags, flavor):
$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
\t@$(call do_cmd,%s,1)
"""
- % (ext, COMPILABLE_EXTENSIONS[ext])
+ % (ext, value)
)
}
)
@@ -1440,7 +1440,7 @@ def WriteSources(
for obj in objs:
assert " " not in obj, "Spaces in object filenames not supported (%s)" % obj
self.WriteLn(
- "# Add to the list of files we specially track " "dependencies for."
+ "# Add to the list of files we specially track dependencies for."
)
self.WriteLn("all_deps += $(OBJS)")
self.WriteLn()
@@ -1450,7 +1450,7 @@ def WriteSources(
self.WriteMakeRule(
["$(OBJS)"],
deps,
- comment="Make sure our dependencies are built " "before any of us.",
+ comment="Make sure our dependencies are built before any of us.",
order_only=True,
)
@@ -1461,7 +1461,7 @@ def WriteSources(
self.WriteMakeRule(
["$(OBJS)"],
extra_outputs,
- comment="Make sure our actions/rules run " "before any of us.",
+ comment="Make sure our actions/rules run before any of us.",
order_only=True,
)
@@ -1699,7 +1699,7 @@ def WriteTarget(
self.WriteMakeRule(
extra_outputs,
deps,
- comment=("Preserve order dependency of " "special output on deps."),
+ comment=("Preserve order dependency of special output on deps."),
order_only=True,
)
@@ -1738,7 +1738,8 @@ def WriteTarget(
# into the link command, so we need lots of escaping.
ldflags.append(r"-Wl,-rpath=\$$ORIGIN/")
ldflags.append(r"-Wl,-rpath-link=\$(builddir)/")
- library_dirs = config.get("library_dirs", [])
+ if library_dirs := config.get("library_dirs", []):
+ library_dirs = [Sourceify(self.Absolutify(i)) for i in library_dirs]
ldflags += [("-L%s" % library_dir) for library_dir in library_dirs]
self.WriteList(ldflags, "LDFLAGS_%s" % configname)
if self.flavor == "mac":
@@ -1779,13 +1780,13 @@ def WriteTarget(
# using ":=".
self.WriteSortedXcodeEnv(self.output, self.GetSortedXcodePostbuildEnv())
- for configname in target_postbuilds:
+ for configname, value in target_postbuilds.items():
self.WriteLn(
"%s: TARGET_POSTBUILDS_%s := %s"
% (
QuoteSpaces(self.output),
configname,
- gyp.common.EncodePOSIXShellList(target_postbuilds[configname]),
+ gyp.common.EncodePOSIXShellList(value),
)
)
@@ -1834,7 +1835,7 @@ def WriteTarget(
# Since this target depends on binary and resources which are in
# nested subfolders, the framework directory will be older than
# its dependencies usually. To prevent this rule from executing
- # on every build (expensive, especially with postbuilds), expliclity
+ # on every build (expensive, especially with postbuilds), explicitly
# update the time on the framework directory.
self.WriteLn("\t@touch -c %s" % QuoteSpaces(self.output))
@@ -1844,7 +1845,7 @@ def WriteTarget(
"on the bundle, not the binary (target '%s')" % self.target
)
assert "product_dir" not in spec, (
- "Postbuilds do not work with " "custom product_dir"
+ "Postbuilds do not work with custom product_dir"
)
if self.type == "executable":
@@ -1895,21 +1896,20 @@ def WriteTarget(
part_of_all,
postbuilds=postbuilds,
)
+ elif self.flavor in ("linux", "android"):
+ self.WriteMakeRule(
+ [self.output_binary],
+ link_deps,
+ actions=["$(call create_archive,$@,$^)"],
+ )
else:
- if self.flavor in ("linux", "android"):
- self.WriteMakeRule(
- [self.output_binary],
- link_deps,
- actions=["$(call create_archive,$@,$^)"],
- )
- else:
- self.WriteDoCmd(
- [self.output_binary],
- link_deps,
- "alink",
- part_of_all,
- postbuilds=postbuilds,
- )
+ self.WriteDoCmd(
+ [self.output_binary],
+ link_deps,
+ "alink",
+ part_of_all,
+ postbuilds=postbuilds,
+ )
elif self.type == "shared_library":
self.WriteLn(
"%s: LD_INPUTS := %s"
@@ -2498,7 +2498,7 @@ def CalculateMakefilePath(build_file, base_name):
"PLI.host": PLI_host,
}
if flavor == "mac":
- flock_command = "./gyp-mac-tool flock"
+ flock_command = "%s gyp-mac-tool flock" % sys.executable
header_params.update(
{
"flock": flock_command,
@@ -2548,7 +2548,7 @@ def CalculateMakefilePath(build_file, base_name):
header_params.update(
{
"copy_archive_args": copy_archive_arguments,
- "flock": "./gyp-flock-tool flock",
+ "flock": "%s gyp-flock-tool flock" % sys.executable,
"flock_index": 2,
}
)
@@ -2564,7 +2564,7 @@ def CalculateMakefilePath(build_file, base_name):
{
"copy_archive_args": copy_archive_arguments,
"link_commands": LINK_COMMANDS_AIX,
- "flock": "./gyp-flock-tool flock",
+ "flock": "%s gyp-flock-tool flock" % sys.executable,
"flock_index": 2,
}
)
@@ -2574,7 +2574,7 @@ def CalculateMakefilePath(build_file, base_name):
{
"copy_archive_args": copy_archive_arguments,
"link_commands": LINK_COMMANDS_OS400,
- "flock": "./gyp-flock-tool flock",
+ "flock": "%s gyp-flock-tool flock" % sys.executable,
"flock_index": 2,
}
)
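
The make.py templates above now route gyp-mac-tool and gyp-flock-tool through `sys.executable` via `%(python)s` placeholders. One detail worth noting: once a template is rendered with the `%` operator and a mapping, every literal percent sign (such as make's `%.o` patterns) has to be written as `%%`. A shortened, hypothetical template illustrating that:

```python
import sys

# Stand-in for a LINK_COMMANDS_MAC-style template: %(python)s is filled in now,
# while %%.o survives as a literal %.o for make to expand later.
template = (
    "cmd_alink = rm -f $@ && %(python)s gyp-mac-tool filter-libtool "
    "libtool -static -o $@ $(filter %%.o,$^)\n"
)

rendered = template % {"python": sys.executable}
print(rendered)
assert "%.o" in rendered and sys.executable in rendered
```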
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py
index 6b5b24acc00019..b4aea2e69a1939 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py
@@ -9,22 +9,21 @@
import re
import subprocess
import sys
-
from collections import OrderedDict
import gyp.common
-import gyp.easy_xml as easy_xml
import gyp.generator.ninja as ninja_generator
-import gyp.MSVSNew as MSVSNew
-import gyp.MSVSProject as MSVSProject
-import gyp.MSVSSettings as MSVSSettings
-import gyp.MSVSToolFile as MSVSToolFile
-import gyp.MSVSUserFile as MSVSUserFile
-import gyp.MSVSUtil as MSVSUtil
-import gyp.MSVSVersion as MSVSVersion
-from gyp.common import GypError
-from gyp.common import OrderedSet
-
+from gyp import (
+ MSVSNew,
+ MSVSProject,
+ MSVSSettings,
+ MSVSToolFile,
+ MSVSUserFile,
+ MSVSUtil,
+ MSVSVersion,
+ easy_xml,
+)
+from gyp.common import GypError, OrderedSet
# Regular expression for validating Visual Studio GUIDs. If the GUID
# contains lowercase hex letters, MSVS will be fine. However,
@@ -185,7 +184,7 @@ def _IsWindowsAbsPath(path):
it does not treat those as relative, which results in bad paths like:
'..\\C:\\\\some_source_code_file.cc'
"""
- return path.startswith("c:") or path.startswith("C:")
+ return path.startswith(("c:", "C:"))
def _FixPaths(paths, separator="\\"):
@@ -276,7 +275,7 @@ def _ToolAppend(tools, tool_name, setting, value, only_if_unset=False):
def _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset=False):
# TODO(bradnelson): ugly hack, fix this more generally!!!
if "Directories" in setting or "Dependencies" in setting:
- if type(value) == str:
+ if isinstance(value, str):
value = value.replace("/", "\\")
else:
value = [i.replace("/", "\\") for i in value]
@@ -288,7 +287,7 @@ def _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset=False):
if tool.get(setting):
if only_if_unset:
return
- if type(tool[setting]) == list and type(value) == list:
+ if isinstance(tool[setting], list) and isinstance(value, list):
tool[setting] += value
else:
raise TypeError(
@@ -1423,7 +1422,7 @@ def _ConvertToolsToExpectedForm(tools):
# Collapse settings with lists.
settings_fixed = {}
for setting, value in settings.items():
- if type(value) == list:
+ if isinstance(value, list):
if (
tool == "VCLinkerTool" and setting == "AdditionalDependencies"
) or setting == "AdditionalOptions":
@@ -1816,7 +1815,7 @@ def _DictsToFolders(base_path, bucket, flat):
# Convert to folders recursively.
children = []
for folder, contents in bucket.items():
- if type(contents) == dict:
+ if isinstance(contents, dict):
folder_children = _DictsToFolders(
os.path.join(base_path, folder), contents, flat
)
@@ -1838,9 +1837,10 @@ def _CollapseSingles(parent, node):
# Recursively explorer the tree of dicts looking for projects which are
# the sole item in a folder which has the same name as the project. Bring
# such projects up one level.
- if type(node) == dict and len(node) == 1 and next(iter(node)) == parent + ".vcproj":
+ if (isinstance(node, dict) and len(node) == 1 and
+ next(iter(node)) == parent + ".vcproj"):
return node[next(iter(node))]
- if type(node) != dict:
+ if not isinstance(node, dict):
return node
for child in node:
node[child] = _CollapseSingles(child, node[child])
@@ -1860,7 +1860,7 @@ def _GatherSolutionFolders(sln_projects, project_objects, flat):
# Walk down from the top until we hit a folder that has more than one entry.
# In practice, this strips the top-level "src/" dir from the hierarchy in
# the solution.
- while len(root) == 1 and type(root[next(iter(root))]) == dict:
+ while len(root) == 1 and isinstance(root[next(iter(root))], dict):
root = root[next(iter(root))]
# Collapse singles.
root = _CollapseSingles("", root)
@@ -2506,7 +2506,7 @@ def _GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules):
rule_name = rule.rule_name
target_outputs = "%%(%s.Outputs)" % rule_name
target_inputs = (
- "%%(%s.Identity);%%(%s.AdditionalDependencies);" "$(MSBuildProjectFile)"
+ "%%(%s.Identity);%%(%s.AdditionalDependencies);$(MSBuildProjectFile)"
) % (rule_name, rule_name)
rule_inputs = "%%(%s.Identity)" % rule_name
extension_condition = (
@@ -3099,9 +3099,7 @@ def _ConvertMSVSBuildAttributes(spec, config, build_file):
msbuild_attributes[a] = _ConvertMSVSCharacterSet(msvs_attributes[a])
elif a == "ConfigurationType":
msbuild_attributes[a] = _ConvertMSVSConfigurationType(msvs_attributes[a])
- elif a == "SpectreMitigation":
- msbuild_attributes[a] = msvs_attributes[a]
- elif a == "VCToolsVersion":
+ elif a == "SpectreMitigation" or a == "VCToolsVersion":
msbuild_attributes[a] = msvs_attributes[a]
else:
print("Warning: Do not know how to convert MSVS attribute " + a)
@@ -3274,7 +3272,7 @@ def _GetMSBuildPropertyGroup(spec, label, properties):
num_configurations = len(spec["configurations"])
def GetEdges(node):
- # Use a definition of edges such that user_of_variable -> used_varible.
+ # Use a definition of edges such that user_of_variable -> used_variable.
# This happens to be easier in this case, since a variable's
# definition contains all variables it references in a single string.
edges = set()
@@ -3411,7 +3409,11 @@ def _FinalizeMSBuildSettings(spec, configuration):
)
# Turn on precompiled headers if appropriate.
if precompiled_header:
- precompiled_header = os.path.split(precompiled_header)[1]
+ # While MSVC works with just file name eg. "v8_pch.h", ClangCL requires
+ # the full path eg. "tools/msvs/pch/v8_pch.h" to find the file.
+ # P.S. Only ClangCL defines msbuild_toolset, for MSVC it is None.
+ if configuration.get("msbuild_toolset") != 'ClangCL':
+ precompiled_header = os.path.split(precompiled_header)[1]
_ToolAppend(msbuild_settings, "ClCompile", "PrecompiledHeader", "Use")
_ToolAppend(
msbuild_settings, "ClCompile", "PrecompiledHeaderFile", precompiled_header
@@ -3441,7 +3443,7 @@ def _FinalizeMSBuildSettings(spec, configuration):
def _GetValueFormattedForMSBuild(tool_name, name, value):
- if type(value) == list:
+ if isinstance(value, list):
# For some settings, VS2010 does not automatically extends the settings
# TODO(jeanluc) Is this what we want?
if name in [
@@ -3486,11 +3488,10 @@ def _VerifySourcesExist(sources, root_dir):
for source in sources:
if isinstance(source, MSVSProject.Filter):
missing_sources.extend(_VerifySourcesExist(source.contents, root_dir))
- else:
- if "$" not in source:
- full_path = os.path.join(root_dir, source)
- if not os.path.exists(full_path):
- missing_sources.append(full_path)
+ elif "$" not in source:
+ full_path = os.path.join(root_dir, source)
+ if not os.path.exists(full_path):
+ missing_sources.append(full_path)
return missing_sources
@@ -3560,75 +3561,74 @@ def _AddSources2(
sources_handled_by_action,
list_excluded,
)
- else:
- if source not in sources_handled_by_action:
- detail = []
- excluded_configurations = exclusions.get(source, [])
- if len(excluded_configurations) == len(spec["configurations"]):
- detail.append(["ExcludedFromBuild", "true"])
- else:
- for config_name, configuration in sorted(excluded_configurations):
- condition = _GetConfigurationCondition(
- config_name, configuration
- )
- detail.append(
- ["ExcludedFromBuild", {"Condition": condition}, "true"]
- )
- # Add precompile if needed
- for config_name, configuration in spec["configurations"].items():
- precompiled_source = configuration.get(
- "msvs_precompiled_source", ""
+ elif source not in sources_handled_by_action:
+ detail = []
+ excluded_configurations = exclusions.get(source, [])
+ if len(excluded_configurations) == len(spec["configurations"]):
+ detail.append(["ExcludedFromBuild", "true"])
+ else:
+ for config_name, configuration in sorted(excluded_configurations):
+ condition = _GetConfigurationCondition(
+ config_name, configuration
)
- if precompiled_source != "":
- precompiled_source = _FixPath(precompiled_source)
- if not extensions_excluded_from_precompile:
- # If the precompiled header is generated by a C source,
- # we must not try to use it for C++ sources,
- # and vice versa.
- basename, extension = os.path.splitext(precompiled_source)
- if extension == ".c":
- extensions_excluded_from_precompile = [
- ".cc",
- ".cpp",
- ".cxx",
- ]
- else:
- extensions_excluded_from_precompile = [".c"]
-
- if precompiled_source == source:
- condition = _GetConfigurationCondition(
- config_name, configuration, spec
- )
- detail.append(
- ["PrecompiledHeader", {"Condition": condition}, "Create"]
- )
- else:
- # Turn off precompiled header usage for source files of a
- # different type than the file that generated the
- # precompiled header.
- for extension in extensions_excluded_from_precompile:
- if source.endswith(extension):
- detail.append(["PrecompiledHeader", ""])
- detail.append(["ForcedIncludeFiles", ""])
-
- group, element = _MapFileToMsBuildSourceType(
- source,
- rule_dependencies,
- extension_to_rule_name,
- _GetUniquePlatforms(spec),
- spec["toolset"],
+ detail.append(
+ ["ExcludedFromBuild", {"Condition": condition}, "true"]
+ )
+ # Add precompile if needed
+ for config_name, configuration in spec["configurations"].items():
+ precompiled_source = configuration.get(
+ "msvs_precompiled_source", ""
)
- if group == "compile" and not os.path.isabs(source):
-                # Add an <ObjectFileName> value to support duplicate source
- # file basenames, except for absolute paths to avoid paths
- # with more than 260 characters.
- file_name = os.path.splitext(source)[0] + ".obj"
- if file_name.startswith("..\\"):
- file_name = re.sub(r"^(\.\.\\)+", "", file_name)
- elif file_name.startswith("$("):
- file_name = re.sub(r"^\$\([^)]+\)\\", "", file_name)
- detail.append(["ObjectFileName", "$(IntDir)\\" + file_name])
- grouped_sources[group].append([element, {"Include": source}] + detail)
+ if precompiled_source != "":
+ precompiled_source = _FixPath(precompiled_source)
+ if not extensions_excluded_from_precompile:
+ # If the precompiled header is generated by a C source,
+ # we must not try to use it for C++ sources,
+ # and vice versa.
+ basename, extension = os.path.splitext(precompiled_source)
+ if extension == ".c":
+ extensions_excluded_from_precompile = [
+ ".cc",
+ ".cpp",
+ ".cxx",
+ ]
+ else:
+ extensions_excluded_from_precompile = [".c"]
+
+ if precompiled_source == source:
+ condition = _GetConfigurationCondition(
+ config_name, configuration, spec
+ )
+ detail.append(
+ ["PrecompiledHeader", {"Condition": condition}, "Create"]
+ )
+ else:
+ # Turn off precompiled header usage for source files of a
+ # different type than the file that generated the
+ # precompiled header.
+ for extension in extensions_excluded_from_precompile:
+ if source.endswith(extension):
+ detail.append(["PrecompiledHeader", ""])
+ detail.append(["ForcedIncludeFiles", ""])
+
+ group, element = _MapFileToMsBuildSourceType(
+ source,
+ rule_dependencies,
+ extension_to_rule_name,
+ _GetUniquePlatforms(spec),
+ spec["toolset"],
+ )
+ if group == "compile" and not os.path.isabs(source):
+            # Add an <ObjectFileName> value to support duplicate source
+ # file basenames, except for absolute paths to avoid paths
+ # with more than 260 characters.
+ file_name = os.path.splitext(source)[0] + ".obj"
+ if file_name.startswith("..\\"):
+ file_name = re.sub(r"^(\.\.\\)+", "", file_name)
+ elif file_name.startswith("$("):
+ file_name = re.sub(r"^\$\([^)]+\)\\", "", file_name)
+ detail.append(["ObjectFileName", "$(IntDir)\\" + file_name])
+ grouped_sources[group].append([element, {"Include": source}] + detail)
def _GetMSBuildProjectReferences(project):
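
The msvs.py hunks above (and the input.py ones later in this diff) replace `type(x) == list`-style comparisons with `isinstance`. Beyond being the idiomatic spelling, `isinstance` also accepts subclasses and tuples of types; a small self-contained illustration with a made-up subclass:

```python
class PathList(list):  # hypothetical subclass, purely for illustration
    pass

value = PathList(["a.cc", "b.cc"])

print(type(value) == list)          # False: exact-type comparison ignores subclasses
print(isinstance(value, list))      # True: subclasses still count as lists
print(isinstance("x", (str, int)))  # True: a tuple checks several types at once
```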
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py
index e80b57f06a130c..8cea3d1479e3b0 100755
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py
@@ -5,11 +5,11 @@
""" Unit tests for the msvs.py file. """
-import gyp.generator.msvs as msvs
import unittest
-
from io import StringIO
+from gyp.generator import msvs
+
class TestSequenceFunctions(unittest.TestCase):
def setUp(self):
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py
index 0146c4996260a6..b7ac823d1490d6 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py
@@ -10,20 +10,18 @@
import multiprocessing
import os.path
import re
-import signal
import shutil
+import signal
import subprocess
import sys
+from io import StringIO
+
import gyp
import gyp.common
import gyp.msvs_emulation
-import gyp.MSVSUtil as MSVSUtil
import gyp.xcode_emulation
-
-from io import StringIO
-
+from gyp import MSVSUtil, ninja_syntax
from gyp.common import GetEnvironFallback
-import gyp.ninja_syntax as ninja_syntax
generator_default_variables = {
"EXECUTABLE_PREFIX": "",
@@ -1465,7 +1463,7 @@ def WriteLinkForArch(
# Respect environment variables related to build, but target-specific
# flags can still override them.
ldflags = env_ldflags + config.get("ldflags", [])
- if is_executable and len(solibs):
+ if is_executable and solibs:
rpath = "lib/"
if self.toolset != "target":
rpath += self.toolset
@@ -1555,7 +1553,7 @@ def WriteLinkForArch(
if pdbname:
output = [output, pdbname]
- if len(solibs):
+ if solibs:
extra_bindings.append(
("solibs", gyp.common.EncodePOSIXShellList(sorted(solibs)))
)
@@ -2085,7 +2083,7 @@ def CommandWithWrapper(cmd, wrappers, prog):
def GetDefaultConcurrentLinks():
"""Returns a best-guess for a number of concurrent links."""
- pool_size = int(os.environ.get("GYP_LINK_CONCURRENCY", 0))
+ pool_size = int(os.environ.get("GYP_LINK_CONCURRENCY") or 0)
if pool_size:
return pool_size
@@ -2112,7 +2110,7 @@ class MEMORYSTATUSEX(ctypes.Structure):
# VS 2015 uses 20% more working set than VS 2013 and can consume all RAM
# on a 64 GiB machine.
mem_limit = max(1, stat.ullTotalPhys // (5 * (2 ** 30))) # total / 5GiB
- hard_cap = max(1, int(os.environ.get("GYP_LINK_CONCURRENCY_MAX", 2 ** 32)))
+ hard_cap = max(1, int(os.environ.get("GYP_LINK_CONCURRENCY_MAX") or 2 ** 32))
return min(mem_limit, hard_cap)
elif sys.platform.startswith("linux"):
if os.path.exists("/proc/meminfo"):
@@ -2535,7 +2533,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, config_name
% {"suffix": "@$link_file_list"},
rspfile="$link_file_list",
rspfile_content=(
- "-Wl,--whole-archive $in $solibs -Wl," "--no-whole-archive $libs"
+ "-Wl,--whole-archive $in $solibs -Wl,--no-whole-archive $libs"
),
pool="link_pool",
)
@@ -2595,9 +2593,9 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, config_name
"alink",
description="LIBTOOL-STATIC $out, POSTBUILDS",
command="rm -f $out && "
- "./gyp-mac-tool filter-libtool libtool $libtool_flags "
+ "%s gyp-mac-tool filter-libtool libtool $libtool_flags "
"-static -o $out $in"
- "$postbuilds",
+ "$postbuilds" % sys.executable,
)
master_ninja.rule(
"lipo",
@@ -2684,7 +2682,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, config_name
master_ninja.rule(
"link",
description="LINK $out, POSTBUILDS",
- command=("$ld $ldflags -o $out " "$in $solibs $libs$postbuilds"),
+ command=("$ld $ldflags -o $out $in $solibs $libs$postbuilds"),
pool="link_pool",
)
master_ninja.rule(
@@ -2698,41 +2696,44 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, config_name
master_ninja.rule(
"copy_infoplist",
description="COPY INFOPLIST $in",
- command="$env ./gyp-mac-tool copy-info-plist $in $out $binary $keys",
+ command="$env %s gyp-mac-tool copy-info-plist $in $out $binary $keys"
+ % sys.executable,
)
master_ninja.rule(
"merge_infoplist",
description="MERGE INFOPLISTS $in",
- command="$env ./gyp-mac-tool merge-info-plist $out $in",
+ command="$env %s gyp-mac-tool merge-info-plist $out $in" % sys.executable,
)
master_ninja.rule(
"compile_xcassets",
description="COMPILE XCASSETS $in",
- command="$env ./gyp-mac-tool compile-xcassets $keys $in",
+ command="$env %s gyp-mac-tool compile-xcassets $keys $in" % sys.executable,
)
master_ninja.rule(
"compile_ios_framework_headers",
description="COMPILE HEADER MAPS AND COPY FRAMEWORK HEADERS $in",
- command="$env ./gyp-mac-tool compile-ios-framework-header-map $out "
- "$framework $in && $env ./gyp-mac-tool "
- "copy-ios-framework-headers $framework $copy_headers",
+ command="$env %(python)s gyp-mac-tool compile-ios-framework-header-map "
+ "$out $framework $in && $env %(python)s gyp-mac-tool "
+ "copy-ios-framework-headers $framework $copy_headers"
+ % {'python': sys.executable},
)
master_ninja.rule(
"mac_tool",
description="MACTOOL $mactool_cmd $in",
- command="$env ./gyp-mac-tool $mactool_cmd $in $out $binary",
+ command="$env %s gyp-mac-tool $mactool_cmd $in $out $binary"
+ % sys.executable,
)
master_ninja.rule(
"package_framework",
description="PACKAGE FRAMEWORK $out, POSTBUILDS",
- command="./gyp-mac-tool package-framework $out $version$postbuilds "
- "&& touch $out",
+ command="%s gyp-mac-tool package-framework $out $version$postbuilds "
+ "&& touch $out" % sys.executable,
)
master_ninja.rule(
"package_ios_framework",
description="PACKAGE IOS FRAMEWORK $out, POSTBUILDS",
- command="./gyp-mac-tool package-ios-framework $out $postbuilds "
- "&& touch $out",
+ command="%s gyp-mac-tool package-ios-framework $out $postbuilds "
+ "&& touch $out" % sys.executable,
)
if flavor == "win":
master_ninja.rule(
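
GetDefaultConcurrentLinks above switches from `os.environ.get("GYP_LINK_CONCURRENCY", 0)` to `os.environ.get("GYP_LINK_CONCURRENCY") or 0`. The difference only shows when the variable is set but empty: the default argument is not used then, so `int("")` raises, while the `or` form still falls back. A quick sketch:

```python
import os

os.environ["GYP_LINK_CONCURRENCY"] = ""  # set, but empty

try:
    int(os.environ.get("GYP_LINK_CONCURRENCY", 0))  # default only applies when unset
except ValueError as err:
    print("default-argument form raises:", err)

print(int(os.environ.get("GYP_LINK_CONCURRENCY") or 0))  # 0: '' is falsy, so the fallback applies
```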
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py
index 15cddfdf2443bf..581b14595e143e 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py
@@ -6,11 +6,11 @@
""" Unit tests for the ninja.py file. """
-from pathlib import Path
import sys
import unittest
+from pathlib import Path
-import gyp.generator.ninja as ninja
+from gyp.generator import ninja
class TestPrefixesAndSuffixes(unittest.TestCase):
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py
index 1ac672c3876bd9..cdf11c3b27b1d5 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py
@@ -3,19 +3,19 @@
# found in the LICENSE file.
-import filecmp
-import gyp.common
-import gyp.xcodeproj_file
-import gyp.xcode_ninja
import errno
+import filecmp
import os
-import sys
import posixpath
import re
import shutil
import subprocess
+import sys
import tempfile
+import gyp.common
+import gyp.xcode_ninja
+import gyp.xcodeproj_file
# Project files generated by this module will use _intermediate_var as a
# custom Xcode setting whose value is a DerivedSources-like directory that's
@@ -793,7 +793,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
except KeyError as e:
gyp.common.ExceptionAppend(
e,
- "-- unknown product type while " "writing target %s" % target_name,
+ "-- unknown product type while writing target %s" % target_name,
)
raise
else:
@@ -959,7 +959,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
# would-be additional inputs are newer than the output. Modifying
# the source tree - even just modification times - feels dirty.
# 6564240 Xcode "custom script" build rules always dump all environment
- # variables. This is a low-prioroty problem and is not a
+ # variables. This is a low-priority problem and is not a
# show-stopper.
rules_by_ext = {}
for rule in spec_rules:
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode_test.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode_test.py
index 49772d1f4d8103..b0b51a08a6db48 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode_test.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode_test.py
@@ -6,9 +6,10 @@
""" Unit tests for the xcode.py file. """
-import gyp.generator.xcode as xcode
-import unittest
import sys
+import unittest
+
+from gyp.generator import xcode
class TestEscapeXcodeDefine(unittest.TestCase):
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/input.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/input.py
index 7150269cda585e..994bf6625fb81d 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/input.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/input.py
@@ -4,9 +4,6 @@
import ast
-
-import gyp.common
-import gyp.simple_copy
import multiprocessing
import os.path
import re
@@ -16,10 +13,13 @@
import sys
import threading
import traceback
-from gyp.common import GypError
-from gyp.common import OrderedSet
+
from packaging.version import Version
+import gyp.common
+import gyp.simple_copy
+from gyp.common import GypError, OrderedSet
+
# A list of types that are treated as linkable.
linkable_types = [
"executable",
@@ -242,7 +242,7 @@ def LoadOneBuildFile(build_file_path, data, aux_data, includes, is_target, check
gyp.common.ExceptionAppend(e, "while reading " + build_file_path)
raise
- if type(build_file_data) is not dict:
+ if not isinstance(build_file_data, dict):
raise GypError("%s does not evaluate to a dictionary." % build_file_path)
data[build_file_path] = build_file_data
@@ -303,20 +303,20 @@ def LoadBuildFileIncludesIntoDict(
# Recurse into subdictionaries.
for k, v in subdict.items():
- if type(v) is dict:
+ if isinstance(v, dict):
LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data, None, check)
- elif type(v) is list:
+ elif isinstance(v, list):
LoadBuildFileIncludesIntoList(v, subdict_path, data, aux_data, check)
# This recurses into lists so that it can look for dicts.
def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data, check):
for item in sublist:
- if type(item) is dict:
+ if isinstance(item, dict):
LoadBuildFileIncludesIntoDict(
item, sublist_path, data, aux_data, None, check
)
- elif type(item) is list:
+ elif isinstance(item, list):
LoadBuildFileIncludesIntoList(item, sublist_path, data, aux_data, check)
@@ -350,9 +350,9 @@ def ProcessToolsetsInDict(data):
data["targets"] = new_target_list
if "conditions" in data:
for condition in data["conditions"]:
- if type(condition) is list:
+ if isinstance(condition, list):
for condition_dict in condition[1:]:
- if type(condition_dict) is dict:
+ if isinstance(condition_dict, dict):
ProcessToolsetsInDict(condition_dict)
@@ -694,7 +694,7 @@ def IsStrCanonicalInt(string):
The canonical form is such that str(int(string)) == string.
"""
- if type(string) is str:
+ if isinstance(string, str):
# This function is called a lot so for maximum performance, avoid
# involving regexps which would otherwise make the code much
# shorter. Regexps would need twice the time of this function.
@@ -744,7 +744,7 @@ def IsStrCanonicalInt(string):
def FixupPlatformCommand(cmd):
if sys.platform == "win32":
- if type(cmd) is list:
+ if isinstance(cmd, list):
cmd = [re.sub("^cat ", "type ", cmd[0])] + cmd[1:]
else:
cmd = re.sub("^cat ", "type ", cmd)
@@ -870,7 +870,8 @@ def ExpandVariables(input, phase, variables, build_file):
# This works around actions/rules which have more inputs than will
# fit on the command line.
if file_list:
- contents_list = contents if type(contents) is list else contents.split(" ")
+ contents_list = (contents if isinstance(contents, list)
+ else contents.split(" "))
replacement = contents_list[0]
if os.path.isabs(replacement):
raise GypError('| cannot handle absolute paths, got "%s"' % replacement)
@@ -989,29 +990,28 @@ def ExpandVariables(input, phase, variables, build_file):
)
replacement = cached_value
- else:
- if contents not in variables:
- if contents[-1] in ["!", "/"]:
- # In order to allow cross-compiles (nacl) to happen more naturally,
- # we will allow references to >(sources/) etc. to resolve to
- # and empty list if undefined. This allows actions to:
- # 'action!': [
- # '>@(_sources!)',
- # ],
- # 'action/': [
- # '>@(_sources/)',
- # ],
- replacement = []
- else:
- raise GypError(
- "Undefined variable " + contents + " in " + build_file
- )
+ elif contents not in variables:
+ if contents[-1] in ["!", "/"]:
+ # In order to allow cross-compiles (nacl) to happen more naturally,
+ # we will allow references to >(sources/) etc. to resolve to
+ # and empty list if undefined. This allows actions to:
+ # 'action!': [
+ # '>@(_sources!)',
+ # ],
+ # 'action/': [
+ # '>@(_sources/)',
+ # ],
+ replacement = []
else:
- replacement = variables[contents]
+ raise GypError(
+ "Undefined variable " + contents + " in " + build_file
+ )
+ else:
+ replacement = variables[contents]
if isinstance(replacement, bytes) and not isinstance(replacement, str):
replacement = replacement.decode("utf-8") # done on Python 3 only
- if type(replacement) is list:
+ if isinstance(replacement, list):
for item in replacement:
if isinstance(item, bytes) and not isinstance(item, str):
item = item.decode("utf-8") # done on Python 3 only
@@ -1042,7 +1042,7 @@ def ExpandVariables(input, phase, variables, build_file):
# Expanding in list context. It's guaranteed that there's only one
# replacement to do in |input_str| and that it's this replacement. See
# above.
- if type(replacement) is list:
+ if isinstance(replacement, list):
# If it's already a list, make a copy.
output = replacement[:]
else:
@@ -1051,7 +1051,7 @@ def ExpandVariables(input, phase, variables, build_file):
else:
# Expanding in string context.
encoded_replacement = ""
- if type(replacement) is list:
+ if isinstance(replacement, list):
# When expanding a list into string context, turn the list items
# into a string in a way that will work with a subprocess call.
#
@@ -1073,7 +1073,7 @@ def ExpandVariables(input, phase, variables, build_file):
if output == input:
gyp.DebugOutput(
gyp.DEBUG_VARIABLES,
- "Found only identity matches on %r, avoiding infinite " "recursion.",
+ "Found only identity matches on %r, avoiding infinite recursion.",
output,
)
else:
@@ -1081,8 +1081,8 @@ def ExpandVariables(input, phase, variables, build_file):
# expanding local variables (variables defined in the same
# variables block as this one).
gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Found output %r, recursing.", output)
- if type(output) is list:
- if output and type(output[0]) is list:
+ if isinstance(output, list):
+ if output and isinstance(output[0], list):
# Leave output alone if it's a list of lists.
# We don't want such lists to be stringified.
pass
@@ -1097,7 +1097,7 @@ def ExpandVariables(input, phase, variables, build_file):
output = ExpandVariables(output, phase, variables, build_file)
# Convert all strings that are canonically-represented integers into integers.
- if type(output) is list:
+ if isinstance(output, list):
for index, outstr in enumerate(output):
if IsStrCanonicalInt(outstr):
output[index] = int(outstr)
@@ -1115,7 +1115,7 @@ def ExpandVariables(input, phase, variables, build_file):
def EvalCondition(condition, conditions_key, phase, variables, build_file):
"""Returns the dict that should be used or None if the result was
that nothing should be used."""
- if type(condition) is not list:
+ if not isinstance(condition, list):
raise GypError(conditions_key + " must be a list")
if len(condition) < 2:
# It's possible that condition[0] won't work in which case this
@@ -1133,12 +1133,12 @@ def EvalCondition(condition, conditions_key, phase, variables, build_file):
while i < len(condition):
cond_expr = condition[i]
true_dict = condition[i + 1]
- if type(true_dict) is not dict:
+ if not isinstance(true_dict, dict):
raise GypError(
f"{conditions_key} {cond_expr} must be followed by a dictionary, "
f"not {type(true_dict)}"
)
- if len(condition) > i + 2 and type(condition[i + 2]) is dict:
+ if len(condition) > i + 2 and isinstance(condition[i + 2], dict):
false_dict = condition[i + 2]
i = i + 3
if i != len(condition):
@@ -1239,7 +1239,7 @@ def ProcessConditionsInDict(the_dict, phase, variables, build_file):
)
if merge_dict is not None:
- # Expand variables and nested conditinals in the merge_dict before
+ # Expand variables and nested conditionals in the merge_dict before
# merging it.
ProcessVariablesAndConditionsInDict(
merge_dict, phase, variables, build_file
@@ -1320,7 +1320,7 @@ def ProcessVariablesAndConditionsInDict(
for key, value in the_dict.items():
# Skip "variables", which was already processed if present.
- if key != "variables" and type(value) is str:
+ if key != "variables" and isinstance(value, str):
expanded = ExpandVariables(value, phase, variables, build_file)
if type(expanded) not in (str, int):
raise ValueError(
@@ -1383,21 +1383,21 @@ def ProcessVariablesAndConditionsInDict(
for key, value in the_dict.items():
# Skip "variables" and string values, which were already processed if
# present.
- if key == "variables" or type(value) is str:
+ if key == "variables" or isinstance(value, str):
continue
- if type(value) is dict:
+ if isinstance(value, dict):
# Pass a copy of the variables dict so that subdicts can't influence
# parents.
ProcessVariablesAndConditionsInDict(
value, phase, variables, build_file, key
)
- elif type(value) is list:
+ elif isinstance(value, list):
# The list itself can't influence the variables dict, and
# ProcessVariablesAndConditionsInList will make copies of the variables
# dict if it needs to pass it to something that can influence it. No
# copy is necessary here.
ProcessVariablesAndConditionsInList(value, phase, variables, build_file)
- elif type(value) is not int:
+ elif not isinstance(value, int):
raise TypeError("Unknown type " + value.__class__.__name__ + " for " + key)
@@ -1406,17 +1406,17 @@ def ProcessVariablesAndConditionsInList(the_list, phase, variables, build_file):
index = 0
while index < len(the_list):
item = the_list[index]
- if type(item) is dict:
+ if isinstance(item, dict):
# Make a copy of the variables dict so that it won't influence anything
# outside of its own scope.
ProcessVariablesAndConditionsInDict(item, phase, variables, build_file)
- elif type(item) is list:
+ elif isinstance(item, list):
ProcessVariablesAndConditionsInList(item, phase, variables, build_file)
- elif type(item) is str:
+ elif isinstance(item, str):
expanded = ExpandVariables(item, phase, variables, build_file)
if type(expanded) in (str, int):
the_list[index] = expanded
- elif type(expanded) is list:
+ elif isinstance(expanded, list):
the_list[index : index + 1] = expanded
index += len(expanded)
@@ -1431,7 +1431,7 @@ def ProcessVariablesAndConditionsInList(the_list, phase, variables, build_file):
+ " at "
+ index
)
- elif type(item) is not int:
+ elif not isinstance(item, int):
raise TypeError(
"Unknown type " + item.__class__.__name__ + " at index " + index
)
@@ -2232,18 +2232,18 @@ def is_in_set_or_list(x, s, items):
# The cheap and easy case.
to_item = MakePathRelative(to_file, fro_file, item) if is_paths else item
- if not (type(item) is str and item.startswith("-")):
+ if not (isinstance(item, str) and item.startswith("-")):
# Any string that doesn't begin with a "-" is a singleton - it can
# only appear once in a list, to be enforced by the list merge append
# or prepend.
singleton = True
- elif type(item) is dict:
+ elif isinstance(item, dict):
# Make a copy of the dictionary, continuing to look for paths to fix.
# The other intelligent aspects of merge processing won't apply because
# item is being merged into an empty dict.
to_item = {}
MergeDicts(to_item, item, to_file, fro_file)
- elif type(item) is list:
+ elif isinstance(item, list):
# Recurse, making a copy of the list. If the list contains any
# descendant dicts, path fixing will occur. Note that here, custom
# values for is_paths and append are dropped; those are only to be
@@ -2312,12 +2312,12 @@ def MergeDicts(to, fro, to_file, fro_file):
to[k] = MakePathRelative(to_file, fro_file, v)
else:
to[k] = v
- elif type(v) is dict:
+ elif isinstance(v, dict):
# Recurse, guaranteeing copies will be made of objects that require it.
if k not in to:
to[k] = {}
MergeDicts(to[k], v, to_file, fro_file)
- elif type(v) is list:
+ elif isinstance(v, list):
# Lists in dicts can be merged with different policies, depending on
# how the key in the "from" dict (k, the from-key) is written.
#
@@ -2361,7 +2361,7 @@ def MergeDicts(to, fro, to_file, fro_file):
# If the key ends in "?", the list will only be merged if it doesn't
# already exist.
continue
- elif type(to[list_base]) is not list:
+ elif not isinstance(to[list_base], list):
# This may not have been checked above if merging in a list with an
# extension character.
raise TypeError(
@@ -2468,11 +2468,8 @@ def SetUpConfigurations(target, target_dict):
merged_configurations[configuration] = new_configuration_dict
# Put the new configurations back into the target dict as a configuration.
- for configuration in merged_configurations:
- target_dict["configurations"][configuration] = merged_configurations[
- configuration
- ]
-
+ for configuration, value in merged_configurations.items():
+ target_dict["configurations"][configuration] = value
# Now drop all the abstract ones.
configs = target_dict["configurations"]
target_dict["configurations"] = {
@@ -2542,7 +2539,7 @@ def ProcessListFiltersInDict(name, the_dict):
if operation not in {"!", "/"}:
continue
- if type(value) is not list:
+ if not isinstance(value, list):
raise ValueError(
name + " key " + key + " must be list, not " + value.__class__.__name__
)
@@ -2555,7 +2552,7 @@ def ProcessListFiltersInDict(name, the_dict):
del_lists.append(key)
continue
- if type(the_dict[list_key]) is not list:
+ if not isinstance(the_dict[list_key], list):
value = the_dict[list_key]
raise ValueError(
name
@@ -2668,17 +2665,17 @@ def ProcessListFiltersInDict(name, the_dict):
# Now recurse into subdicts and lists that may contain dicts.
for key, value in the_dict.items():
- if type(value) is dict:
+ if isinstance(value, dict):
ProcessListFiltersInDict(key, value)
- elif type(value) is list:
+ elif isinstance(value, list):
ProcessListFiltersInList(key, value)
def ProcessListFiltersInList(name, the_list):
for item in the_list:
- if type(item) is dict:
+ if isinstance(item, dict):
ProcessListFiltersInDict(name, item)
- elif type(item) is list:
+ elif isinstance(item, list):
ProcessListFiltersInList(name, item)
@@ -2788,7 +2785,7 @@ def ValidateRunAsInTarget(target, target_dict, build_file):
run_as = target_dict.get("run_as")
if not run_as:
return
- if type(run_as) is not dict:
+ if not isinstance(run_as, dict):
raise GypError(
"The 'run_as' in target %s from file %s should be a "
"dictionary." % (target_name, build_file)
@@ -2799,19 +2796,19 @@ def ValidateRunAsInTarget(target, target_dict, build_file):
"The 'run_as' in target %s from file %s must have an "
"'action' section." % (target_name, build_file)
)
- if type(action) is not list:
+ if not isinstance(action, list):
raise GypError(
"The 'action' for 'run_as' in target %s from file %s "
"must be a list." % (target_name, build_file)
)
working_directory = run_as.get("working_directory")
- if working_directory and type(working_directory) is not str:
+ if working_directory and not isinstance(working_directory, str):
raise GypError(
"The 'working_directory' for 'run_as' in target %s "
"in file %s should be a string." % (target_name, build_file)
)
environment = run_as.get("environment")
- if environment and type(environment) is not dict:
+ if environment and not isinstance(environment, dict):
raise GypError(
"The 'environment' for 'run_as' in target %s "
"in file %s should be a dictionary." % (target_name, build_file)
@@ -2843,15 +2840,15 @@ def TurnIntIntoStrInDict(the_dict):
# Use items instead of iteritems because there's no need to try to look at
# reinserted keys and their associated values.
for k, v in the_dict.items():
- if type(v) is int:
+ if isinstance(v, int):
v = str(v)
the_dict[k] = v
- elif type(v) is dict:
+ elif isinstance(v, dict):
TurnIntIntoStrInDict(v)
- elif type(v) is list:
+ elif isinstance(v, list):
TurnIntIntoStrInList(v)
- if type(k) is int:
+ if isinstance(k, int):
del the_dict[k]
the_dict[str(k)] = v
@@ -2860,11 +2857,11 @@ def TurnIntIntoStrInList(the_list):
"""Given list the_list, recursively converts all integers into strings.
"""
for index, item in enumerate(the_list):
- if type(item) is int:
+ if isinstance(item, int):
the_list[index] = str(item)
- elif type(item) is dict:
+ elif isinstance(item, dict):
TurnIntIntoStrInDict(item)
- elif type(item) is list:
+ elif isinstance(item, list):
TurnIntIntoStrInList(item)
@@ -3019,8 +3016,8 @@ def Load(
del target_dict[key]
ProcessListFiltersInDict(target_name, tmp_dict)
# Write the results back to |target_dict|.
- for key in tmp_dict:
- target_dict[key] = tmp_dict[key]
+ for key, value in tmp_dict.items():
+ target_dict[key] = value
# Make sure every dependency appears at most once.
RemoveDuplicateDependencies(targets)
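
Several input.py loops above move from iterating keys and indexing back into the dict to unpacking `dict.items()`. The result is identical; the rewritten form simply avoids a second lookup per key. A tiny before/after sketch with made-up data:

```python
tmp_dict = {"sources": ["a.cc"], "defines": ["FOO"]}

# Key-then-index form that the patch replaces.
target_dict = {}
for key in tmp_dict:
    target_dict[key] = tmp_dict[key]

# items() form used after the change; same result, no re-indexing.
target_dict_items = {}
for key, value in tmp_dict.items():
    target_dict_items[key] = value

assert target_dict == target_dict_items == tmp_dict
```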
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/input_test.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/input_test.py
index a18f72e9ebb0a7..ff8c8fbecc3e53 100755
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/input_test.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/input_test.py
@@ -6,9 +6,10 @@
"""Unit tests for the input.py file."""
-import gyp.input
import unittest
+import gyp.input
+
class TestFindCycles(unittest.TestCase):
def setUp(self):
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py
index 59647c9a890349..70aab4f1787f44 100755
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py
@@ -59,9 +59,7 @@ def ExecCopyBundleResource(self, source, dest, convert_to_binary):
if os.path.exists(dest):
shutil.rmtree(dest)
shutil.copytree(source, dest)
- elif extension == ".xib":
- return self._CopyXIBFile(source, dest)
- elif extension == ".storyboard":
+ elif extension in {".xib", ".storyboard"}:
return self._CopyXIBFile(source, dest)
elif extension == ".strings" and not convert_to_binary:
self._CopyStringsFile(source, dest)
@@ -70,7 +68,7 @@ def ExecCopyBundleResource(self, source, dest, convert_to_binary):
os.unlink(dest)
shutil.copy(source, dest)
- if convert_to_binary and extension in (".plist", ".strings"):
+ if convert_to_binary and extension in {".plist", ".strings"}:
self._ConvertToBinary(dest)
def _CopyXIBFile(self, source, dest):
@@ -164,9 +162,7 @@ def _DetectInputEncoding(self, file_name):
header = fp.read(3)
except Exception:
return None
- if header.startswith(b"\xFE\xFF"):
- return "UTF-16"
- elif header.startswith(b"\xFF\xFE"):
+ if header.startswith((b"\xFE\xFF", b"\xFF\xFE")):
return "UTF-16"
elif header.startswith(b"\xEF\xBB\xBF"):
return "UTF-8"
@@ -261,7 +257,7 @@ def ExecFilterLibtool(self, *cmd_list):
"""Calls libtool and filters out '/path/to/libtool: file: foo.o has no
symbols'."""
libtool_re = re.compile(
- r"^.*libtool: (?:for architecture: \S* )?" r"file: .* has no symbols$"
+ r"^.*libtool: (?:for architecture: \S* )?file: .* has no symbols$"
)
libtool_re5 = re.compile(
r"^.*libtool: warning for library: "
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py
index adda5a0273f8a6..ace0cae5ebff23 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py
@@ -7,15 +7,15 @@
build systems, primarily ninja.
"""
-import collections
import os
import re
import subprocess
import sys
+from collections import namedtuple
-from gyp.common import OrderedSet
import gyp.MSVSUtil
import gyp.MSVSVersion
+from gyp.common import OrderedSet
windows_quoter_regex = re.compile(r'(\\*)"')
@@ -933,7 +933,7 @@ def BuildCygwinBashCommandLine(self, args, path_to_base):
)
return cmd
- RuleShellFlags = collections.namedtuple("RuleShellFlags", ["cygwin", "quote"])
+ RuleShellFlags = namedtuple("RuleShellFlags", ["cygwin", "quote"]) # noqa: PYI024
def GetRuleShellFlags(self, rule):
"""Return RuleShellFlags about how the given rule should be run. This
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py
index 171d7295747fcd..7e647f40a84c54 100755
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py
@@ -13,9 +13,9 @@
import os
import re
import shutil
-import subprocess
import stat
import string
+import subprocess
import sys
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py
index 5f2c097f63e1f4..85a63dfd7ae0e2 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py
@@ -9,13 +9,14 @@
import copy
-import gyp.common
import os
import os.path
import re
import shlex
import subprocess
import sys
+
+import gyp.common
from gyp.common import GypError
# Populated lazily by XcodeVersion, for efficiency, and to fix an issue when
@@ -471,17 +472,14 @@ def _GetStandaloneBinaryPath(self):
"""Returns the name of the non-bundle binary represented by this target.
E.g. hello_world. Only valid for non-bundles."""
assert not self._IsBundle()
- assert self.spec["type"] in (
+ assert self.spec["type"] in {
"executable",
"shared_library",
"static_library",
"loadable_module",
- ), ("Unexpected type %s" % self.spec["type"])
+ }, ("Unexpected type %s" % self.spec["type"])
target = self.spec["target_name"]
- if self.spec["type"] == "static_library":
- if target[:3] == "lib":
- target = target[3:]
- elif self.spec["type"] in ("loadable_module", "shared_library"):
+ if self.spec["type"] in {"loadable_module", "shared_library", "static_library"}:
if target[:3] == "lib":
target = target[3:]
@@ -1127,8 +1125,8 @@ def _GetIOSPostbuilds(self, configname, output_binary):
be deployed to a device. This should be run as the very last step of the
build."""
if not (
- self.isIOS
- and (self.spec["type"] == "executable" or self._IsXCTest())
+ (self.isIOS
+ and (self.spec["type"] == "executable" or self._IsXCTest()))
or self.IsIosFramework()
):
return []
@@ -1174,8 +1172,9 @@ def _GetIOSPostbuilds(self, configname, output_binary):
# Then re-sign everything with 'preserve=True'
postbuilds.extend(
[
- '%s code-sign-bundle "%s" "%s" "%s" "%s" %s'
+ '%s %s code-sign-bundle "%s" "%s" "%s" "%s" %s'
% (
+ sys.executable,
os.path.join("${TARGET_BUILD_DIR}", "gyp-mac-tool"),
key,
settings.get("CODE_SIGN_ENTITLEMENTS", ""),
@@ -1190,8 +1189,9 @@ def _GetIOSPostbuilds(self, configname, output_binary):
for target in targets:
postbuilds.extend(
[
- '%s code-sign-bundle "%s" "%s" "%s" "%s" %s'
+ '%s %s code-sign-bundle "%s" "%s" "%s" "%s" %s'
% (
+ sys.executable,
os.path.join("${TARGET_BUILD_DIR}", "gyp-mac-tool"),
key,
settings.get("CODE_SIGN_ENTITLEMENTS", ""),
@@ -1204,8 +1204,9 @@ def _GetIOSPostbuilds(self, configname, output_binary):
postbuilds.extend(
[
- '%s code-sign-bundle "%s" "%s" "%s" "%s" %s'
+ '%s %s code-sign-bundle "%s" "%s" "%s" "%s" %s'
% (
+ sys.executable,
os.path.join("${TARGET_BUILD_DIR}", "gyp-mac-tool"),
key,
settings.get("CODE_SIGN_ENTITLEMENTS", ""),
@@ -1858,7 +1859,7 @@ def _TopologicallySortedEnvVarKeys(env):
regex = re.compile(r"\$\{([a-zA-Z0-9\-_]+)\}")
def GetEdges(node):
- # Use a definition of edges such that user_of_variable -> used_varible.
+ # Use a definition of edges such that user_of_variable -> used_variable.
# This happens to be easier in this case, since a variable's
# definition contains all variables it references in a single string.
# We can then reverse the result of the topological sort at the end.
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation_test.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation_test.py
index 98b02320d5a9ee..03cbbaea84601e 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation_test.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation_test.py
@@ -2,10 +2,11 @@
"""Unit tests for the xcode_emulation.py file."""
-from gyp.xcode_emulation import XcodeSettings
import sys
import unittest
+from gyp.xcode_emulation import XcodeSettings
+
class TestXcodeSettings(unittest.TestCase):
def setUp(self):
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py
index bb74eacbeaf4ae..cac1af56f7bfb7 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py
@@ -13,11 +13,12 @@
"""
import errno
-import gyp.generator.ninja
import os
import re
import xml.sax.saxutils
+import gyp.generator.ninja
+
def _WriteWorkspace(main_gyp, sources_gyp, params):
""" Create a workspace to wrap main and sources gyp paths. """
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py
index 33c667c266bf69..be17ef946dce35 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py
@@ -74,7 +74,7 @@
PBXBuildFile appears extraneous, but there's actually one reason for this:
file-specific compiler flags are added to the PBXBuildFile object so as to
allow a single file to be a member of multiple targets while having distinct
-compiler flags for each. These flags can be modified in the Xcode applciation
+compiler flags for each. These flags can be modified in the Xcode application
in the "Build" tab of a File Info window.
When a project is open in the Xcode application, Xcode will rewrite it. As
@@ -137,14 +137,15 @@
a project file is output.
"""
-import gyp.common
-from functools import cmp_to_key
import hashlib
-from operator import attrgetter
import posixpath
import re
import struct
import sys
+from functools import cmp_to_key
+from operator import attrgetter
+
+import gyp.common
def cmp(x, y):
@@ -460,7 +461,7 @@ def _HashUpdate(hash, data):
digest_int_count = hash.digest_size // 4
digest_ints = struct.unpack(">" + "I" * digest_int_count, hash.digest())
id_ints = [0, 0, 0]
- for index in range(0, digest_int_count):
+ for index in range(digest_int_count):
id_ints[index % 3] ^= digest_ints[index]
self.id = "%08X%08X%08X" % tuple(id_ints)
@@ -662,7 +663,7 @@ def _XCKVPrint(self, file, tabs, key, value):
tabs is an int identifying the indentation level. If the class'
_should_print_single_line variable is True, tabs is ignored and the
- key-value pair will be followed by a space insead of a newline.
+ key-value pair will be followed by a space instead of a newline.
"""
if self._should_print_single_line:
@@ -781,7 +782,7 @@ def UpdateProperties(self, properties, do_copy=False):
# Make sure the property conforms to the schema.
(is_list, property_type, is_strong) = self._schema[property][0:3]
if is_list:
- if value.__class__ != list:
+ if not isinstance(value, list):
raise TypeError(
property
+ " of "
@@ -791,7 +792,7 @@ def UpdateProperties(self, properties, do_copy=False):
)
for item in value:
if not isinstance(item, property_type) and not (
- isinstance(item, str) and property_type == str
+ isinstance(item, str) and isinstance(property_type, str)
):
# Accept unicode where str is specified. str is treated as
# UTF-8-encoded.
@@ -806,7 +807,7 @@ def UpdateProperties(self, properties, do_copy=False):
+ item.__class__.__name__
)
elif not isinstance(value, property_type) and not (
- isinstance(value, str) and property_type == str
+ isinstance(value, str) and isinstance(property_type, str)
):
# Accept unicode where str is specified. str is treated as
# UTF-8-encoded.
@@ -1640,7 +1641,6 @@ class PBXVariantGroup(PBXGroup, XCFileLikeElement):
"""PBXVariantGroup is used by Xcode to represent localizations."""
# No additions to the schema relative to PBXGroup.
- pass
# PBXReferenceProxy is also an XCFileLikeElement subclass. It is defined below
@@ -1766,9 +1766,8 @@ def GetBuildSetting(self, key):
configuration_value = configuration.GetBuildSetting(key)
if value is None:
value = configuration_value
- else:
- if value != configuration_value:
- raise ValueError("Variant values for " + key)
+ elif value != configuration_value:
+ raise ValueError("Variant values for " + key)
return value
@@ -1924,14 +1923,13 @@ def _AddBuildFileToDicts(self, pbxbuildfile, path=None):
# It's best when the caller provides the path.
if isinstance(xcfilelikeelement, PBXVariantGroup):
paths.append(path)
+ # If the caller didn't provide a path, there can be either multiple
+ # paths (PBXVariantGroup) or one.
+ elif isinstance(xcfilelikeelement, PBXVariantGroup):
+ for variant in xcfilelikeelement._properties["children"]:
+ paths.append(variant.FullPath())
else:
- # If the caller didn't provide a path, there can be either multiple
- # paths (PBXVariantGroup) or one.
- if isinstance(xcfilelikeelement, PBXVariantGroup):
- for variant in xcfilelikeelement._properties["children"]:
- paths.append(variant.FullPath())
- else:
- paths.append(xcfilelikeelement.FullPath())
+ paths.append(xcfilelikeelement.FullPath())
# Add the paths first, because if something's going to raise, the
# messages provided by _AddPathToDict are more useful owing to its
@@ -2994,7 +2992,7 @@ def AddOrGetProjectReference(self, other_pbxproject):
key=lambda x: x["ProjectRef"].Name().lower()
)
else:
- # The link already exists. Pull out the relevnt data.
+ # The link already exists. Pull out the relevant data.
project_ref_dict = self._other_pbxprojects[other_pbxproject]
product_group = project_ref_dict["ProductGroup"]
project_ref = project_ref_dict["ProjectRef"]
@@ -3017,10 +3015,10 @@ def _AllSymrootsUnique(self, target, inherit_unique_symroot):
symroots = self._DefinedSymroots(target)
for s in self._DefinedSymroots(target):
if (
- s is not None
- and not self._IsUniqueSymrootForTarget(s)
- or s is None
- and not inherit_unique_symroot
+ (s is not None
+ and not self._IsUniqueSymrootForTarget(s))
+ or (s is None
+ and not inherit_unique_symroot)
):
return False
return True if symroots else inherit_unique_symroot
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/packaging/metadata.py b/deps/npm/node_modules/node-gyp/gyp/pylib/packaging/metadata.py
index fb274930799da0..23bb564f3d5ff8 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/packaging/metadata.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/packaging/metadata.py
@@ -145,7 +145,7 @@ class RawMetadata(TypedDict, total=False):
# Metadata 2.3 - PEP 685
# No new fields were added in PEP 685, just some edge case were
- # tightened up to provide better interoptability.
+ # tightened up to provide better interoperability.
_STRING_FIELDS = {
@@ -206,10 +206,10 @@ def _parse_project_urls(data: List[str]) -> Dict[str, str]:
# be the missing value, then they'd have multiple '' values that
# overwrite each other in a accumulating dict.
#
- # The other potentional issue is that it's possible to have the
+ # The other potential issue is that it's possible to have the
# same label multiple times in the metadata, with no solid "right"
# answer with what to do in that case. As such, we'll do the only
- # thing we can, which is treat the field as unparseable and add it
+ # thing we can, which is treat the field as unparsable and add it
# to our list of unparsed fields.
parts = [p.strip() for p in pair.split(",", 1)]
parts.extend([""] * (max(0, 2 - len(parts)))) # Ensure 2 items
@@ -222,8 +222,8 @@ def _parse_project_urls(data: List[str]) -> Dict[str, str]:
label, url = parts
if label in urls:
# The label already exists in our set of urls, so this field
- # is unparseable, and we can just add the whole thing to our
- # unparseable data and stop processing it.
+ # is unparsable, and we can just add the whole thing to our
+ # unparsable data and stop processing it.
raise KeyError("duplicate labels in project urls")
urls[label] = url
@@ -433,7 +433,7 @@ def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[st
except KeyError:
unparsed[name] = value
# Nothing that we've done has managed to parse this, so it'll just
- # throw it in our unparseable data and move on.
+ # throw it in our unparsable data and move on.
else:
unparsed[name] = value
@@ -450,7 +450,7 @@ def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[st
else:
if payload:
# Check to see if we've already got a description, if so then both
- # it, and this body move to unparseable.
+ # it, and this body move to unparsable.
if "description" in raw:
description_header = cast(str, raw.pop("description"))
unparsed.setdefault("description", []).extend(
diff --git a/deps/npm/node_modules/node-gyp/gyp/pyproject.toml b/deps/npm/node_modules/node-gyp/gyp/pyproject.toml
index def9858e444c55..537308731fe542 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pyproject.toml
+++ b/deps/npm/node_modules/node-gyp/gyp/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
[project]
name = "gyp-next"
-version = "0.18.1"
+version = "0.20.0"
authors = [
{ name="Node.js contributors", email="ryzokuken@disroot.org" },
]
@@ -92,15 +92,9 @@ select = [
# "TRY", # tryceratops
]
ignore = [
- "E721",
- "PLC1901",
- "PLR0402",
"PLR1714",
- "PLR2004",
- "PLR5501",
"PLW0603",
"PLW2901",
- "PYI024",
"RUF005",
"RUF012",
"UP031",
@@ -110,6 +104,7 @@ ignore = [
max-complexity = 101
[tool.ruff.lint.pylint]
+allow-magic-value-types = ["float", "int", "str"]
max-args = 11
max-branches = 108
max-returns = 10
diff --git a/deps/npm/node_modules/node-gyp/lib/build.js b/deps/npm/node_modules/node-gyp/lib/build.js
index e1f49bb6ff0caa..9c0cca8fc2634b 100644
--- a/deps/npm/node_modules/node-gyp/lib/build.js
+++ b/deps/npm/node_modules/node-gyp/lib/build.js
@@ -3,7 +3,7 @@
const gracefulFs = require('graceful-fs')
const fs = gracefulFs.promises
const path = require('path')
-const { glob } = require('glob')
+const { glob } = require('tinyglobby')
const log = require('./log')
const which = require('which')
const win = process.platform === 'win32'
@@ -84,9 +84,10 @@ async function build (gyp, argv) {
*/
async function findSolutionFile () {
- const files = await glob('build/*.sln')
+ const files = await glob('build/*.sln', { expandDirectories: false })
if (files.length === 0) {
- if (gracefulFs.existsSync('build/Makefile') || (await glob('build/*.mk')).length !== 0) {
+ if (gracefulFs.existsSync('build/Makefile') ||
+ (await glob('build/*.mk', { expandDirectories: false })).length !== 0) {
command = makeCommand
await doWhich(false)
return
@@ -141,6 +142,8 @@ async function build (gyp, argv) {
if (msvs) {
// Turn off the Microsoft logo on Windows
argv.push('/nologo')
+ // Prevent lingering MSBuild processes and open file handles
+ argv.push('/nodeReuse:false')
}
// Specify the build type, Release by default
@@ -209,7 +212,7 @@ async function build (gyp, argv) {
await new Promise((resolve, reject) => proc.on('exit', async (code, signal) => {
if (buildBinsDir) {
// Clean up the build-time dependency symlinks:
- await fs.rm(buildBinsDir, { recursive: true })
+ await fs.rm(buildBinsDir, { recursive: true, maxRetries: 3 })
}
if (code !== 0) {
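The build.js changes above swap `glob` for `tinyglobby` (passing `expandDirectories: false` so the patterns keep their literal meaning, matching the old glob behaviour) and add `maxRetries` to `fs.rm` so transient Windows file locks do not fail the cleanup step. A minimal sketch of the same pattern, assuming only the `tinyglobby` package and Node's promise-based `fs` API; the helper names below are illustrative, not part of node-gyp:

```js
const { glob } = require('tinyglobby')
const fs = require('fs').promises

// Look for a Visual Studio solution produced by gyp; fall back to a Makefile build.
async function findSolutionFile () {
  // expandDirectories: false keeps 'build/*.sln' literal instead of descending into subdirectories
  const slnFiles = await glob('build/*.sln', { expandDirectories: false })
  if (slnFiles.length > 0) {
    return { command: 'msbuild', file: slnFiles[0] }
  }
  const mkFiles = await glob('build/*.mk', { expandDirectories: false })
  return mkFiles.length > 0 ? { command: 'make', file: mkFiles[0] } : null
}

// Remove a build directory, retrying briefly if Windows still holds a file handle open.
async function removeBuildDir (dir) {
  await fs.rm(dir, { recursive: true, force: true, maxRetries: 3 })
}
```

The `maxRetries` option makes Node retry `EBUSY`/`EPERM`-style failures before giving up, which is the usual cause of flaky directory cleanup on Windows.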
diff --git a/deps/npm/node_modules/node-gyp/lib/clean.js b/deps/npm/node_modules/node-gyp/lib/clean.js
index 523f8016caecef..479c374f10fa2e 100644
--- a/deps/npm/node_modules/node-gyp/lib/clean.js
+++ b/deps/npm/node_modules/node-gyp/lib/clean.js
@@ -8,7 +8,7 @@ async function clean (gyp, argv) {
const buildDir = 'build'
log.verbose('clean', 'removing "%s" directory', buildDir)
- await fs.rm(buildDir, { recursive: true, force: true })
+ await fs.rm(buildDir, { recursive: true, force: true, maxRetries: 3 })
}
module.exports = clean
diff --git a/deps/npm/node_modules/node-gyp/lib/find-visualstudio.js b/deps/npm/node_modules/node-gyp/lib/find-visualstudio.js
index 2dc1930fd78280..e9aa7fafdc98a4 100644
--- a/deps/npm/node_modules/node-gyp/lib/find-visualstudio.js
+++ b/deps/npm/node_modules/node-gyp/lib/find-visualstudio.js
@@ -145,6 +145,7 @@ class VisualStudioFinder {
version: process.env.VSCMD_VER,
packages: [
'Microsoft.VisualStudio.Component.VC.Tools.x86.x64',
+ 'Microsoft.VisualStudio.Component.VC.Tools.ARM64',
// Assume MSBuild exists. It will be checked in processing.
'Microsoft.VisualStudio.VC.MSBuild.Base'
]
@@ -429,12 +430,21 @@ class VisualStudioFinder {
// Helper - process toolset information
getToolset (info, versionYear) {
- const pkg = 'Microsoft.VisualStudio.Component.VC.Tools.x86.x64'
+ const vcToolsArm64 = 'VC.Tools.ARM64'
+ const pkgArm64 = `Microsoft.VisualStudio.Component.${vcToolsArm64}`
+ const vcToolsX64 = 'VC.Tools.x86.x64'
+ const pkgX64 = `Microsoft.VisualStudio.Component.${vcToolsX64}`
const express = 'Microsoft.VisualStudio.WDExpress'
- if (info.packages.indexOf(pkg) !== -1) {
- this.log.silly('- found VC.Tools.x86.x64')
- } else if (info.packages.indexOf(express) !== -1) {
+ if (process.arch === 'arm64' && info.packages.includes(pkgArm64)) {
+ this.log.silly(`- found ${vcToolsArm64}`)
+ } else if (info.packages.includes(pkgX64)) {
+ if (process.arch === 'arm64') {
+ this.addLog(`- found ${vcToolsX64} on ARM64 platform. Expect less performance and/or link failure with ARM64 binary.`)
+ } else {
+ this.log.silly(`- found ${vcToolsX64}`)
+ }
+ } else if (info.packages.includes(express)) {
this.log.silly('- found Visual Studio Express (looking for toolset)')
} else {
return null
diff --git a/deps/npm/node_modules/node-gyp/lib/install.js b/deps/npm/node_modules/node-gyp/lib/install.js
index 7196a316296fb8..90be86c822c8fb 100644
--- a/deps/npm/node_modules/node-gyp/lib/install.js
+++ b/deps/npm/node_modules/node-gyp/lib/install.js
@@ -284,7 +284,7 @@ async function install (gyp, argv) {
if (tarExtractDir !== devDir) {
try {
// try to cleanup temp dir
- await fs.rm(tarExtractDir, { recursive: true })
+ await fs.rm(tarExtractDir, { recursive: true, maxRetries: 3 })
} catch {
log.warn('failed to clean up temp tarball extract directory')
}
diff --git a/deps/npm/node_modules/node-gyp/lib/remove.js b/deps/npm/node_modules/node-gyp/lib/remove.js
index 7efdb01a662e76..55736f71d97c5b 100644
--- a/deps/npm/node_modules/node-gyp/lib/remove.js
+++ b/deps/npm/node_modules/node-gyp/lib/remove.js
@@ -36,7 +36,7 @@ async function remove (gyp, argv) {
throw err
}
- await fs.rm(versionPath, { recursive: true, force: true })
+ await fs.rm(versionPath, { recursive: true, force: true, maxRetries: 3 })
}
module.exports = remove
diff --git a/deps/npm/node_modules/node-gyp/node_modules/minizlib/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/minizlib/LICENSE
deleted file mode 100644
index 49f7efe431c9ea..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/minizlib/LICENSE
+++ /dev/null
@@ -1,26 +0,0 @@
-Minizlib was created by Isaac Z. Schlueter.
-It is a derivative work of the Node.js project.
-
-"""
-Copyright (c) 2017-2023 Isaac Z. Schlueter and Contributors
-Copyright (c) 2017-2023 Node.js contributors. All rights reserved.
-Copyright (c) 2017-2023 Joyent, Inc. and other Node contributors. All rights reserved.
-
-Permission is hereby granted, free of charge, to any person obtaining a
-copy of this software and associated documentation files (the "Software"),
-to deal in the Software without restriction, including without limitation
-the rights to use, copy, modify, merge, publish, distribute, sublicense,
-and/or sell copies of the Software, and to permit persons to whom the
-Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-"""
diff --git a/deps/npm/node_modules/node-gyp/node_modules/minizlib/dist/commonjs/constants.js b/deps/npm/node_modules/node-gyp/node_modules/minizlib/dist/commonjs/constants.js
deleted file mode 100644
index dfc2c1957bfc99..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/minizlib/dist/commonjs/constants.js
+++ /dev/null
@@ -1,123 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.constants = void 0;
-// Update with any zlib constants that are added or changed in the future.
-// Node v6 didn't export this, so we just hard code the version and rely
-// on all the other hard-coded values from zlib v4736. When node v6
-// support drops, we can just export the realZlibConstants object.
-const zlib_1 = __importDefault(require("zlib"));
-/* c8 ignore start */
-const realZlibConstants = zlib_1.default.constants || { ZLIB_VERNUM: 4736 };
-/* c8 ignore stop */
-exports.constants = Object.freeze(Object.assign(Object.create(null), {
- Z_NO_FLUSH: 0,
- Z_PARTIAL_FLUSH: 1,
- Z_SYNC_FLUSH: 2,
- Z_FULL_FLUSH: 3,
- Z_FINISH: 4,
- Z_BLOCK: 5,
- Z_OK: 0,
- Z_STREAM_END: 1,
- Z_NEED_DICT: 2,
- Z_ERRNO: -1,
- Z_STREAM_ERROR: -2,
- Z_DATA_ERROR: -3,
- Z_MEM_ERROR: -4,
- Z_BUF_ERROR: -5,
- Z_VERSION_ERROR: -6,
- Z_NO_COMPRESSION: 0,
- Z_BEST_SPEED: 1,
- Z_BEST_COMPRESSION: 9,
- Z_DEFAULT_COMPRESSION: -1,
- Z_FILTERED: 1,
- Z_HUFFMAN_ONLY: 2,
- Z_RLE: 3,
- Z_FIXED: 4,
- Z_DEFAULT_STRATEGY: 0,
- DEFLATE: 1,
- INFLATE: 2,
- GZIP: 3,
- GUNZIP: 4,
- DEFLATERAW: 5,
- INFLATERAW: 6,
- UNZIP: 7,
- BROTLI_DECODE: 8,
- BROTLI_ENCODE: 9,
- Z_MIN_WINDOWBITS: 8,
- Z_MAX_WINDOWBITS: 15,
- Z_DEFAULT_WINDOWBITS: 15,
- Z_MIN_CHUNK: 64,
- Z_MAX_CHUNK: Infinity,
- Z_DEFAULT_CHUNK: 16384,
- Z_MIN_MEMLEVEL: 1,
- Z_MAX_MEMLEVEL: 9,
- Z_DEFAULT_MEMLEVEL: 8,
- Z_MIN_LEVEL: -1,
- Z_MAX_LEVEL: 9,
- Z_DEFAULT_LEVEL: -1,
- BROTLI_OPERATION_PROCESS: 0,
- BROTLI_OPERATION_FLUSH: 1,
- BROTLI_OPERATION_FINISH: 2,
- BROTLI_OPERATION_EMIT_METADATA: 3,
- BROTLI_MODE_GENERIC: 0,
- BROTLI_MODE_TEXT: 1,
- BROTLI_MODE_FONT: 2,
- BROTLI_DEFAULT_MODE: 0,
- BROTLI_MIN_QUALITY: 0,
- BROTLI_MAX_QUALITY: 11,
- BROTLI_DEFAULT_QUALITY: 11,
- BROTLI_MIN_WINDOW_BITS: 10,
- BROTLI_MAX_WINDOW_BITS: 24,
- BROTLI_LARGE_MAX_WINDOW_BITS: 30,
- BROTLI_DEFAULT_WINDOW: 22,
- BROTLI_MIN_INPUT_BLOCK_BITS: 16,
- BROTLI_MAX_INPUT_BLOCK_BITS: 24,
- BROTLI_PARAM_MODE: 0,
- BROTLI_PARAM_QUALITY: 1,
- BROTLI_PARAM_LGWIN: 2,
- BROTLI_PARAM_LGBLOCK: 3,
- BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4,
- BROTLI_PARAM_SIZE_HINT: 5,
- BROTLI_PARAM_LARGE_WINDOW: 6,
- BROTLI_PARAM_NPOSTFIX: 7,
- BROTLI_PARAM_NDIRECT: 8,
- BROTLI_DECODER_RESULT_ERROR: 0,
- BROTLI_DECODER_RESULT_SUCCESS: 1,
- BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2,
- BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3,
- BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0,
- BROTLI_DECODER_PARAM_LARGE_WINDOW: 1,
- BROTLI_DECODER_NO_ERROR: 0,
- BROTLI_DECODER_SUCCESS: 1,
- BROTLI_DECODER_NEEDS_MORE_INPUT: 2,
- BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3,
- BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1,
- BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2,
- BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3,
- BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4,
- BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5,
- BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6,
- BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7,
- BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8,
- BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9,
- BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10,
- BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11,
- BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12,
- BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13,
- BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14,
- BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15,
- BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16,
- BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19,
- BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20,
- BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21,
- BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22,
- BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25,
- BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26,
- BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27,
- BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30,
- BROTLI_DECODER_ERROR_UNREACHABLE: -31,
-}, realZlibConstants));
-//# sourceMappingURL=constants.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/minizlib/dist/commonjs/index.js b/deps/npm/node_modules/node-gyp/node_modules/minizlib/dist/commonjs/index.js
deleted file mode 100644
index ad65eef0495076..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/minizlib/dist/commonjs/index.js
+++ /dev/null
@@ -1,352 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.BrotliDecompress = exports.BrotliCompress = exports.Brotli = exports.Unzip = exports.InflateRaw = exports.DeflateRaw = exports.Gunzip = exports.Gzip = exports.Inflate = exports.Deflate = exports.Zlib = exports.ZlibError = exports.constants = void 0;
-const assert_1 = __importDefault(require("assert"));
-const buffer_1 = require("buffer");
-const minipass_1 = require("minipass");
-const zlib_1 = __importDefault(require("zlib"));
-const constants_js_1 = require("./constants.js");
-var constants_js_2 = require("./constants.js");
-Object.defineProperty(exports, "constants", { enumerable: true, get: function () { return constants_js_2.constants; } });
-const OriginalBufferConcat = buffer_1.Buffer.concat;
-const _superWrite = Symbol('_superWrite');
-class ZlibError extends Error {
- code;
- errno;
- constructor(err) {
- super('zlib: ' + err.message);
- this.code = err.code;
- this.errno = err.errno;
- /* c8 ignore next */
- if (!this.code)
- this.code = 'ZLIB_ERROR';
- this.message = 'zlib: ' + err.message;
- Error.captureStackTrace(this, this.constructor);
- }
- get name() {
- return 'ZlibError';
- }
-}
-exports.ZlibError = ZlibError;
-// the Zlib class they all inherit from
-// This thing manages the queue of requests, and returns
-// true or false if there is anything in the queue when
-// you call the .write() method.
-const _flushFlag = Symbol('flushFlag');
-class ZlibBase extends minipass_1.Minipass {
- #sawError = false;
- #ended = false;
- #flushFlag;
- #finishFlushFlag;
- #fullFlushFlag;
- #handle;
- #onError;
- get sawError() {
- return this.#sawError;
- }
- get handle() {
- return this.#handle;
- }
- /* c8 ignore start */
- get flushFlag() {
- return this.#flushFlag;
- }
- /* c8 ignore stop */
- constructor(opts, mode) {
- if (!opts || typeof opts !== 'object')
- throw new TypeError('invalid options for ZlibBase constructor');
- //@ts-ignore
- super(opts);
- /* c8 ignore start */
- this.#flushFlag = opts.flush ?? 0;
- this.#finishFlushFlag = opts.finishFlush ?? 0;
- this.#fullFlushFlag = opts.fullFlushFlag ?? 0;
- /* c8 ignore stop */
- // this will throw if any options are invalid for the class selected
- try {
- // @types/node doesn't know that it exports the classes, but they're there
- //@ts-ignore
- this.#handle = new zlib_1.default[mode](opts);
- }
- catch (er) {
- // make sure that all errors get decorated properly
- throw new ZlibError(er);
- }
- this.#onError = err => {
- // no sense raising multiple errors, since we abort on the first one.
- if (this.#sawError)
- return;
- this.#sawError = true;
- // there is no way to cleanly recover.
- // continuing only obscures problems.
- this.close();
- this.emit('error', err);
- };
- this.#handle?.on('error', er => this.#onError(new ZlibError(er)));
- this.once('end', () => this.close);
- }
- close() {
- if (this.#handle) {
- this.#handle.close();
- this.#handle = undefined;
- this.emit('close');
- }
- }
- reset() {
- if (!this.#sawError) {
- (0, assert_1.default)(this.#handle, 'zlib binding closed');
- //@ts-ignore
- return this.#handle.reset?.();
- }
- }
- flush(flushFlag) {
- if (this.ended)
- return;
- if (typeof flushFlag !== 'number')
- flushFlag = this.#fullFlushFlag;
- this.write(Object.assign(buffer_1.Buffer.alloc(0), { [_flushFlag]: flushFlag }));
- }
- end(chunk, encoding, cb) {
- /* c8 ignore start */
- if (typeof chunk === 'function') {
- cb = chunk;
- encoding = undefined;
- chunk = undefined;
- }
- if (typeof encoding === 'function') {
- cb = encoding;
- encoding = undefined;
- }
- /* c8 ignore stop */
- if (chunk) {
- if (encoding)
- this.write(chunk, encoding);
- else
- this.write(chunk);
- }
- this.flush(this.#finishFlushFlag);
- this.#ended = true;
- return super.end(cb);
- }
- get ended() {
- return this.#ended;
- }
- // overridden in the gzip classes to do portable writes
- [_superWrite](data) {
- return super.write(data);
- }
- write(chunk, encoding, cb) {
- // process the chunk using the sync process
- // then super.write() all the outputted chunks
- if (typeof encoding === 'function')
- (cb = encoding), (encoding = 'utf8');
- if (typeof chunk === 'string')
- chunk = buffer_1.Buffer.from(chunk, encoding);
- if (this.#sawError)
- return;
- (0, assert_1.default)(this.#handle, 'zlib binding closed');
- // _processChunk tries to .close() the native handle after it's done, so we
- // intercept that by temporarily making it a no-op.
- // diving into the node:zlib internals a bit here
- const nativeHandle = this.#handle
- ._handle;
- const originalNativeClose = nativeHandle.close;
- nativeHandle.close = () => { };
- const originalClose = this.#handle.close;
- this.#handle.close = () => { };
- // It also calls `Buffer.concat()` at the end, which may be convenient
- // for some, but which we are not interested in as it slows us down.
- buffer_1.Buffer.concat = args => args;
- let result = undefined;
- try {
- const flushFlag = typeof chunk[_flushFlag] === 'number'
- ? chunk[_flushFlag]
- : this.#flushFlag;
- result = this.#handle._processChunk(chunk, flushFlag);
- // if we don't throw, reset it back how it was
- buffer_1.Buffer.concat = OriginalBufferConcat;
- }
- catch (err) {
- // or if we do, put Buffer.concat() back before we emit error
- // Error events call into user code, which may call Buffer.concat()
- buffer_1.Buffer.concat = OriginalBufferConcat;
- this.#onError(new ZlibError(err));
- }
- finally {
- if (this.#handle) {
- // Core zlib resets `_handle` to null after attempting to close the
- // native handle. Our no-op handler prevented actual closure, but we
- // need to restore the `._handle` property.
- ;
- this.#handle._handle =
- nativeHandle;
- nativeHandle.close = originalNativeClose;
- this.#handle.close = originalClose;
- // `_processChunk()` adds an 'error' listener. If we don't remove it
- // after each call, these handlers start piling up.
- this.#handle.removeAllListeners('error');
- // make sure OUR error listener is still attached tho
- }
- }
- if (this.#handle)
- this.#handle.on('error', er => this.#onError(new ZlibError(er)));
- let writeReturn;
- if (result) {
- if (Array.isArray(result) && result.length > 0) {
- const r = result[0];
- // The first buffer is always `handle._outBuffer`, which would be
- // re-used for later invocations; so, we always have to copy that one.
- writeReturn = this[_superWrite](buffer_1.Buffer.from(r));
- for (let i = 1; i < result.length; i++) {
- writeReturn = this[_superWrite](result[i]);
- }
- }
- else {
- // either a single Buffer or an empty array
- writeReturn = this[_superWrite](buffer_1.Buffer.from(result));
- }
- }
- if (cb)
- cb();
- return writeReturn;
- }
-}
-class Zlib extends ZlibBase {
- #level;
- #strategy;
- constructor(opts, mode) {
- opts = opts || {};
- opts.flush = opts.flush || constants_js_1.constants.Z_NO_FLUSH;
- opts.finishFlush = opts.finishFlush || constants_js_1.constants.Z_FINISH;
- opts.fullFlushFlag = constants_js_1.constants.Z_FULL_FLUSH;
- super(opts, mode);
- this.#level = opts.level;
- this.#strategy = opts.strategy;
- }
- params(level, strategy) {
- if (this.sawError)
- return;
- if (!this.handle)
- throw new Error('cannot switch params when binding is closed');
- // no way to test this without also not supporting params at all
- /* c8 ignore start */
- if (!this.handle.params)
- throw new Error('not supported in this implementation');
- /* c8 ignore stop */
- if (this.#level !== level || this.#strategy !== strategy) {
- this.flush(constants_js_1.constants.Z_SYNC_FLUSH);
- (0, assert_1.default)(this.handle, 'zlib binding closed');
- // .params() calls .flush(), but the latter is always async in the
- // core zlib. We override .flush() temporarily to intercept that and
- // flush synchronously.
- const origFlush = this.handle.flush;
- this.handle.flush = (flushFlag, cb) => {
- /* c8 ignore start */
- if (typeof flushFlag === 'function') {
- cb = flushFlag;
- flushFlag = this.flushFlag;
- }
- /* c8 ignore stop */
- this.flush(flushFlag);
- cb?.();
- };
- try {
- ;
- this.handle.params(level, strategy);
- }
- finally {
- this.handle.flush = origFlush;
- }
- /* c8 ignore start */
- if (this.handle) {
- this.#level = level;
- this.#strategy = strategy;
- }
- /* c8 ignore stop */
- }
- }
-}
-exports.Zlib = Zlib;
-// minimal 2-byte header
-class Deflate extends Zlib {
- constructor(opts) {
- super(opts, 'Deflate');
- }
-}
-exports.Deflate = Deflate;
-class Inflate extends Zlib {
- constructor(opts) {
- super(opts, 'Inflate');
- }
-}
-exports.Inflate = Inflate;
-class Gzip extends Zlib {
- #portable;
- constructor(opts) {
- super(opts, 'Gzip');
- this.#portable = opts && !!opts.portable;
- }
- [_superWrite](data) {
- if (!this.#portable)
- return super[_superWrite](data);
- // we'll always get the header emitted in one first chunk
- // overwrite the OS indicator byte with 0xFF
- this.#portable = false;
- data[9] = 255;
- return super[_superWrite](data);
- }
-}
-exports.Gzip = Gzip;
-class Gunzip extends Zlib {
- constructor(opts) {
- super(opts, 'Gunzip');
- }
-}
-exports.Gunzip = Gunzip;
-// raw - no header
-class DeflateRaw extends Zlib {
- constructor(opts) {
- super(opts, 'DeflateRaw');
- }
-}
-exports.DeflateRaw = DeflateRaw;
-class InflateRaw extends Zlib {
- constructor(opts) {
- super(opts, 'InflateRaw');
- }
-}
-exports.InflateRaw = InflateRaw;
-// auto-detect header.
-class Unzip extends Zlib {
- constructor(opts) {
- super(opts, 'Unzip');
- }
-}
-exports.Unzip = Unzip;
-class Brotli extends ZlibBase {
- constructor(opts, mode) {
- opts = opts || {};
- opts.flush = opts.flush || constants_js_1.constants.BROTLI_OPERATION_PROCESS;
- opts.finishFlush =
- opts.finishFlush || constants_js_1.constants.BROTLI_OPERATION_FINISH;
- opts.fullFlushFlag = constants_js_1.constants.BROTLI_OPERATION_FLUSH;
- super(opts, mode);
- }
-}
-exports.Brotli = Brotli;
-class BrotliCompress extends Brotli {
- constructor(opts) {
- super(opts, 'BrotliCompress');
- }
-}
-exports.BrotliCompress = BrotliCompress;
-class BrotliDecompress extends Brotli {
- constructor(opts) {
- super(opts, 'BrotliDecompress');
- }
-}
-exports.BrotliDecompress = BrotliDecompress;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/minizlib/dist/commonjs/package.json b/deps/npm/node_modules/node-gyp/node_modules/minizlib/dist/commonjs/package.json
deleted file mode 100644
index 5bbefffbabee39..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/minizlib/dist/commonjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
- "type": "commonjs"
-}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/minizlib/dist/esm/constants.js b/deps/npm/node_modules/node-gyp/node_modules/minizlib/dist/esm/constants.js
deleted file mode 100644
index 7faf40be5068d0..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/minizlib/dist/esm/constants.js
+++ /dev/null
@@ -1,117 +0,0 @@
-// Update with any zlib constants that are added or changed in the future.
-// Node v6 didn't export this, so we just hard code the version and rely
-// on all the other hard-coded values from zlib v4736. When node v6
-// support drops, we can just export the realZlibConstants object.
-import realZlib from 'zlib';
-/* c8 ignore start */
-const realZlibConstants = realZlib.constants || { ZLIB_VERNUM: 4736 };
-/* c8 ignore stop */
-export const constants = Object.freeze(Object.assign(Object.create(null), {
- Z_NO_FLUSH: 0,
- Z_PARTIAL_FLUSH: 1,
- Z_SYNC_FLUSH: 2,
- Z_FULL_FLUSH: 3,
- Z_FINISH: 4,
- Z_BLOCK: 5,
- Z_OK: 0,
- Z_STREAM_END: 1,
- Z_NEED_DICT: 2,
- Z_ERRNO: -1,
- Z_STREAM_ERROR: -2,
- Z_DATA_ERROR: -3,
- Z_MEM_ERROR: -4,
- Z_BUF_ERROR: -5,
- Z_VERSION_ERROR: -6,
- Z_NO_COMPRESSION: 0,
- Z_BEST_SPEED: 1,
- Z_BEST_COMPRESSION: 9,
- Z_DEFAULT_COMPRESSION: -1,
- Z_FILTERED: 1,
- Z_HUFFMAN_ONLY: 2,
- Z_RLE: 3,
- Z_FIXED: 4,
- Z_DEFAULT_STRATEGY: 0,
- DEFLATE: 1,
- INFLATE: 2,
- GZIP: 3,
- GUNZIP: 4,
- DEFLATERAW: 5,
- INFLATERAW: 6,
- UNZIP: 7,
- BROTLI_DECODE: 8,
- BROTLI_ENCODE: 9,
- Z_MIN_WINDOWBITS: 8,
- Z_MAX_WINDOWBITS: 15,
- Z_DEFAULT_WINDOWBITS: 15,
- Z_MIN_CHUNK: 64,
- Z_MAX_CHUNK: Infinity,
- Z_DEFAULT_CHUNK: 16384,
- Z_MIN_MEMLEVEL: 1,
- Z_MAX_MEMLEVEL: 9,
- Z_DEFAULT_MEMLEVEL: 8,
- Z_MIN_LEVEL: -1,
- Z_MAX_LEVEL: 9,
- Z_DEFAULT_LEVEL: -1,
- BROTLI_OPERATION_PROCESS: 0,
- BROTLI_OPERATION_FLUSH: 1,
- BROTLI_OPERATION_FINISH: 2,
- BROTLI_OPERATION_EMIT_METADATA: 3,
- BROTLI_MODE_GENERIC: 0,
- BROTLI_MODE_TEXT: 1,
- BROTLI_MODE_FONT: 2,
- BROTLI_DEFAULT_MODE: 0,
- BROTLI_MIN_QUALITY: 0,
- BROTLI_MAX_QUALITY: 11,
- BROTLI_DEFAULT_QUALITY: 11,
- BROTLI_MIN_WINDOW_BITS: 10,
- BROTLI_MAX_WINDOW_BITS: 24,
- BROTLI_LARGE_MAX_WINDOW_BITS: 30,
- BROTLI_DEFAULT_WINDOW: 22,
- BROTLI_MIN_INPUT_BLOCK_BITS: 16,
- BROTLI_MAX_INPUT_BLOCK_BITS: 24,
- BROTLI_PARAM_MODE: 0,
- BROTLI_PARAM_QUALITY: 1,
- BROTLI_PARAM_LGWIN: 2,
- BROTLI_PARAM_LGBLOCK: 3,
- BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4,
- BROTLI_PARAM_SIZE_HINT: 5,
- BROTLI_PARAM_LARGE_WINDOW: 6,
- BROTLI_PARAM_NPOSTFIX: 7,
- BROTLI_PARAM_NDIRECT: 8,
- BROTLI_DECODER_RESULT_ERROR: 0,
- BROTLI_DECODER_RESULT_SUCCESS: 1,
- BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2,
- BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3,
- BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0,
- BROTLI_DECODER_PARAM_LARGE_WINDOW: 1,
- BROTLI_DECODER_NO_ERROR: 0,
- BROTLI_DECODER_SUCCESS: 1,
- BROTLI_DECODER_NEEDS_MORE_INPUT: 2,
- BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3,
- BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1,
- BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2,
- BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3,
- BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4,
- BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5,
- BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6,
- BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7,
- BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8,
- BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9,
- BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10,
- BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11,
- BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12,
- BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13,
- BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14,
- BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15,
- BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16,
- BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19,
- BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20,
- BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21,
- BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22,
- BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25,
- BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26,
- BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27,
- BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30,
- BROTLI_DECODER_ERROR_UNREACHABLE: -31,
-}, realZlibConstants));
-//# sourceMappingURL=constants.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/minizlib/dist/esm/package.json b/deps/npm/node_modules/node-gyp/node_modules/minizlib/dist/esm/package.json
deleted file mode 100644
index 3dbc1ca591c055..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/minizlib/dist/esm/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
- "type": "module"
-}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/minizlib/package.json b/deps/npm/node_modules/node-gyp/node_modules/minizlib/package.json
deleted file mode 100644
index e94623ff43d353..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/minizlib/package.json
+++ /dev/null
@@ -1,81 +0,0 @@
-{
- "name": "minizlib",
- "version": "3.0.1",
- "description": "A small fast zlib stream built on [minipass](http://npm.im/minipass) and Node.js's zlib binding.",
- "main": "./dist/commonjs/index.js",
- "dependencies": {
- "minipass": "^7.0.4",
- "rimraf": "^5.0.5"
- },
- "scripts": {
- "prepare": "tshy",
- "pretest": "npm run prepare",
- "test": "tap",
- "preversion": "npm test",
- "postversion": "npm publish",
- "prepublishOnly": "git push origin --follow-tags",
- "format": "prettier --write . --loglevel warn",
- "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
- },
- "repository": {
- "type": "git",
- "url": "git+https://github.com/isaacs/minizlib.git"
- },
- "keywords": [
- "zlib",
- "gzip",
- "gunzip",
- "deflate",
- "inflate",
- "compression",
- "zip",
- "unzip"
- ],
- "author": "Isaac Z. Schlueter (http://blog.izs.me/)",
- "license": "MIT",
- "devDependencies": {
- "@types/node": "^20.11.29",
- "mkdirp": "^3.0.1",
- "tap": "^18.7.1",
- "tshy": "^1.12.0",
- "typedoc": "^0.25.12"
- },
- "files": [
- "dist"
- ],
- "engines": {
- "node": ">= 18"
- },
- "tshy": {
- "exports": {
- "./package.json": "./package.json",
- ".": "./src/index.ts"
- }
- },
- "exports": {
- "./package.json": "./package.json",
- ".": {
- "import": {
- "types": "./dist/esm/index.d.ts",
- "default": "./dist/esm/index.js"
- },
- "require": {
- "types": "./dist/commonjs/index.d.ts",
- "default": "./dist/commonjs/index.js"
- }
- }
- },
- "types": "./dist/commonjs/index.d.ts",
- "type": "module",
- "prettier": {
- "semi": false,
- "printWidth": 75,
- "tabWidth": 2,
- "useTabs": false,
- "singleQuote": true,
- "jsxSingleQuote": false,
- "bracketSameLine": true,
- "arrowParens": "avoid",
- "endOfLine": "lf"
- }
-}
diff --git a/deps/npm/node_modules/node-gyp/package.json b/deps/npm/node_modules/node-gyp/package.json
index 4a1cfb0eb1a283..f69a022ef3d12b 100644
--- a/deps/npm/node_modules/node-gyp/package.json
+++ b/deps/npm/node_modules/node-gyp/package.json
@@ -11,7 +11,7 @@
"bindings",
"gyp"
],
- "version": "11.0.0",
+ "version": "11.2.0",
"installVersion": 11,
"author": "Nathan Rajlich (http://tootallnate.net)",
"repository": {
@@ -24,13 +24,13 @@
"dependencies": {
"env-paths": "^2.2.0",
"exponential-backoff": "^3.1.1",
- "glob": "^10.3.10",
"graceful-fs": "^4.2.6",
"make-fetch-happen": "^14.0.3",
"nopt": "^8.0.0",
"proc-log": "^5.0.0",
"semver": "^7.3.5",
"tar": "^7.4.3",
+ "tinyglobby": "^0.2.12",
"which": "^5.0.0"
},
"engines": {
diff --git a/deps/npm/node_modules/node-gyp/src/win_delay_load_hook.cc b/deps/npm/node_modules/node-gyp/src/win_delay_load_hook.cc
index 169f8029f10fd2..63e197706d4661 100644
--- a/deps/npm/node_modules/node-gyp/src/win_delay_load_hook.cc
+++ b/deps/npm/node_modules/node-gyp/src/win_delay_load_hook.cc
@@ -28,7 +28,9 @@ static FARPROC WINAPI load_exe_hook(unsigned int event, DelayLoadInfo* info) {
if (_stricmp(info->szDll, HOST_BINARY) != 0)
return NULL;
- m = GetModuleHandle(NULL);
+ // Try libnode.dll first, for compatibility with Node.js builds produced by 'vcbuild.bat dll'
+ m = GetModuleHandle(TEXT("libnode.dll"));
+ if (m == NULL) m = GetModuleHandle(NULL);
return (FARPROC) m;
}
diff --git a/deps/npm/node_modules/nopt/README.md b/deps/npm/node_modules/nopt/README.md
index a99531c04655fe..19ef097bb2c220 100644
--- a/deps/npm/node_modules/nopt/README.md
+++ b/deps/npm/node_modules/nopt/README.md
@@ -141,14 +141,15 @@ config object and remove its invalid properties.
## Error Handling
-By default, nopt outputs a warning to standard error when invalid values for
-known options are found. You can change this behavior by assigning a method
-to `nopt.invalidHandler`. This method will be called with
-the offending `nopt.invalidHandler(key, val, types)`.
-
-If no `nopt.invalidHandler` is assigned, then it will console.error
-its whining. If it is assigned to boolean `false` then the warning is
-suppressed.
+By default nopt logs debug messages if `DEBUG_NOPT` or `NOPT_DEBUG` are set in the environment.
+
+You can assign the following methods to `nopt` for a more granular notification of invalid, unknown, and expanding options:
+
+`nopt.invalidHandler(key, value, type, data)` - Called when a value is invalid for its option.
+`nopt.unknownHandler(key, next)` - Called when an option is found that has no configuration. In certain situations the next option on the command line will be parsed on its own instead of as part of the unknown option. In this case `next` will contain that option.
+`nopt.abbrevHandler(short, long)` - Called when an option is automatically translated via abbreviations.
+
+You can also set any of these to `false` to disable the debugging messages that they generate.
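A minimal sketch of wiring up these handlers, based only on the signatures documented above; the option table, shorthand, and messages are illustrative, not part of nopt:

```js
const nopt = require('nopt')

const types = { loglevel: ['silent', 'info', 'verbose'] }
const shorthands = { v: ['--loglevel', 'verbose'] }

// Called when a configured option's value does not validate against its type.
nopt.invalidHandler = (key, value, type, data) => {
  console.error(`invalid value ${JSON.stringify(value)} for --${key}`)
}

// Called for options with no configuration. `next` is only set when the
// following argument is about to be parsed on its own rather than as this
// option's value.
nopt.unknownHandler = (key, next) => {
  console.error(`unknown option --${key}${next ? ` (followed by ${next})` : ''}`)
}

// Called when an abbreviated option is expanded to its full name.
nopt.abbrevHandler = (short, long) => {
  console.error(`expanded --${short} to --${long}`)
}

// Parse a hand-rolled argv (slice = 0 so no leading arguments are skipped).
const parsed = nopt(types, shorthands, ['--logl', 'verbose', '--porcelain', 'yes'], 0)
console.log(parsed)
```

Setting any of the three handlers to `false` suppresses the corresponding debug message instead.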
## Abbreviations
diff --git a/deps/npm/node_modules/nopt/lib/nopt-lib.js b/deps/npm/node_modules/nopt/lib/nopt-lib.js
index d3d1de0255ba9b..441c9cc30377af 100644
--- a/deps/npm/node_modules/nopt/lib/nopt-lib.js
+++ b/deps/npm/node_modules/nopt/lib/nopt-lib.js
@@ -25,7 +25,9 @@ function nopt (args, {
types,
shorthands,
typeDefs,
- invalidHandler,
+ invalidHandler, // opt is configured but its value does not validate against given type
+ unknownHandler, // opt is not configured
+ abbrevHandler, // opt is being expanded via abbrev
typeDefault,
dynamicTypes,
} = {}) {
@@ -38,7 +40,9 @@ function nopt (args, {
original: args.slice(0),
}
- parse(args, data, argv.remain, { typeDefs, types, dynamicTypes, shorthands })
+ parse(args, data, argv.remain, {
+ typeDefs, types, dynamicTypes, shorthands, unknownHandler, abbrevHandler,
+ })
// now data is full
clean(data, { types, dynamicTypes, typeDefs, invalidHandler, typeDefault })
@@ -247,6 +251,8 @@ function parse (args, data, remain, {
typeDefs = {},
shorthands = {},
dynamicTypes,
+ unknownHandler,
+ abbrevHandler,
} = {}) {
const StringType = typeDefs.String?.type
const NumberType = typeDefs.Number?.type
@@ -282,7 +288,7 @@ function parse (args, data, remain, {
// see if it's a shorthand
// if so, splice and back up to re-parse it.
- const shRes = resolveShort(arg, shortAbbr, abbrevs, { shorthands })
+ const shRes = resolveShort(arg, shortAbbr, abbrevs, { shorthands, abbrevHandler })
debug('arg=%j shRes=%j', arg, shRes)
if (shRes) {
args.splice.apply(args, [i, 1].concat(shRes))
@@ -298,7 +304,13 @@ function parse (args, data, remain, {
arg = arg.slice(3)
}
- if (abbrevs[arg]) {
+ // abbrev includes the original full string in its abbrev list
+ if (abbrevs[arg] && abbrevs[arg] !== arg) {
+ if (abbrevHandler) {
+ abbrevHandler(arg, abbrevs[arg])
+ } else if (abbrevHandler !== false) {
+ debug(`abbrev: ${arg} -> ${abbrevs[arg]}`)
+ }
arg = abbrevs[arg]
}
@@ -331,6 +343,23 @@ function parse (args, data, remain, {
(argType === null ||
isTypeArray && ~argType.indexOf(null)))
+ if (typeof argType === 'undefined') {
+ // la is going to unexpectedly be parsed outside the context of this arg
+ const hangingLa = !hadEq && la && !la?.startsWith('-') && !['true', 'false'].includes(la)
+ if (unknownHandler) {
+ if (hangingLa) {
+ unknownHandler(arg, la)
+ } else {
+ unknownHandler(arg)
+ }
+ } else if (unknownHandler !== false) {
+ debug(`unknown: ${arg}`)
+ if (hangingLa) {
+ debug(`unknown: ${la} parsed as normal opt`)
+ }
+ }
+ }
+
if (isBool) {
// just set and move along
val = !no
@@ -420,7 +449,7 @@ const singleCharacters = (arg, shorthands) => {
}
function resolveShort (arg, ...rest) {
- const { types = {}, shorthands = {} } = rest.length ? rest.pop() : {}
+ const { abbrevHandler, types = {}, shorthands = {} } = rest.length ? rest.pop() : {}
const shortAbbr = rest[0] ?? abbrev(Object.keys(shorthands))
const abbrevs = rest[1] ?? abbrev(Object.keys(types))
@@ -457,7 +486,13 @@ function resolveShort (arg, ...rest) {
}
// if it's an abbr for a shorthand, then use that
+ // exact match has already happened so we don't need to account for that here
if (shortAbbr[arg]) {
+ if (abbrevHandler) {
+ abbrevHandler(arg, shortAbbr[arg])
+ } else if (abbrevHandler !== false) {
+ debug(`abbrev: ${arg} -> ${shortAbbr[arg]}`)
+ }
arg = shortAbbr[arg]
}
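The `hangingLa` check above decides whether the token after an unknown option is about to be consumed as a separate argument: that happens only when there was no `=`, a lookahead exists, it does not start with `-`, and it is not a bare `true`/`false`. A standalone restatement of that predicate (the helper name is hypothetical, not part of nopt):

```js
// Returns the lookahead token that will be parsed on its own after an unknown
// option, or undefined when the unknown option stands alone.
function hangingLookahead (hadEq, la) {
  const hanging = !hadEq && la && !la.startsWith('-') && !['true', 'false'].includes(la)
  return hanging ? la : undefined
}

// '--mystery value'   -> 'value' would be parsed as a separate argument
console.log(hangingLookahead(false, 'value'))   // 'value'
// '--mystery=value'   -> the value was attached with '=', nothing is left hanging
console.log(hangingLookahead(true, 'value'))    // undefined
// '--mystery --next'  -> the next token is itself an option
console.log(hangingLookahead(false, '--next'))  // undefined
```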
diff --git a/deps/npm/node_modules/nopt/lib/nopt.js b/deps/npm/node_modules/nopt/lib/nopt.js
index 37f01a08783f87..9a24342b374aa0 100644
--- a/deps/npm/node_modules/nopt/lib/nopt.js
+++ b/deps/npm/node_modules/nopt/lib/nopt.js
@@ -18,6 +18,8 @@ function nopt (types, shorthands, args = process.argv, slice = 2) {
shorthands: shorthands || {},
typeDefs: exports.typeDefs,
invalidHandler: exports.invalidHandler,
+ unknownHandler: exports.unknownHandler,
+ abbrevHandler: exports.abbrevHandler,
})
}
@@ -26,5 +28,7 @@ function clean (data, types, typeDefs = exports.typeDefs) {
types: types || {},
typeDefs,
invalidHandler: exports.invalidHandler,
+ unknownHandler: exports.unknownHandler,
+ abbrevHandler: exports.abbrevHandler,
})
}
diff --git a/deps/npm/node_modules/nopt/node_modules/abbrev/LICENSE b/deps/npm/node_modules/nopt/node_modules/abbrev/LICENSE
deleted file mode 100644
index 9bcfa9d7d8d26e..00000000000000
--- a/deps/npm/node_modules/nopt/node_modules/abbrev/LICENSE
+++ /dev/null
@@ -1,46 +0,0 @@
-This software is dual-licensed under the ISC and MIT licenses.
-You may use this software under EITHER of the following licenses.
-
-----------
-
-The ISC License
-
-Copyright (c) Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-----------
-
-Copyright Isaac Z. Schlueter and Contributors
-All rights reserved.
-
-Permission is hereby granted, free of charge, to any person
-obtaining a copy of this software and associated documentation
-files (the "Software"), to deal in the Software without
-restriction, including without limitation the rights to use,
-copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the
-Software is furnished to do so, subject to the following
-conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
-OTHER DEALINGS IN THE SOFTWARE.
diff --git a/deps/npm/node_modules/nopt/node_modules/abbrev/lib/index.js b/deps/npm/node_modules/nopt/node_modules/abbrev/lib/index.js
deleted file mode 100644
index 9f48801f049c9e..00000000000000
--- a/deps/npm/node_modules/nopt/node_modules/abbrev/lib/index.js
+++ /dev/null
@@ -1,50 +0,0 @@
-module.exports = abbrev
-
-function abbrev (...args) {
- let list = args.length === 1 || Array.isArray(args[0]) ? args[0] : args
-
- for (let i = 0, l = list.length; i < l; i++) {
- list[i] = typeof list[i] === 'string' ? list[i] : String(list[i])
- }
-
- // sort them lexicographically, so that they're next to their nearest kin
- list = list.sort(lexSort)
-
- // walk through each, seeing how much it has in common with the next and previous
- const abbrevs = {}
- let prev = ''
- for (let ii = 0, ll = list.length; ii < ll; ii++) {
- const current = list[ii]
- const next = list[ii + 1] || ''
- let nextMatches = true
- let prevMatches = true
- if (current === next) {
- continue
- }
- let j = 0
- const cl = current.length
- for (; j < cl; j++) {
- const curChar = current.charAt(j)
- nextMatches = nextMatches && curChar === next.charAt(j)
- prevMatches = prevMatches && curChar === prev.charAt(j)
- if (!nextMatches && !prevMatches) {
- j++
- break
- }
- }
- prev = current
- if (j === cl) {
- abbrevs[current] = current
- continue
- }
- for (let a = current.slice(0, j); j <= cl; j++) {
- abbrevs[a] = current
- a += current.charAt(j)
- }
- }
- return abbrevs
-}
-
-function lexSort (a, b) {
- return a === b ? 0 : a > b ? 1 : -1
-}
diff --git a/deps/npm/node_modules/nopt/node_modules/abbrev/package.json b/deps/npm/node_modules/nopt/node_modules/abbrev/package.json
deleted file mode 100644
index e26400445631ad..00000000000000
--- a/deps/npm/node_modules/nopt/node_modules/abbrev/package.json
+++ /dev/null
@@ -1,43 +0,0 @@
-{
- "name": "abbrev",
- "version": "2.0.0",
- "description": "Like ruby's abbrev module, but in js",
- "author": "GitHub Inc.",
- "main": "lib/index.js",
- "scripts": {
- "test": "tap",
- "lint": "eslint \"**/*.js\"",
- "postlint": "template-oss-check",
- "template-oss-apply": "template-oss-apply --force",
- "lintfix": "npm run lint -- --fix",
- "snap": "tap",
- "posttest": "npm run lint"
- },
- "repository": {
- "type": "git",
- "url": "https://github.com/npm/abbrev-js.git"
- },
- "license": "ISC",
- "devDependencies": {
- "@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.8.0",
- "tap": "^16.3.0"
- },
- "tap": {
- "nyc-arg": [
- "--exclude",
- "tap-snapshots/**"
- ]
- },
- "files": [
- "bin/",
- "lib/"
- ],
- "engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
- },
- "templateOSS": {
- "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.8.0"
- }
-}
diff --git a/deps/npm/node_modules/nopt/package.json b/deps/npm/node_modules/nopt/package.json
index 508b8e28b59f70..0732ada73c1d00 100644
--- a/deps/npm/node_modules/nopt/package.json
+++ b/deps/npm/node_modules/nopt/package.json
@@ -1,6 +1,6 @@
{
"name": "nopt",
- "version": "8.0.0",
+ "version": "8.1.0",
"description": "Option parsing for Node, supporting types, shorthands, etc. Used by npm.",
"author": "GitHub Inc.",
"main": "lib/nopt.js",
@@ -23,11 +23,11 @@
},
"license": "ISC",
"dependencies": {
- "abbrev": "^2.0.0"
+ "abbrev": "^3.0.0"
},
"devDependencies": {
"@npmcli/eslint-config": "^5.0.0",
- "@npmcli/template-oss": "4.23.3",
+ "@npmcli/template-oss": "4.23.6",
"tap": "^16.3.0"
},
"tap": {
@@ -46,7 +46,7 @@
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
"windowsCI": false,
- "version": "4.23.3",
+ "version": "4.23.6",
"publish": true
}
}
diff --git a/deps/npm/node_modules/npm-package-arg/lib/npa.js b/deps/npm/node_modules/npm-package-arg/lib/npa.js
index 8094b3e732cd98..d409b7f1becfcc 100644
--- a/deps/npm/node_modules/npm-package-arg/lib/npa.js
+++ b/deps/npm/node_modules/npm-package-arg/lib/npa.js
@@ -1,22 +1,24 @@
'use strict'
-module.exports = npa
-module.exports.resolve = resolve
-module.exports.toPurl = toPurl
-module.exports.Result = Result
-const { URL } = require('url')
+const isWindows = process.platform === 'win32'
+
+const { URL } = require('node:url')
+// We need to use path/win32 so that we get consistent results in tests, but this also means we need to manually convert backslashes to forward slashes when generating file: urls with paths.
+const path = isWindows ? require('node:path/win32') : require('node:path')
+const { homedir } = require('node:os')
const HostedGit = require('hosted-git-info')
const semver = require('semver')
-const path = global.FAKE_WINDOWS ? require('path').win32 : require('path')
const validatePackageName = require('validate-npm-package-name')
-const { homedir } = require('os')
const { log } = require('proc-log')
-const isWindows = process.platform === 'win32' || global.FAKE_WINDOWS
const hasSlashes = isWindows ? /\\|[/]/ : /[/]/
const isURL = /^(?:git[+])?[a-z]+:/i
const isGit = /^[^@]+@[^:.]+\.[^:]+:.+$/i
-const isFilename = /[.](?:tgz|tar.gz|tar)$/i
+const isFileType = /[.](?:tgz|tar.gz|tar)$/i
+const isPortNumber = /:[0-9]+(\/|$)/i
+const isWindowsFile = /^(?:[.]|~[/]|[/\\]|[a-zA-Z]:)/
+const isPosixFile = /^(?:[.]|~[/]|[/]|[a-zA-Z]:)/
+const defaultRegistry = 'https://registry.npmjs.org'
function npa (arg, where) {
let name
@@ -30,13 +32,14 @@ function npa (arg, where) {
return npa(arg.raw, where || arg.where)
}
}
- const nameEndsAt = arg[0] === '@' ? arg.slice(1).indexOf('@') + 1 : arg.indexOf('@')
+ const nameEndsAt = arg.indexOf('@', 1) // Skip possible leading @
const namePart = nameEndsAt > 0 ? arg.slice(0, nameEndsAt) : arg
if (isURL.test(arg)) {
spec = arg
} else if (isGit.test(arg)) {
spec = `git+ssh://${arg}`
- } else if (namePart[0] !== '@' && (hasSlashes.test(namePart) || isFilename.test(namePart))) {
+ // eslint-disable-next-line max-len
+ } else if (!namePart.startsWith('@') && (hasSlashes.test(namePart) || isFileType.test(namePart))) {
spec = arg
} else if (nameEndsAt > 0) {
name = namePart
@@ -53,7 +56,27 @@ function npa (arg, where) {
return resolve(name, spec, where, arg)
}
-const isFilespec = isWindows ? /^(?:[.]|~[/]|[/\\]|[a-zA-Z]:)/ : /^(?:[.]|~[/]|[/]|[a-zA-Z]:)/
+function isFileSpec (spec) {
+ if (!spec) {
+ return false
+ }
+ if (spec.toLowerCase().startsWith('file:')) {
+ return true
+ }
+ if (isWindows) {
+ return isWindowsFile.test(spec)
+ }
+ // We never hit this in windows tests, obviously
+ /* istanbul ignore next */
+ return isPosixFile.test(spec)
+}
+
+function isAliasSpec (spec) {
+ if (!spec) {
+ return false
+ }
+ return spec.toLowerCase().startsWith('npm:')
+}
function resolve (name, spec, where, arg) {
const res = new Result({
@@ -64,12 +87,16 @@ function resolve (name, spec, where, arg) {
})
if (name) {
- res.setName(name)
+ res.name = name
}
- if (spec && (isFilespec.test(spec) || /^file:/i.test(spec))) {
+ if (!where) {
+ where = process.cwd()
+ }
+
+ if (isFileSpec(spec)) {
return fromFile(res, where)
- } else if (spec && /^npm:/i.test(spec)) {
+ } else if (isAliasSpec(spec)) {
return fromAlias(res, where)
}
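The new `isFileSpec`/`isAliasSpec` helpers shown above replace the inline regex tests when deciding how a spec is resolved: anything that looks like a path or starts with `file:` goes to `fromFile`, anything starting with `npm:` goes to `fromAlias`, and the remaining branches (hosted git, URLs, registry specs) follow afterwards. A condensed sketch of that classification, reusing the regexes from the diff; the `classifySpec` wrapper is illustrative and omits the git/URL branches:

```js
const isWindows = process.platform === 'win32'
const isWindowsFile = /^(?:[.]|~[/]|[/\\]|[a-zA-Z]:)/
const isPosixFile = /^(?:[.]|~[/]|[/]|[a-zA-Z]:)/

function classifySpec (spec) {
  if (!spec) {
    return 'registry'
  }
  const lower = spec.toLowerCase()
  if (lower.startsWith('file:') || (isWindows ? isWindowsFile : isPosixFile).test(spec)) {
    return 'file'      // handled by fromFile()
  }
  if (lower.startsWith('npm:')) {
    return 'alias'     // handled by fromAlias()
  }
  return 'registry'    // git/URL handling omitted; plain specs resolve against the registry
}

console.log(classifySpec('./pkgs/foo.tgz'))  // 'file'
console.log(classifySpec('npm:foo@^1.0.0'))  // 'alias'
console.log(classifySpec('^1.2.3'))          // 'registry'
```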
@@ -81,15 +108,13 @@ function resolve (name, spec, where, arg) {
return fromHostedGit(res, hosted)
} else if (spec && isURL.test(spec)) {
return fromURL(res)
- } else if (spec && (hasSlashes.test(spec) || isFilename.test(spec))) {
+ } else if (spec && (hasSlashes.test(spec) || isFileType.test(spec))) {
return fromFile(res, where)
} else {
return fromRegistry(res)
}
}
-const defaultRegistry = 'https://registry.npmjs.org'
-
function toPurl (arg, reg = defaultRegistry) {
const res = npa(arg)
@@ -127,60 +152,62 @@ function invalidPurlType (type, raw) {
return err
}
-function Result (opts) {
- this.type = opts.type
- this.registry = opts.registry
- this.where = opts.where
- if (opts.raw == null) {
- this.raw = opts.name ? opts.name + '@' + opts.rawSpec : opts.rawSpec
- } else {
- this.raw = opts.raw
+class Result {
+ constructor (opts) {
+ this.type = opts.type
+ this.registry = opts.registry
+ this.where = opts.where
+ if (opts.raw == null) {
+ this.raw = opts.name ? `${opts.name}@${opts.rawSpec}` : opts.rawSpec
+ } else {
+ this.raw = opts.raw
+ }
+ this.name = undefined
+ this.escapedName = undefined
+ this.scope = undefined
+ this.rawSpec = opts.rawSpec || ''
+ this.saveSpec = opts.saveSpec
+ this.fetchSpec = opts.fetchSpec
+ if (opts.name) {
+ this.setName(opts.name)
+ }
+ this.gitRange = opts.gitRange
+ this.gitCommittish = opts.gitCommittish
+ this.gitSubdir = opts.gitSubdir
+ this.hosted = opts.hosted
}
- this.name = undefined
- this.escapedName = undefined
- this.scope = undefined
- this.rawSpec = opts.rawSpec || ''
- this.saveSpec = opts.saveSpec
- this.fetchSpec = opts.fetchSpec
- if (opts.name) {
- this.setName(opts.name)
- }
- this.gitRange = opts.gitRange
- this.gitCommittish = opts.gitCommittish
- this.gitSubdir = opts.gitSubdir
- this.hosted = opts.hosted
-}
+ // TODO move this to a getter/setter in a semver major
+ setName (name) {
+ const valid = validatePackageName(name)
+ if (!valid.validForOldPackages) {
+ throw invalidPackageName(name, valid, this.raw)
+ }
-Result.prototype.setName = function (name) {
- const valid = validatePackageName(name)
- if (!valid.validForOldPackages) {
- throw invalidPackageName(name, valid, this.raw)
+ this.name = name
+ this.scope = name[0] === '@' ? name.slice(0, name.indexOf('/')) : undefined
+ // scoped packages in couch must have slash url-encoded, e.g. @foo%2Fbar
+ this.escapedName = name.replace('/', '%2f')
+ return this
}
- this.name = name
- this.scope = name[0] === '@' ? name.slice(0, name.indexOf('/')) : undefined
- // scoped packages in couch must have slash url-encoded, e.g. @foo%2Fbar
- this.escapedName = name.replace('/', '%2f')
- return this
-}
-
-Result.prototype.toString = function () {
- const full = []
- if (this.name != null && this.name !== '') {
- full.push(this.name)
- }
- const spec = this.saveSpec || this.fetchSpec || this.rawSpec
- if (spec != null && spec !== '') {
- full.push(spec)
+ toString () {
+ const full = []
+ if (this.name != null && this.name !== '') {
+ full.push(this.name)
+ }
+ const spec = this.saveSpec || this.fetchSpec || this.rawSpec
+ if (spec != null && spec !== '') {
+ full.push(spec)
+ }
+ return full.length ? full.join('@') : this.raw
}
- return full.length ? full.join('@') : this.raw
-}
-Result.prototype.toJSON = function () {
- const result = Object.assign({}, this)
- delete result.hosted
- return result
+ toJSON () {
+ const result = Object.assign({}, this)
+ delete result.hosted
+ return result
+ }
}
// sets res.gitCommittish, res.gitRange, and res.gitSubdir
@@ -227,25 +254,67 @@ function setGitAttrs (res, committish) {
}
}
-function fromFile (res, where) {
- if (!where) {
- where = process.cwd()
+// Taken from: EncodePathChars and lookup_table in src/node_url.cc
+// url.pathToFileURL only returns absolute references. We can't use it to encode paths.
+// encodeURI mangles windows paths. We can't use it to encode paths.
+// Under the hood, url.pathToFileURL does a limited set of encoding, with an extra windows step, and then calls path.resolve.
+// The encoding that node does without path.resolve is not available outside of its source, so we are recreating it here.
+const encodedPathChars = new Map([
+ ['\0', '%00'],
+ ['\t', '%09'],
+ ['\n', '%0A'],
+ ['\r', '%0D'],
+ [' ', '%20'],
+ ['"', '%22'],
+ ['#', '%23'],
+ ['%', '%25'],
+ ['?', '%3F'],
+ ['[', '%5B'],
+ ['\\', isWindows ? '/' : '%5C'],
+ [']', '%5D'],
+ ['^', '%5E'],
+ ['|', '%7C'],
+ ['~', '%7E'],
+])
+
+function pathToFileURL (str) {
+ let result = ''
+ for (let i = 0; i < str.length; i++) {
+ result = `${result}${encodedPathChars.get(str[i]) ?? str[i]}`
+ }
+ if (result.startsWith('file:')) {
+ return result
}
- res.type = isFilename.test(res.rawSpec) ? 'file' : 'directory'
+ return `file:${result}`
+}
+
+function fromFile (res, where) {
+ res.type = isFileType.test(res.rawSpec) ? 'file' : 'directory'
res.where = where
- // always put the '/' on where when resolving urls, or else
- // file:foo from /path/to/bar goes to /path/to/foo, when we want
- // it to be /path/to/bar/foo
+ let rawSpec = pathToFileURL(res.rawSpec)
+
+ if (rawSpec.startsWith('file:/')) {
+ // XXX backwards compatibility lack of compliance with RFC 8089
+
+ // turn file://path into file:/path
+ if (/^file:\/\/[^/]/.test(rawSpec)) {
+ rawSpec = `file:/${rawSpec.slice(5)}`
+ }
+
+ // turn file:/../path into file:../path
+ // for 1 or 3 leading slashes (2 is already ruled out from handling file:// explicitly above)
+ if (/^\/{1,3}\.\.?(\/|$)/.test(rawSpec.slice(5))) {
+ rawSpec = rawSpec.replace(/^file:\/{1,3}/, 'file:')
+ }
+ }
- let specUrl
let resolvedUrl
- const prefix = (!/^file:/.test(res.rawSpec) ? 'file:' : '')
- const rawWithPrefix = prefix + res.rawSpec
- let rawNoPrefix = rawWithPrefix.replace(/^file:/, '')
+ let specUrl
try {
- resolvedUrl = new URL(rawWithPrefix, `file://${path.resolve(where)}/`)
- specUrl = new URL(rawWithPrefix)
+ // always put the '/' on "where", or else file:foo from /path/to/bar goes to /path/to/foo, when we want it to be /path/to/bar/foo
+ resolvedUrl = new URL(rawSpec, `${pathToFileURL(path.resolve(where))}/`)
+ specUrl = new URL(rawSpec)
} catch (originalError) {
const er = new Error('Invalid file: URL, must comply with RFC 8089')
throw Object.assign(er, {
@@ -256,24 +325,6 @@ function fromFile (res, where) {
})
}
- // XXX backwards compatibility lack of compliance with RFC 8089
- if (resolvedUrl.host && resolvedUrl.host !== 'localhost') {
- const rawSpec = res.rawSpec.replace(/^file:\/\//, 'file:///')
- resolvedUrl = new URL(rawSpec, `file://${path.resolve(where)}/`)
- specUrl = new URL(rawSpec)
- rawNoPrefix = rawSpec.replace(/^file:/, '')
- }
- // turn file:/../foo into file:../foo
- // for 1, 2 or 3 leading slashes since we attempted
- // in the previous step to make it a file protocol url with a leading slash
- if (/^\/{1,3}\.\.?(\/|$)/.test(rawNoPrefix)) {
- const rawSpec = res.rawSpec.replace(/^file:\/{1,3}/, 'file:')
- resolvedUrl = new URL(rawSpec, `file://${path.resolve(where)}/`)
- specUrl = new URL(rawSpec)
- rawNoPrefix = rawSpec.replace(/^file:/, '')
- }
- // XXX end RFC 8089 violation backwards compatibility section
-
// turn /C:/blah into just C:/blah on windows
let specPath = decodeURIComponent(specUrl.pathname)
let resolvedPath = decodeURIComponent(resolvedUrl.pathname)
@@ -287,13 +338,21 @@ function fromFile (res, where) {
if (/^\/~(\/|$)/.test(specPath)) {
res.saveSpec = `file:${specPath.substr(1)}`
resolvedPath = path.resolve(homedir(), specPath.substr(3))
- } else if (!path.isAbsolute(rawNoPrefix)) {
+ } else if (!path.isAbsolute(rawSpec.slice(5))) {
res.saveSpec = `file:${path.relative(where, resolvedPath)}`
} else {
res.saveSpec = `file:${path.resolve(resolvedPath)}`
}
res.fetchSpec = path.resolve(where, resolvedPath)
+ // re-normalize the slashes in saveSpec due to node:path/win32 behavior in windows
+ res.saveSpec = res.saveSpec.split('\\').join('/')
+ // Ignoring because this only happens in windows
+ /* istanbul ignore next */
+ if (res.saveSpec.startsWith('file://')) {
+ // normalization of \\win32\root paths can cause a double / which we don't want
+ res.saveSpec = `file:/${res.saveSpec.slice(7)}`
+ }
return res
}
@@ -324,7 +383,9 @@ function fromURL (res) {
// git+ssh://git@my.custom.git.com:username/project.git#deadbeef
// ...and various combinations. The username in the beginning is *required*.
const matched = rawSpec.match(/^git\+ssh:\/\/([^:#]+:[^#]+(?:\.git)?)(?:#(.*))?$/i)
- if (matched && !matched[1].match(/:[0-9]+\/?.*$/i)) {
+ // Filter out all-number "usernames" which are really port numbers
+  // They can either be :1234, :1234/ or :1234/path, but not :12abc
+ if (matched && !matched[1].match(isPortNumber)) {
res.type = 'git'
setGitAttrs(res, matched[2])
res.fetchSpec = matched[1]
@@ -413,3 +474,8 @@ function fromRegistry (res) {
}
return res
}
+
+module.exports = npa
+module.exports.resolve = resolve
+module.exports.toPurl = toPurl
+module.exports.Result = Result
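The reworked spec handling above does three things at once: a hand-rolled `pathToFileURL()` that can encode relative paths (core `url.pathToFileURL()` only emits absolute URLs), up-front normalization of the RFC 8089 edge cases (`file://path`, `file:/../path`), and a final pass that forces forward slashes into `saveSpec` after the `node:path/win32` round trip. A sketch of what callers see; the values in the comments are illustrative for a POSIX `where`, not taken from the patch:

```js
const npa = require('npm-package-arg')

// Relative directory spec, resolved against `where`
const dir = npa('foo@file:../bar', '/home/user/proj')
// dir.type      === 'directory'
// dir.saveSpec  === 'file:../bar'      (always '/' separators, even on Windows)
// dir.fetchSpec === '/home/user/bar'   (absolute, resolved from `where`)

// A .tgz/.tar/.tar.gz suffix flips the type to 'file'
const tarball = npa('./pkg-1.0.0.tgz', '/home/user/proj')
// tarball.type === 'file'

// scp-style git specs: an all-digit segment followed by '/' or end of string,
// as in git+ssh://git@host:1234/repo.git, is now read as a port, while
// something like host:12abc/path is still treated as an scp-style path.
```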
diff --git a/deps/npm/node_modules/npm-package-arg/package.json b/deps/npm/node_modules/npm-package-arg/package.json
index 80baa3d32a52fe..58920fe240e5fc 100644
--- a/deps/npm/node_modules/npm-package-arg/package.json
+++ b/deps/npm/node_modules/npm-package-arg/package.json
@@ -1,6 +1,6 @@
{
"name": "npm-package-arg",
- "version": "12.0.0",
+ "version": "12.0.2",
"description": "Parse the things that can be arguments to `npm install`",
"main": "./lib/npa.js",
"directories": {
@@ -18,7 +18,7 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^5.0.0",
- "@npmcli/template-oss": "4.23.3",
+ "@npmcli/template-oss": "4.23.5",
"tap": "^16.0.1"
},
"scripts": {
@@ -55,7 +55,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.23.3",
+ "version": "4.23.5",
"publish": true
}
}
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/LICENSE b/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/LICENSE
deleted file mode 100644
index 49f7efe431c9ea..00000000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/LICENSE
+++ /dev/null
@@ -1,26 +0,0 @@
-Minizlib was created by Isaac Z. Schlueter.
-It is a derivative work of the Node.js project.
-
-"""
-Copyright (c) 2017-2023 Isaac Z. Schlueter and Contributors
-Copyright (c) 2017-2023 Node.js contributors. All rights reserved.
-Copyright (c) 2017-2023 Joyent, Inc. and other Node contributors. All rights reserved.
-
-Permission is hereby granted, free of charge, to any person obtaining a
-copy of this software and associated documentation files (the "Software"),
-to deal in the Software without restriction, including without limitation
-the rights to use, copy, modify, merge, publish, distribute, sublicense,
-and/or sell copies of the Software, and to permit persons to whom the
-Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-"""
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/commonjs/constants.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/commonjs/constants.js
deleted file mode 100644
index dfc2c1957bfc99..00000000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/commonjs/constants.js
+++ /dev/null
@@ -1,123 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.constants = void 0;
-// Update with any zlib constants that are added or changed in the future.
-// Node v6 didn't export this, so we just hard code the version and rely
-// on all the other hard-coded values from zlib v4736. When node v6
-// support drops, we can just export the realZlibConstants object.
-const zlib_1 = __importDefault(require("zlib"));
-/* c8 ignore start */
-const realZlibConstants = zlib_1.default.constants || { ZLIB_VERNUM: 4736 };
-/* c8 ignore stop */
-exports.constants = Object.freeze(Object.assign(Object.create(null), {
- Z_NO_FLUSH: 0,
- Z_PARTIAL_FLUSH: 1,
- Z_SYNC_FLUSH: 2,
- Z_FULL_FLUSH: 3,
- Z_FINISH: 4,
- Z_BLOCK: 5,
- Z_OK: 0,
- Z_STREAM_END: 1,
- Z_NEED_DICT: 2,
- Z_ERRNO: -1,
- Z_STREAM_ERROR: -2,
- Z_DATA_ERROR: -3,
- Z_MEM_ERROR: -4,
- Z_BUF_ERROR: -5,
- Z_VERSION_ERROR: -6,
- Z_NO_COMPRESSION: 0,
- Z_BEST_SPEED: 1,
- Z_BEST_COMPRESSION: 9,
- Z_DEFAULT_COMPRESSION: -1,
- Z_FILTERED: 1,
- Z_HUFFMAN_ONLY: 2,
- Z_RLE: 3,
- Z_FIXED: 4,
- Z_DEFAULT_STRATEGY: 0,
- DEFLATE: 1,
- INFLATE: 2,
- GZIP: 3,
- GUNZIP: 4,
- DEFLATERAW: 5,
- INFLATERAW: 6,
- UNZIP: 7,
- BROTLI_DECODE: 8,
- BROTLI_ENCODE: 9,
- Z_MIN_WINDOWBITS: 8,
- Z_MAX_WINDOWBITS: 15,
- Z_DEFAULT_WINDOWBITS: 15,
- Z_MIN_CHUNK: 64,
- Z_MAX_CHUNK: Infinity,
- Z_DEFAULT_CHUNK: 16384,
- Z_MIN_MEMLEVEL: 1,
- Z_MAX_MEMLEVEL: 9,
- Z_DEFAULT_MEMLEVEL: 8,
- Z_MIN_LEVEL: -1,
- Z_MAX_LEVEL: 9,
- Z_DEFAULT_LEVEL: -1,
- BROTLI_OPERATION_PROCESS: 0,
- BROTLI_OPERATION_FLUSH: 1,
- BROTLI_OPERATION_FINISH: 2,
- BROTLI_OPERATION_EMIT_METADATA: 3,
- BROTLI_MODE_GENERIC: 0,
- BROTLI_MODE_TEXT: 1,
- BROTLI_MODE_FONT: 2,
- BROTLI_DEFAULT_MODE: 0,
- BROTLI_MIN_QUALITY: 0,
- BROTLI_MAX_QUALITY: 11,
- BROTLI_DEFAULT_QUALITY: 11,
- BROTLI_MIN_WINDOW_BITS: 10,
- BROTLI_MAX_WINDOW_BITS: 24,
- BROTLI_LARGE_MAX_WINDOW_BITS: 30,
- BROTLI_DEFAULT_WINDOW: 22,
- BROTLI_MIN_INPUT_BLOCK_BITS: 16,
- BROTLI_MAX_INPUT_BLOCK_BITS: 24,
- BROTLI_PARAM_MODE: 0,
- BROTLI_PARAM_QUALITY: 1,
- BROTLI_PARAM_LGWIN: 2,
- BROTLI_PARAM_LGBLOCK: 3,
- BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4,
- BROTLI_PARAM_SIZE_HINT: 5,
- BROTLI_PARAM_LARGE_WINDOW: 6,
- BROTLI_PARAM_NPOSTFIX: 7,
- BROTLI_PARAM_NDIRECT: 8,
- BROTLI_DECODER_RESULT_ERROR: 0,
- BROTLI_DECODER_RESULT_SUCCESS: 1,
- BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2,
- BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3,
- BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0,
- BROTLI_DECODER_PARAM_LARGE_WINDOW: 1,
- BROTLI_DECODER_NO_ERROR: 0,
- BROTLI_DECODER_SUCCESS: 1,
- BROTLI_DECODER_NEEDS_MORE_INPUT: 2,
- BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3,
- BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1,
- BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2,
- BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3,
- BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4,
- BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5,
- BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6,
- BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7,
- BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8,
- BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9,
- BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10,
- BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11,
- BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12,
- BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13,
- BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14,
- BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15,
- BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16,
- BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19,
- BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20,
- BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21,
- BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22,
- BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25,
- BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26,
- BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27,
- BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30,
- BROTLI_DECODER_ERROR_UNREACHABLE: -31,
-}, realZlibConstants));
-//# sourceMappingURL=constants.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/commonjs/index.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/commonjs/index.js
deleted file mode 100644
index ad65eef0495076..00000000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/commonjs/index.js
+++ /dev/null
@@ -1,352 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.BrotliDecompress = exports.BrotliCompress = exports.Brotli = exports.Unzip = exports.InflateRaw = exports.DeflateRaw = exports.Gunzip = exports.Gzip = exports.Inflate = exports.Deflate = exports.Zlib = exports.ZlibError = exports.constants = void 0;
-const assert_1 = __importDefault(require("assert"));
-const buffer_1 = require("buffer");
-const minipass_1 = require("minipass");
-const zlib_1 = __importDefault(require("zlib"));
-const constants_js_1 = require("./constants.js");
-var constants_js_2 = require("./constants.js");
-Object.defineProperty(exports, "constants", { enumerable: true, get: function () { return constants_js_2.constants; } });
-const OriginalBufferConcat = buffer_1.Buffer.concat;
-const _superWrite = Symbol('_superWrite');
-class ZlibError extends Error {
- code;
- errno;
- constructor(err) {
- super('zlib: ' + err.message);
- this.code = err.code;
- this.errno = err.errno;
- /* c8 ignore next */
- if (!this.code)
- this.code = 'ZLIB_ERROR';
- this.message = 'zlib: ' + err.message;
- Error.captureStackTrace(this, this.constructor);
- }
- get name() {
- return 'ZlibError';
- }
-}
-exports.ZlibError = ZlibError;
-// the Zlib class they all inherit from
-// This thing manages the queue of requests, and returns
-// true or false if there is anything in the queue when
-// you call the .write() method.
-const _flushFlag = Symbol('flushFlag');
-class ZlibBase extends minipass_1.Minipass {
- #sawError = false;
- #ended = false;
- #flushFlag;
- #finishFlushFlag;
- #fullFlushFlag;
- #handle;
- #onError;
- get sawError() {
- return this.#sawError;
- }
- get handle() {
- return this.#handle;
- }
- /* c8 ignore start */
- get flushFlag() {
- return this.#flushFlag;
- }
- /* c8 ignore stop */
- constructor(opts, mode) {
- if (!opts || typeof opts !== 'object')
- throw new TypeError('invalid options for ZlibBase constructor');
- //@ts-ignore
- super(opts);
- /* c8 ignore start */
- this.#flushFlag = opts.flush ?? 0;
- this.#finishFlushFlag = opts.finishFlush ?? 0;
- this.#fullFlushFlag = opts.fullFlushFlag ?? 0;
- /* c8 ignore stop */
- // this will throw if any options are invalid for the class selected
- try {
- // @types/node doesn't know that it exports the classes, but they're there
- //@ts-ignore
- this.#handle = new zlib_1.default[mode](opts);
- }
- catch (er) {
- // make sure that all errors get decorated properly
- throw new ZlibError(er);
- }
- this.#onError = err => {
- // no sense raising multiple errors, since we abort on the first one.
- if (this.#sawError)
- return;
- this.#sawError = true;
- // there is no way to cleanly recover.
- // continuing only obscures problems.
- this.close();
- this.emit('error', err);
- };
- this.#handle?.on('error', er => this.#onError(new ZlibError(er)));
- this.once('end', () => this.close);
- }
- close() {
- if (this.#handle) {
- this.#handle.close();
- this.#handle = undefined;
- this.emit('close');
- }
- }
- reset() {
- if (!this.#sawError) {
- (0, assert_1.default)(this.#handle, 'zlib binding closed');
- //@ts-ignore
- return this.#handle.reset?.();
- }
- }
- flush(flushFlag) {
- if (this.ended)
- return;
- if (typeof flushFlag !== 'number')
- flushFlag = this.#fullFlushFlag;
- this.write(Object.assign(buffer_1.Buffer.alloc(0), { [_flushFlag]: flushFlag }));
- }
- end(chunk, encoding, cb) {
- /* c8 ignore start */
- if (typeof chunk === 'function') {
- cb = chunk;
- encoding = undefined;
- chunk = undefined;
- }
- if (typeof encoding === 'function') {
- cb = encoding;
- encoding = undefined;
- }
- /* c8 ignore stop */
- if (chunk) {
- if (encoding)
- this.write(chunk, encoding);
- else
- this.write(chunk);
- }
- this.flush(this.#finishFlushFlag);
- this.#ended = true;
- return super.end(cb);
- }
- get ended() {
- return this.#ended;
- }
- // overridden in the gzip classes to do portable writes
- [_superWrite](data) {
- return super.write(data);
- }
- write(chunk, encoding, cb) {
- // process the chunk using the sync process
- // then super.write() all the outputted chunks
- if (typeof encoding === 'function')
- (cb = encoding), (encoding = 'utf8');
- if (typeof chunk === 'string')
- chunk = buffer_1.Buffer.from(chunk, encoding);
- if (this.#sawError)
- return;
- (0, assert_1.default)(this.#handle, 'zlib binding closed');
- // _processChunk tries to .close() the native handle after it's done, so we
- // intercept that by temporarily making it a no-op.
- // diving into the node:zlib internals a bit here
- const nativeHandle = this.#handle
- ._handle;
- const originalNativeClose = nativeHandle.close;
- nativeHandle.close = () => { };
- const originalClose = this.#handle.close;
- this.#handle.close = () => { };
- // It also calls `Buffer.concat()` at the end, which may be convenient
- // for some, but which we are not interested in as it slows us down.
- buffer_1.Buffer.concat = args => args;
- let result = undefined;
- try {
- const flushFlag = typeof chunk[_flushFlag] === 'number'
- ? chunk[_flushFlag]
- : this.#flushFlag;
- result = this.#handle._processChunk(chunk, flushFlag);
- // if we don't throw, reset it back how it was
- buffer_1.Buffer.concat = OriginalBufferConcat;
- }
- catch (err) {
- // or if we do, put Buffer.concat() back before we emit error
- // Error events call into user code, which may call Buffer.concat()
- buffer_1.Buffer.concat = OriginalBufferConcat;
- this.#onError(new ZlibError(err));
- }
- finally {
- if (this.#handle) {
- // Core zlib resets `_handle` to null after attempting to close the
- // native handle. Our no-op handler prevented actual closure, but we
- // need to restore the `._handle` property.
- ;
- this.#handle._handle =
- nativeHandle;
- nativeHandle.close = originalNativeClose;
- this.#handle.close = originalClose;
- // `_processChunk()` adds an 'error' listener. If we don't remove it
- // after each call, these handlers start piling up.
- this.#handle.removeAllListeners('error');
- // make sure OUR error listener is still attached tho
- }
- }
- if (this.#handle)
- this.#handle.on('error', er => this.#onError(new ZlibError(er)));
- let writeReturn;
- if (result) {
- if (Array.isArray(result) && result.length > 0) {
- const r = result[0];
- // The first buffer is always `handle._outBuffer`, which would be
- // re-used for later invocations; so, we always have to copy that one.
- writeReturn = this[_superWrite](buffer_1.Buffer.from(r));
- for (let i = 1; i < result.length; i++) {
- writeReturn = this[_superWrite](result[i]);
- }
- }
- else {
- // either a single Buffer or an empty array
- writeReturn = this[_superWrite](buffer_1.Buffer.from(result));
- }
- }
- if (cb)
- cb();
- return writeReturn;
- }
-}
-class Zlib extends ZlibBase {
- #level;
- #strategy;
- constructor(opts, mode) {
- opts = opts || {};
- opts.flush = opts.flush || constants_js_1.constants.Z_NO_FLUSH;
- opts.finishFlush = opts.finishFlush || constants_js_1.constants.Z_FINISH;
- opts.fullFlushFlag = constants_js_1.constants.Z_FULL_FLUSH;
- super(opts, mode);
- this.#level = opts.level;
- this.#strategy = opts.strategy;
- }
- params(level, strategy) {
- if (this.sawError)
- return;
- if (!this.handle)
- throw new Error('cannot switch params when binding is closed');
- // no way to test this without also not supporting params at all
- /* c8 ignore start */
- if (!this.handle.params)
- throw new Error('not supported in this implementation');
- /* c8 ignore stop */
- if (this.#level !== level || this.#strategy !== strategy) {
- this.flush(constants_js_1.constants.Z_SYNC_FLUSH);
- (0, assert_1.default)(this.handle, 'zlib binding closed');
- // .params() calls .flush(), but the latter is always async in the
- // core zlib. We override .flush() temporarily to intercept that and
- // flush synchronously.
- const origFlush = this.handle.flush;
- this.handle.flush = (flushFlag, cb) => {
- /* c8 ignore start */
- if (typeof flushFlag === 'function') {
- cb = flushFlag;
- flushFlag = this.flushFlag;
- }
- /* c8 ignore stop */
- this.flush(flushFlag);
- cb?.();
- };
- try {
- ;
- this.handle.params(level, strategy);
- }
- finally {
- this.handle.flush = origFlush;
- }
- /* c8 ignore start */
- if (this.handle) {
- this.#level = level;
- this.#strategy = strategy;
- }
- /* c8 ignore stop */
- }
- }
-}
-exports.Zlib = Zlib;
-// minimal 2-byte header
-class Deflate extends Zlib {
- constructor(opts) {
- super(opts, 'Deflate');
- }
-}
-exports.Deflate = Deflate;
-class Inflate extends Zlib {
- constructor(opts) {
- super(opts, 'Inflate');
- }
-}
-exports.Inflate = Inflate;
-class Gzip extends Zlib {
- #portable;
- constructor(opts) {
- super(opts, 'Gzip');
- this.#portable = opts && !!opts.portable;
- }
- [_superWrite](data) {
- if (!this.#portable)
- return super[_superWrite](data);
- // we'll always get the header emitted in one first chunk
- // overwrite the OS indicator byte with 0xFF
- this.#portable = false;
- data[9] = 255;
- return super[_superWrite](data);
- }
-}
-exports.Gzip = Gzip;
-class Gunzip extends Zlib {
- constructor(opts) {
- super(opts, 'Gunzip');
- }
-}
-exports.Gunzip = Gunzip;
-// raw - no header
-class DeflateRaw extends Zlib {
- constructor(opts) {
- super(opts, 'DeflateRaw');
- }
-}
-exports.DeflateRaw = DeflateRaw;
-class InflateRaw extends Zlib {
- constructor(opts) {
- super(opts, 'InflateRaw');
- }
-}
-exports.InflateRaw = InflateRaw;
-// auto-detect header.
-class Unzip extends Zlib {
- constructor(opts) {
- super(opts, 'Unzip');
- }
-}
-exports.Unzip = Unzip;
-class Brotli extends ZlibBase {
- constructor(opts, mode) {
- opts = opts || {};
- opts.flush = opts.flush || constants_js_1.constants.BROTLI_OPERATION_PROCESS;
- opts.finishFlush =
- opts.finishFlush || constants_js_1.constants.BROTLI_OPERATION_FINISH;
- opts.fullFlushFlag = constants_js_1.constants.BROTLI_OPERATION_FLUSH;
- super(opts, mode);
- }
-}
-exports.Brotli = Brotli;
-class BrotliCompress extends Brotli {
- constructor(opts) {
- super(opts, 'BrotliCompress');
- }
-}
-exports.BrotliCompress = BrotliCompress;
-class BrotliDecompress extends Brotli {
- constructor(opts) {
- super(opts, 'BrotliDecompress');
- }
-}
-exports.BrotliDecompress = BrotliDecompress;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/commonjs/package.json b/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/commonjs/package.json
deleted file mode 100644
index 5bbefffbabee39..00000000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/commonjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
- "type": "commonjs"
-}
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/esm/constants.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/esm/constants.js
deleted file mode 100644
index 7faf40be5068d0..00000000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/esm/constants.js
+++ /dev/null
@@ -1,117 +0,0 @@
-// Update with any zlib constants that are added or changed in the future.
-// Node v6 didn't export this, so we just hard code the version and rely
-// on all the other hard-coded values from zlib v4736. When node v6
-// support drops, we can just export the realZlibConstants object.
-import realZlib from 'zlib';
-/* c8 ignore start */
-const realZlibConstants = realZlib.constants || { ZLIB_VERNUM: 4736 };
-/* c8 ignore stop */
-export const constants = Object.freeze(Object.assign(Object.create(null), {
- Z_NO_FLUSH: 0,
- Z_PARTIAL_FLUSH: 1,
- Z_SYNC_FLUSH: 2,
- Z_FULL_FLUSH: 3,
- Z_FINISH: 4,
- Z_BLOCK: 5,
- Z_OK: 0,
- Z_STREAM_END: 1,
- Z_NEED_DICT: 2,
- Z_ERRNO: -1,
- Z_STREAM_ERROR: -2,
- Z_DATA_ERROR: -3,
- Z_MEM_ERROR: -4,
- Z_BUF_ERROR: -5,
- Z_VERSION_ERROR: -6,
- Z_NO_COMPRESSION: 0,
- Z_BEST_SPEED: 1,
- Z_BEST_COMPRESSION: 9,
- Z_DEFAULT_COMPRESSION: -1,
- Z_FILTERED: 1,
- Z_HUFFMAN_ONLY: 2,
- Z_RLE: 3,
- Z_FIXED: 4,
- Z_DEFAULT_STRATEGY: 0,
- DEFLATE: 1,
- INFLATE: 2,
- GZIP: 3,
- GUNZIP: 4,
- DEFLATERAW: 5,
- INFLATERAW: 6,
- UNZIP: 7,
- BROTLI_DECODE: 8,
- BROTLI_ENCODE: 9,
- Z_MIN_WINDOWBITS: 8,
- Z_MAX_WINDOWBITS: 15,
- Z_DEFAULT_WINDOWBITS: 15,
- Z_MIN_CHUNK: 64,
- Z_MAX_CHUNK: Infinity,
- Z_DEFAULT_CHUNK: 16384,
- Z_MIN_MEMLEVEL: 1,
- Z_MAX_MEMLEVEL: 9,
- Z_DEFAULT_MEMLEVEL: 8,
- Z_MIN_LEVEL: -1,
- Z_MAX_LEVEL: 9,
- Z_DEFAULT_LEVEL: -1,
- BROTLI_OPERATION_PROCESS: 0,
- BROTLI_OPERATION_FLUSH: 1,
- BROTLI_OPERATION_FINISH: 2,
- BROTLI_OPERATION_EMIT_METADATA: 3,
- BROTLI_MODE_GENERIC: 0,
- BROTLI_MODE_TEXT: 1,
- BROTLI_MODE_FONT: 2,
- BROTLI_DEFAULT_MODE: 0,
- BROTLI_MIN_QUALITY: 0,
- BROTLI_MAX_QUALITY: 11,
- BROTLI_DEFAULT_QUALITY: 11,
- BROTLI_MIN_WINDOW_BITS: 10,
- BROTLI_MAX_WINDOW_BITS: 24,
- BROTLI_LARGE_MAX_WINDOW_BITS: 30,
- BROTLI_DEFAULT_WINDOW: 22,
- BROTLI_MIN_INPUT_BLOCK_BITS: 16,
- BROTLI_MAX_INPUT_BLOCK_BITS: 24,
- BROTLI_PARAM_MODE: 0,
- BROTLI_PARAM_QUALITY: 1,
- BROTLI_PARAM_LGWIN: 2,
- BROTLI_PARAM_LGBLOCK: 3,
- BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4,
- BROTLI_PARAM_SIZE_HINT: 5,
- BROTLI_PARAM_LARGE_WINDOW: 6,
- BROTLI_PARAM_NPOSTFIX: 7,
- BROTLI_PARAM_NDIRECT: 8,
- BROTLI_DECODER_RESULT_ERROR: 0,
- BROTLI_DECODER_RESULT_SUCCESS: 1,
- BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2,
- BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3,
- BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0,
- BROTLI_DECODER_PARAM_LARGE_WINDOW: 1,
- BROTLI_DECODER_NO_ERROR: 0,
- BROTLI_DECODER_SUCCESS: 1,
- BROTLI_DECODER_NEEDS_MORE_INPUT: 2,
- BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3,
- BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1,
- BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2,
- BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3,
- BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4,
- BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5,
- BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6,
- BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7,
- BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8,
- BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9,
- BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10,
- BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11,
- BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12,
- BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13,
- BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14,
- BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15,
- BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16,
- BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19,
- BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20,
- BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21,
- BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22,
- BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25,
- BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26,
- BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27,
- BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30,
- BROTLI_DECODER_ERROR_UNREACHABLE: -31,
-}, realZlibConstants));
-//# sourceMappingURL=constants.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/esm/index.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/esm/index.js
deleted file mode 100644
index a6269b505f47cc..00000000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/esm/index.js
+++ /dev/null
@@ -1,333 +0,0 @@
-import assert from 'assert';
-import { Buffer } from 'buffer';
-import { Minipass } from 'minipass';
-import realZlib from 'zlib';
-import { constants } from './constants.js';
-export { constants } from './constants.js';
-const OriginalBufferConcat = Buffer.concat;
-const _superWrite = Symbol('_superWrite');
-export class ZlibError extends Error {
- code;
- errno;
- constructor(err) {
- super('zlib: ' + err.message);
- this.code = err.code;
- this.errno = err.errno;
- /* c8 ignore next */
- if (!this.code)
- this.code = 'ZLIB_ERROR';
- this.message = 'zlib: ' + err.message;
- Error.captureStackTrace(this, this.constructor);
- }
- get name() {
- return 'ZlibError';
- }
-}
-// the Zlib class they all inherit from
-// This thing manages the queue of requests, and returns
-// true or false if there is anything in the queue when
-// you call the .write() method.
-const _flushFlag = Symbol('flushFlag');
-class ZlibBase extends Minipass {
- #sawError = false;
- #ended = false;
- #flushFlag;
- #finishFlushFlag;
- #fullFlushFlag;
- #handle;
- #onError;
- get sawError() {
- return this.#sawError;
- }
- get handle() {
- return this.#handle;
- }
- /* c8 ignore start */
- get flushFlag() {
- return this.#flushFlag;
- }
- /* c8 ignore stop */
- constructor(opts, mode) {
- if (!opts || typeof opts !== 'object')
- throw new TypeError('invalid options for ZlibBase constructor');
- //@ts-ignore
- super(opts);
- /* c8 ignore start */
- this.#flushFlag = opts.flush ?? 0;
- this.#finishFlushFlag = opts.finishFlush ?? 0;
- this.#fullFlushFlag = opts.fullFlushFlag ?? 0;
- /* c8 ignore stop */
- // this will throw if any options are invalid for the class selected
- try {
- // @types/node doesn't know that it exports the classes, but they're there
- //@ts-ignore
- this.#handle = new realZlib[mode](opts);
- }
- catch (er) {
- // make sure that all errors get decorated properly
- throw new ZlibError(er);
- }
- this.#onError = err => {
- // no sense raising multiple errors, since we abort on the first one.
- if (this.#sawError)
- return;
- this.#sawError = true;
- // there is no way to cleanly recover.
- // continuing only obscures problems.
- this.close();
- this.emit('error', err);
- };
- this.#handle?.on('error', er => this.#onError(new ZlibError(er)));
- this.once('end', () => this.close);
- }
- close() {
- if (this.#handle) {
- this.#handle.close();
- this.#handle = undefined;
- this.emit('close');
- }
- }
- reset() {
- if (!this.#sawError) {
- assert(this.#handle, 'zlib binding closed');
- //@ts-ignore
- return this.#handle.reset?.();
- }
- }
- flush(flushFlag) {
- if (this.ended)
- return;
- if (typeof flushFlag !== 'number')
- flushFlag = this.#fullFlushFlag;
- this.write(Object.assign(Buffer.alloc(0), { [_flushFlag]: flushFlag }));
- }
- end(chunk, encoding, cb) {
- /* c8 ignore start */
- if (typeof chunk === 'function') {
- cb = chunk;
- encoding = undefined;
- chunk = undefined;
- }
- if (typeof encoding === 'function') {
- cb = encoding;
- encoding = undefined;
- }
- /* c8 ignore stop */
- if (chunk) {
- if (encoding)
- this.write(chunk, encoding);
- else
- this.write(chunk);
- }
- this.flush(this.#finishFlushFlag);
- this.#ended = true;
- return super.end(cb);
- }
- get ended() {
- return this.#ended;
- }
- // overridden in the gzip classes to do portable writes
- [_superWrite](data) {
- return super.write(data);
- }
- write(chunk, encoding, cb) {
- // process the chunk using the sync process
- // then super.write() all the outputted chunks
- if (typeof encoding === 'function')
- (cb = encoding), (encoding = 'utf8');
- if (typeof chunk === 'string')
- chunk = Buffer.from(chunk, encoding);
- if (this.#sawError)
- return;
- assert(this.#handle, 'zlib binding closed');
- // _processChunk tries to .close() the native handle after it's done, so we
- // intercept that by temporarily making it a no-op.
- // diving into the node:zlib internals a bit here
- const nativeHandle = this.#handle
- ._handle;
- const originalNativeClose = nativeHandle.close;
- nativeHandle.close = () => { };
- const originalClose = this.#handle.close;
- this.#handle.close = () => { };
- // It also calls `Buffer.concat()` at the end, which may be convenient
- // for some, but which we are not interested in as it slows us down.
- Buffer.concat = args => args;
- let result = undefined;
- try {
- const flushFlag = typeof chunk[_flushFlag] === 'number'
- ? chunk[_flushFlag]
- : this.#flushFlag;
- result = this.#handle._processChunk(chunk, flushFlag);
- // if we don't throw, reset it back how it was
- Buffer.concat = OriginalBufferConcat;
- }
- catch (err) {
- // or if we do, put Buffer.concat() back before we emit error
- // Error events call into user code, which may call Buffer.concat()
- Buffer.concat = OriginalBufferConcat;
- this.#onError(new ZlibError(err));
- }
- finally {
- if (this.#handle) {
- // Core zlib resets `_handle` to null after attempting to close the
- // native handle. Our no-op handler prevented actual closure, but we
- // need to restore the `._handle` property.
- ;
- this.#handle._handle =
- nativeHandle;
- nativeHandle.close = originalNativeClose;
- this.#handle.close = originalClose;
- // `_processChunk()` adds an 'error' listener. If we don't remove it
- // after each call, these handlers start piling up.
- this.#handle.removeAllListeners('error');
- // make sure OUR error listener is still attached tho
- }
- }
- if (this.#handle)
- this.#handle.on('error', er => this.#onError(new ZlibError(er)));
- let writeReturn;
- if (result) {
- if (Array.isArray(result) && result.length > 0) {
- const r = result[0];
- // The first buffer is always `handle._outBuffer`, which would be
- // re-used for later invocations; so, we always have to copy that one.
- writeReturn = this[_superWrite](Buffer.from(r));
- for (let i = 1; i < result.length; i++) {
- writeReturn = this[_superWrite](result[i]);
- }
- }
- else {
- // either a single Buffer or an empty array
- writeReturn = this[_superWrite](Buffer.from(result));
- }
- }
- if (cb)
- cb();
- return writeReturn;
- }
-}
-export class Zlib extends ZlibBase {
- #level;
- #strategy;
- constructor(opts, mode) {
- opts = opts || {};
- opts.flush = opts.flush || constants.Z_NO_FLUSH;
- opts.finishFlush = opts.finishFlush || constants.Z_FINISH;
- opts.fullFlushFlag = constants.Z_FULL_FLUSH;
- super(opts, mode);
- this.#level = opts.level;
- this.#strategy = opts.strategy;
- }
- params(level, strategy) {
- if (this.sawError)
- return;
- if (!this.handle)
- throw new Error('cannot switch params when binding is closed');
- // no way to test this without also not supporting params at all
- /* c8 ignore start */
- if (!this.handle.params)
- throw new Error('not supported in this implementation');
- /* c8 ignore stop */
- if (this.#level !== level || this.#strategy !== strategy) {
- this.flush(constants.Z_SYNC_FLUSH);
- assert(this.handle, 'zlib binding closed');
- // .params() calls .flush(), but the latter is always async in the
- // core zlib. We override .flush() temporarily to intercept that and
- // flush synchronously.
- const origFlush = this.handle.flush;
- this.handle.flush = (flushFlag, cb) => {
- /* c8 ignore start */
- if (typeof flushFlag === 'function') {
- cb = flushFlag;
- flushFlag = this.flushFlag;
- }
- /* c8 ignore stop */
- this.flush(flushFlag);
- cb?.();
- };
- try {
- ;
- this.handle.params(level, strategy);
- }
- finally {
- this.handle.flush = origFlush;
- }
- /* c8 ignore start */
- if (this.handle) {
- this.#level = level;
- this.#strategy = strategy;
- }
- /* c8 ignore stop */
- }
- }
-}
-// minimal 2-byte header
-export class Deflate extends Zlib {
- constructor(opts) {
- super(opts, 'Deflate');
- }
-}
-export class Inflate extends Zlib {
- constructor(opts) {
- super(opts, 'Inflate');
- }
-}
-export class Gzip extends Zlib {
- #portable;
- constructor(opts) {
- super(opts, 'Gzip');
- this.#portable = opts && !!opts.portable;
- }
- [_superWrite](data) {
- if (!this.#portable)
- return super[_superWrite](data);
- // we'll always get the header emitted in one first chunk
- // overwrite the OS indicator byte with 0xFF
- this.#portable = false;
- data[9] = 255;
- return super[_superWrite](data);
- }
-}
-export class Gunzip extends Zlib {
- constructor(opts) {
- super(opts, 'Gunzip');
- }
-}
-// raw - no header
-export class DeflateRaw extends Zlib {
- constructor(opts) {
- super(opts, 'DeflateRaw');
- }
-}
-export class InflateRaw extends Zlib {
- constructor(opts) {
- super(opts, 'InflateRaw');
- }
-}
-// auto-detect header.
-export class Unzip extends Zlib {
- constructor(opts) {
- super(opts, 'Unzip');
- }
-}
-export class Brotli extends ZlibBase {
- constructor(opts, mode) {
- opts = opts || {};
- opts.flush = opts.flush || constants.BROTLI_OPERATION_PROCESS;
- opts.finishFlush =
- opts.finishFlush || constants.BROTLI_OPERATION_FINISH;
- opts.fullFlushFlag = constants.BROTLI_OPERATION_FLUSH;
- super(opts, mode);
- }
-}
-export class BrotliCompress extends Brotli {
- constructor(opts) {
- super(opts, 'BrotliCompress');
- }
-}
-export class BrotliDecompress extends Brotli {
- constructor(opts) {
- super(opts, 'BrotliDecompress');
- }
-}
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/esm/package.json b/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/esm/package.json
deleted file mode 100644
index 3dbc1ca591c055..00000000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/esm/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
- "type": "module"
-}
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/package.json b/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/package.json
deleted file mode 100644
index e94623ff43d353..00000000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/package.json
+++ /dev/null
@@ -1,81 +0,0 @@
-{
- "name": "minizlib",
- "version": "3.0.1",
- "description": "A small fast zlib stream built on [minipass](http://npm.im/minipass) and Node.js's zlib binding.",
- "main": "./dist/commonjs/index.js",
- "dependencies": {
- "minipass": "^7.0.4",
- "rimraf": "^5.0.5"
- },
- "scripts": {
- "prepare": "tshy",
- "pretest": "npm run prepare",
- "test": "tap",
- "preversion": "npm test",
- "postversion": "npm publish",
- "prepublishOnly": "git push origin --follow-tags",
- "format": "prettier --write . --loglevel warn",
- "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
- },
- "repository": {
- "type": "git",
- "url": "git+https://github.com/isaacs/minizlib.git"
- },
- "keywords": [
- "zlib",
- "gzip",
- "gunzip",
- "deflate",
- "inflate",
- "compression",
- "zip",
- "unzip"
- ],
- "author": "Isaac Z. Schlueter (http://blog.izs.me/)",
- "license": "MIT",
- "devDependencies": {
- "@types/node": "^20.11.29",
- "mkdirp": "^3.0.1",
- "tap": "^18.7.1",
- "tshy": "^1.12.0",
- "typedoc": "^0.25.12"
- },
- "files": [
- "dist"
- ],
- "engines": {
- "node": ">= 18"
- },
- "tshy": {
- "exports": {
- "./package.json": "./package.json",
- ".": "./src/index.ts"
- }
- },
- "exports": {
- "./package.json": "./package.json",
- ".": {
- "import": {
- "types": "./dist/esm/index.d.ts",
- "default": "./dist/esm/index.js"
- },
- "require": {
- "types": "./dist/commonjs/index.d.ts",
- "default": "./dist/commonjs/index.js"
- }
- }
- },
- "types": "./dist/commonjs/index.d.ts",
- "type": "module",
- "prettier": {
- "semi": false,
- "printWidth": 75,
- "tabWidth": 2,
- "useTabs": false,
- "singleQuote": true,
- "jsxSingleQuote": false,
- "bracketSameLine": true,
- "arrowParens": "avoid",
- "endOfLine": "lf"
- }
-}
diff --git a/deps/npm/node_modules/p-map/index.js b/deps/npm/node_modules/p-map/index.js
index c11a28512a4733..10558008a77283 100644
--- a/deps/npm/node_modules/p-map/index.js
+++ b/deps/npm/node_modules/p-map/index.js
@@ -1,49 +1,107 @@
-'use strict';
-const AggregateError = require('aggregate-error');
-
-module.exports = async (
+export default async function pMap(
iterable,
mapper,
{
- concurrency = Infinity,
- stopOnError = true
- } = {}
-) => {
- return new Promise((resolve, reject) => {
+ concurrency = Number.POSITIVE_INFINITY,
+ stopOnError = true,
+ signal,
+ } = {},
+) {
+ return new Promise((resolve_, reject_) => {
+ if (iterable[Symbol.iterator] === undefined && iterable[Symbol.asyncIterator] === undefined) {
+ throw new TypeError(`Expected \`input\` to be either an \`Iterable\` or \`AsyncIterable\`, got (${typeof iterable})`);
+ }
+
if (typeof mapper !== 'function') {
throw new TypeError('Mapper function is required');
}
- if (!((Number.isSafeInteger(concurrency) || concurrency === Infinity) && concurrency >= 1)) {
+ if (!((Number.isSafeInteger(concurrency) && concurrency >= 1) || concurrency === Number.POSITIVE_INFINITY)) {
throw new TypeError(`Expected \`concurrency\` to be an integer from 1 and up or \`Infinity\`, got \`${concurrency}\` (${typeof concurrency})`);
}
const result = [];
const errors = [];
- const iterator = iterable[Symbol.iterator]();
+ const skippedIndexesMap = new Map();
let isRejected = false;
+ let isResolved = false;
let isIterableDone = false;
let resolvingCount = 0;
let currentIndex = 0;
+ const iterator = iterable[Symbol.iterator] === undefined ? iterable[Symbol.asyncIterator]() : iterable[Symbol.iterator]();
+
+ const signalListener = () => {
+ reject(signal.reason);
+ };
- const next = () => {
- if (isRejected) {
+ const cleanup = () => {
+ signal?.removeEventListener('abort', signalListener);
+ };
+
+ const resolve = value => {
+ resolve_(value);
+ cleanup();
+ };
+
+ const reject = reason => {
+ isRejected = true;
+ isResolved = true;
+ reject_(reason);
+ cleanup();
+ };
+
+ if (signal) {
+ if (signal.aborted) {
+ reject(signal.reason);
+ }
+
+ signal.addEventListener('abort', signalListener, {once: true});
+ }
+
+ const next = async () => {
+ if (isResolved) {
return;
}
- const nextItem = iterator.next();
+ const nextItem = await iterator.next();
+
const index = currentIndex;
currentIndex++;
+ // Note: `iterator.next()` can be called many times in parallel.
+ // This can cause multiple calls to this `next()` function to
+ // receive a `nextItem` with `done === true`.
+ // The shutdown logic that rejects/resolves must be protected
+ // so it runs only one time as the `skippedIndex` logic is
+ // non-idempotent.
if (nextItem.done) {
isIterableDone = true;
- if (resolvingCount === 0) {
- if (!stopOnError && errors.length !== 0) {
- reject(new AggregateError(errors));
- } else {
+ if (resolvingCount === 0 && !isResolved) {
+ if (!stopOnError && errors.length > 0) {
+ reject(new AggregateError(errors)); // eslint-disable-line unicorn/error-message
+ return;
+ }
+
+ isResolved = true;
+
+ if (skippedIndexesMap.size === 0) {
resolve(result);
+ return;
+ }
+
+ const pureResult = [];
+
+ // Support multiple `pMapSkip`'s.
+ for (const [index, value] of result.entries()) {
+ if (skippedIndexesMap.get(index) === pMapSkip) {
+ continue;
+ }
+
+ pureResult.push(value);
}
+
+ resolve(pureResult);
}
return;
@@ -51,31 +109,173 @@ module.exports = async (
resolvingCount++;
+ // Intentionally detached
(async () => {
try {
const element = await nextItem.value;
- result[index] = await mapper(element, index);
+
+ if (isResolved) {
+ return;
+ }
+
+ const value = await mapper(element, index);
+
+ // Use Map to stage the index of the element.
+ if (value === pMapSkip) {
+ skippedIndexesMap.set(index, value);
+ }
+
+ result[index] = value;
+
resolvingCount--;
- next();
+ await next();
} catch (error) {
if (stopOnError) {
- isRejected = true;
reject(error);
} else {
errors.push(error);
resolvingCount--;
- next();
+
+ // In that case we can't really continue regardless of `stopOnError` state
+ // since an iterable is likely to continue throwing after it throws once.
+ // If we continue calling `next()` indefinitely we will likely end up
+ // in an infinite loop of failed iteration.
+ try {
+ await next();
+ } catch (error) {
+ reject(error);
+ }
}
}
})();
};
- for (let i = 0; i < concurrency; i++) {
- next();
+ // Create the concurrent runners in a detached (non-awaited)
+ // promise. We need this so we can await the `next()` calls
+ // to stop creating runners before hitting the concurrency limit
+ // if the iterable has already been marked as done.
+ // NOTE: We *must* do this for async iterators otherwise we'll spin up
+ // infinite `next()` calls by default and never start the event loop.
+ (async () => {
+ for (let index = 0; index < concurrency; index++) {
+ try {
+ // eslint-disable-next-line no-await-in-loop
+ await next();
+ } catch (error) {
+ reject(error);
+ break;
+ }
- if (isIterableDone) {
- break;
+ if (isIterableDone || isRejected) {
+ break;
+ }
}
- }
+ })();
});
-};
+}
+
+export function pMapIterable(
+ iterable,
+ mapper,
+ {
+ concurrency = Number.POSITIVE_INFINITY,
+ backpressure = concurrency,
+ } = {},
+) {
+ if (iterable[Symbol.iterator] === undefined && iterable[Symbol.asyncIterator] === undefined) {
+ throw new TypeError(`Expected \`input\` to be either an \`Iterable\` or \`AsyncIterable\`, got (${typeof iterable})`);
+ }
+
+ if (typeof mapper !== 'function') {
+ throw new TypeError('Mapper function is required');
+ }
+
+ if (!((Number.isSafeInteger(concurrency) && concurrency >= 1) || concurrency === Number.POSITIVE_INFINITY)) {
+ throw new TypeError(`Expected \`concurrency\` to be an integer from 1 and up or \`Infinity\`, got \`${concurrency}\` (${typeof concurrency})`);
+ }
+
+ if (!((Number.isSafeInteger(backpressure) && backpressure >= concurrency) || backpressure === Number.POSITIVE_INFINITY)) {
+ throw new TypeError(`Expected \`backpressure\` to be an integer from \`concurrency\` (${concurrency}) and up or \`Infinity\`, got \`${backpressure}\` (${typeof backpressure})`);
+ }
+
+ return {
+ async * [Symbol.asyncIterator]() {
+ const iterator = iterable[Symbol.asyncIterator] === undefined ? iterable[Symbol.iterator]() : iterable[Symbol.asyncIterator]();
+
+ const promises = [];
+ let runningMappersCount = 0;
+ let isDone = false;
+ let index = 0;
+
+ function trySpawn() {
+ if (isDone || !(runningMappersCount < concurrency && promises.length < backpressure)) {
+ return;
+ }
+
+ const promise = (async () => {
+ const {done, value} = await iterator.next();
+
+ if (done) {
+ return {done: true};
+ }
+
+ runningMappersCount++;
+
+ // Spawn if still below concurrency and backpressure limit
+ trySpawn();
+
+ try {
+ const returnValue = await mapper(await value, index++);
+
+ runningMappersCount--;
+
+ if (returnValue === pMapSkip) {
+ const index = promises.indexOf(promise);
+
+ if (index > 0) {
+ promises.splice(index, 1);
+ }
+ }
+
+ // Spawn if still below backpressure limit and just dropped below concurrency limit
+ trySpawn();
+
+ return {done: false, value: returnValue};
+ } catch (error) {
+ isDone = true;
+ return {error};
+ }
+ })();
+
+ promises.push(promise);
+ }
+
+ trySpawn();
+
+ while (promises.length > 0) {
+ const {error, done, value} = await promises[0]; // eslint-disable-line no-await-in-loop
+
+ promises.shift();
+
+ if (error) {
+ throw error;
+ }
+
+ if (done) {
+ return;
+ }
+
+ // Spawn if just dropped below backpressure limit and below the concurrency limit
+ trySpawn();
+
+ if (value === pMapSkip) {
+ continue;
+ }
+
+ yield value;
+ }
+ },
+ };
+}
+
+export const pMapSkip = Symbol('skip');
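p-map is rebuilt here as ESM with AbortSignal support, `pMapSkip`, and a lazy `pMapIterable`. A hedged usage sketch follows; the URLs are made up and Node 18+ is assumed for global `fetch`:

```js
import pMap, {pMapSkip, pMapIterable} from 'p-map';

const urls = ['https://a.example', 'https://b.example', 'https://c.example'];
const controller = new AbortController();

// Concurrency-limited mapping: the signal rejects the whole run, and
// returning pMapSkip drops that element from the result array.
const bodies = await pMap(urls, async url => {
	const response = await fetch(url, {signal: controller.signal});
	return response.ok ? response.text() : pMapSkip;
}, {concurrency: 2, signal: controller.signal});

// pMapIterable yields results lazily, keeping at most `backpressure`
// unconsumed results (and at most `concurrency` mappers) in flight.
const texts = pMapIterable(urls, url => fetch(url).then(r => r.text()), {concurrency: 2, backpressure: 4});
for await (const body of texts) {
	console.log(body.length);
}
```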
diff --git a/deps/npm/node_modules/p-map/package.json b/deps/npm/node_modules/p-map/package.json
index 042b1af553f2de..b7b6594c855d8c 100644
--- a/deps/npm/node_modules/p-map/package.json
+++ b/deps/npm/node_modules/p-map/package.json
@@ -1,6 +1,6 @@
{
"name": "p-map",
- "version": "4.0.0",
+ "version": "7.0.3",
"description": "Map over promises concurrently",
"license": "MIT",
"repository": "sindresorhus/p-map",
@@ -10,8 +10,14 @@
"email": "sindresorhus@gmail.com",
"url": "https://sindresorhus.com"
},
+ "type": "module",
+ "exports": {
+ "types": "./index.d.ts",
+ "default": "./index.js"
+ },
+ "sideEffects": false,
"engines": {
- "node": ">=10"
+ "node": ">=18"
},
"scripts": {
"test": "xo && ava && tsd"
@@ -38,16 +44,14 @@
"parallel",
"bluebird"
],
- "dependencies": {
- "aggregate-error": "^3.0.0"
- },
"devDependencies": {
- "ava": "^2.2.0",
- "delay": "^4.1.0",
- "in-range": "^2.0.0",
- "random-int": "^2.0.0",
- "time-span": "^3.1.0",
- "tsd": "^0.7.4",
- "xo": "^0.27.2"
+ "ava": "^5.2.0",
+ "chalk": "^5.3.0",
+ "delay": "^6.0.0",
+ "in-range": "^3.0.0",
+ "random-int": "^3.0.0",
+ "time-span": "^5.1.0",
+ "tsd": "^0.29.0",
+ "xo": "^0.56.0"
}
}
diff --git a/deps/npm/node_modules/postcss-selector-parser/API.md b/deps/npm/node_modules/postcss-selector-parser/API.md
index c8e55ee53f6ebf..e564830b66b04b 100644
--- a/deps/npm/node_modules/postcss-selector-parser/API.md
+++ b/deps/npm/node_modules/postcss-selector-parser/API.md
@@ -254,7 +254,7 @@ if (next && next.type !== 'combinator') {
}
```
-### `node.replaceWith(node)`
+### `node.replaceWith(node[,...nodeN])`
Replace a node with another.
@@ -267,6 +267,8 @@ attr.replaceWith(className);
Arguments:
* `node`: The node to substitute the original with.
+...
+* `nodeN`: The node to substitute the original with.
### `node.remove()`
@@ -531,7 +533,7 @@ Arguments:
* `node`: The node to add.
-### `container.insertBefore(old, new)` & `container.insertAfter(old, new)`
+### `container.insertBefore(old, new[, ...newNodes])` & `container.insertAfter(old, new[, ...newNodes])`
Add a node before or after an existing node in a container:
diff --git a/deps/npm/node_modules/postcss-selector-parser/dist/selectors/container.js b/deps/npm/node_modules/postcss-selector-parser/dist/selectors/container.js
index be35fa994138ab..84755cbd541a2b 100644
--- a/deps/npm/node_modules/postcss-selector-parser/dist/selectors/container.js
+++ b/deps/npm/node_modules/postcss-selector-parser/dist/selectors/container.js
@@ -33,6 +33,9 @@ var Container = /*#__PURE__*/function (_Node) {
_proto.prepend = function prepend(selector) {
selector.parent = this;
this.nodes.unshift(selector);
+ for (var id in this.indexes) {
+ this.indexes[id]++;
+ }
return this;
};
_proto.at = function at(index) {
@@ -69,29 +72,39 @@ var Container = /*#__PURE__*/function (_Node) {
return this.removeAll();
};
_proto.insertAfter = function insertAfter(oldNode, newNode) {
+ var _this$nodes;
newNode.parent = this;
var oldIndex = this.index(oldNode);
- this.nodes.splice(oldIndex + 1, 0, newNode);
+ var resetNode = [];
+ for (var i = 2; i < arguments.length; i++) {
+ resetNode.push(arguments[i]);
+ }
+ (_this$nodes = this.nodes).splice.apply(_this$nodes, [oldIndex + 1, 0, newNode].concat(resetNode));
newNode.parent = this;
var index;
for (var id in this.indexes) {
index = this.indexes[id];
- if (oldIndex <= index) {
- this.indexes[id] = index + 1;
+ if (oldIndex < index) {
+ this.indexes[id] = index + arguments.length - 1;
}
}
return this;
};
_proto.insertBefore = function insertBefore(oldNode, newNode) {
+ var _this$nodes2;
newNode.parent = this;
var oldIndex = this.index(oldNode);
- this.nodes.splice(oldIndex, 0, newNode);
+ var resetNode = [];
+ for (var i = 2; i < arguments.length; i++) {
+ resetNode.push(arguments[i]);
+ }
+ (_this$nodes2 = this.nodes).splice.apply(_this$nodes2, [oldIndex, 0, newNode].concat(resetNode));
newNode.parent = this;
var index;
for (var id in this.indexes) {
index = this.indexes[id];
- if (index <= oldIndex) {
- this.indexes[id] = index + 1;
+ if (index >= oldIndex) {
+ this.indexes[id] = index + arguments.length - 1;
}
}
return this;
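The `insertAfter`/`insertBefore` rewrite above backs the variadic signatures documented in API.md, and the `<` / `>=` comparisons plus the `arguments.length - 1` step keep any in-progress walker indexes pointing at the right nodes. A small illustrative use of the new form (the output shown in the final comment is assumed, not taken from the patch):

```js
const parser = require('postcss-selector-parser');

const out = parser(selectors => {
  selectors.walkClasses(cls => {
    if (cls.value !== 'btn') return;
    // 7.x: several trailing nodes may be inserted in a single call
    cls.parent.insertAfter(cls,
      parser.className({value: 'themed'}),
      parser.className({value: 'dark'}));
  });
}).processSync('.btn');
// out === '.btn.themed.dark'
```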
diff --git a/deps/npm/node_modules/postcss-selector-parser/package.json b/deps/npm/node_modules/postcss-selector-parser/package.json
index 0b074d0fd4f33c..f8b1d3619c0be1 100644
--- a/deps/npm/node_modules/postcss-selector-parser/package.json
+++ b/deps/npm/node_modules/postcss-selector-parser/package.json
@@ -1,6 +1,6 @@
{
"name": "postcss-selector-parser",
- "version": "6.1.2",
+ "version": "7.1.0",
"devDependencies": {
"@babel/cli": "^7.11.6",
"@babel/core": "^7.11.6",
diff --git a/deps/npm/node_modules/promise-inflight/LICENSE b/deps/npm/node_modules/promise-inflight/LICENSE
deleted file mode 100644
index 83e7c4c62903d7..00000000000000
--- a/deps/npm/node_modules/promise-inflight/LICENSE
+++ /dev/null
@@ -1,14 +0,0 @@
-Copyright (c) 2017, Rebecca Turner
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
diff --git a/deps/npm/node_modules/promise-inflight/inflight.js b/deps/npm/node_modules/promise-inflight/inflight.js
deleted file mode 100644
index ce054d34be8595..00000000000000
--- a/deps/npm/node_modules/promise-inflight/inflight.js
+++ /dev/null
@@ -1,36 +0,0 @@
-'use strict'
-module.exports = inflight
-
-let Bluebird
-try {
- Bluebird = require('bluebird')
-} catch (_) {
- Bluebird = Promise
-}
-
-const active = {}
-inflight.active = active
-function inflight (unique, doFly) {
- return Bluebird.all([unique, doFly]).then(function (args) {
- const unique = args[0]
- const doFly = args[1]
- if (Array.isArray(unique)) {
- return Bluebird.all(unique).then(function (uniqueArr) {
- return _inflight(uniqueArr.join(''), doFly)
- })
- } else {
- return _inflight(unique, doFly)
- }
- })
-
- function _inflight (unique, doFly) {
- if (!active[unique]) {
- active[unique] = (new Bluebird(function (resolve) {
- return resolve(doFly())
- }))
- active[unique].then(cleanup, cleanup)
- function cleanup() { delete active[unique] }
- }
- return active[unique]
- }
-}
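promise-inflight is dropped from the npm tree entirely. The removed module de-duplicated concurrent async work by key: while the promise for a given key is still pending, later calls reuse it instead of starting the work again. A minimal usage sketch of the removed API (the key and the fetch function are hypothetical):

```js
const inflight = require('promise-inflight')

// Hypothetical expensive operation we only want running once at a time.
const fetchUser = async id => ({ id, name: 'example' })

// Both calls share the key ['user', 1]; the second reuses the pending
// promise, so fetchUser runs once. After it settles, the key is evicted.
const a = inflight(['user', 1], () => fetchUser(1))
const b = inflight(['user', 1], () => fetchUser(1))
Promise.all([a, b]).then(([ua, ub]) => console.log(ua, ub))
```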
diff --git a/deps/npm/node_modules/promise-inflight/package.json b/deps/npm/node_modules/promise-inflight/package.json
deleted file mode 100644
index 0d8930c5b6d492..00000000000000
--- a/deps/npm/node_modules/promise-inflight/package.json
+++ /dev/null
@@ -1,24 +0,0 @@
-{
- "name": "promise-inflight",
- "version": "1.0.1",
- "description": "One promise for multiple requests in flight to avoid async duplication",
- "main": "inflight.js",
- "files": [
- "inflight.js"
- ],
- "license": "ISC",
- "scripts": {
- "test": "echo \"Error: no test specified\" && exit 1"
- },
- "keywords": [],
- "author": "Rebecca Turner (http://re-becca.org/)",
- "devDependencies": {},
- "repository": {
- "type": "git",
- "url": "git+https://github.com/iarna/promise-inflight.git"
- },
- "bugs": {
- "url": "https://github.com/iarna/promise-inflight/issues"
- },
- "homepage": "https://github.com/iarna/promise-inflight#readme"
-}
diff --git a/deps/npm/node_modules/read/dist/commonjs/read.js b/deps/npm/node_modules/read/dist/commonjs/read.js
index c0600d2b4e8cae..744a5f3bf4baf8 100644
--- a/deps/npm/node_modules/read/dist/commonjs/read.js
+++ b/deps/npm/node_modules/read/dist/commonjs/read.js
@@ -6,7 +6,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
exports.read = read;
const mute_stream_1 = __importDefault(require("mute-stream"));
const readline_1 = require("readline");
-async function read({ default: def, input = process.stdin, output = process.stdout, completer, prompt = '', silent, timeout, edit, terminal, replace, }) {
+async function read({ default: def, input = process.stdin, output = process.stdout, completer, prompt = '', silent, timeout, edit, terminal, replace, history, }) {
if (typeof def !== 'undefined' &&
typeof def !== 'string' &&
typeof def !== 'number') {
@@ -34,7 +34,7 @@ async function read({ default: def, input = process.stdin, output = process.stdo
m.pipe(output, { end: false });
output = m;
return new Promise((resolve, reject) => {
- const rl = (0, readline_1.createInterface)({ input, output, terminal, completer });
+ const rl = (0, readline_1.createInterface)({ input, output, terminal, completer, history });
// TODO: add tests for timeout
/* c8 ignore start */
const timer = timeout && setTimeout(() => onError(new Error('timed out')), timeout);
diff --git a/deps/npm/node_modules/read/dist/esm/read.js b/deps/npm/node_modules/read/dist/esm/read.js
index 716d394c876ac9..672be49ae88a76 100644
--- a/deps/npm/node_modules/read/dist/esm/read.js
+++ b/deps/npm/node_modules/read/dist/esm/read.js
@@ -1,6 +1,6 @@
import Mute from 'mute-stream';
import { createInterface } from 'readline';
-export async function read({ default: def, input = process.stdin, output = process.stdout, completer, prompt = '', silent, timeout, edit, terminal, replace, }) {
+export async function read({ default: def, input = process.stdin, output = process.stdout, completer, prompt = '', silent, timeout, edit, terminal, replace, history, }) {
if (typeof def !== 'undefined' &&
typeof def !== 'string' &&
typeof def !== 'number') {
@@ -28,7 +28,7 @@ export async function read({ default: def, input = process.stdin, output = proce
m.pipe(output, { end: false });
output = m;
return new Promise((resolve, reject) => {
- const rl = createInterface({ input, output, terminal, completer });
+ const rl = createInterface({ input, output, terminal, completer, history });
// TODO: add tests for timeout
/* c8 ignore start */
const timer = timeout && setTimeout(() => onError(new Error('timed out')), timeout);
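Both the CommonJS and ESM builds of `read` gain the same change: a caller-supplied `history` array is now forwarded to `readline.createInterface()`, enabling arrow-key recall of earlier entries. A minimal sketch of how a caller might use it (prompt text and history entries are illustrative):

```js
import { read } from 'read'

// Seed readline history so the user can arrow-up through prior answers.
const answer = await read({
  prompt: 'package name: ',
  history: ['my-last-package', 'an-older-package'],
})
console.log('got:', answer)
```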
diff --git a/deps/npm/node_modules/read/package.json b/deps/npm/node_modules/read/package.json
index 337f7d26d4dd94..1d88f22dd59f52 100644
--- a/deps/npm/node_modules/read/package.json
+++ b/deps/npm/node_modules/read/package.json
@@ -1,6 +1,6 @@
{
"name": "read",
- "version": "4.0.0",
+ "version": "4.1.0",
"exports": {
"./package.json": "./package.json",
".": {
diff --git a/deps/npm/node_modules/rimraf/LICENSE b/deps/npm/node_modules/rimraf/LICENSE
deleted file mode 100644
index 1493534e60dce4..00000000000000
--- a/deps/npm/node_modules/rimraf/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) 2011-2023 Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/rimraf/README.md b/deps/npm/node_modules/rimraf/README.md
deleted file mode 100644
index 7ab1a5d6d77ffa..00000000000000
--- a/deps/npm/node_modules/rimraf/README.md
+++ /dev/null
@@ -1,220 +0,0 @@
-The [UNIX command]() `rm -rf` for node
-in a cross-platform implementation.
-
-Install with `npm install rimraf`.
-
-## Major Changes
-
-### v4 to v5
-
-- There is no default export anymore. Import the functions directly
- using, e.g., `import { rimrafSync } from 'rimraf'`.
-
-### v3 to v4
-
-- The function returns a `Promise` instead of taking a callback.
-- Globbing requires the `--glob` CLI option or `glob` option property
- to be set. (Removed in 4.0 and 4.1, opt-in support added in 4.2.)
-- Functions take arrays of paths, as well as a single path.
-- Native implementation used by default when available, except on
- Windows, where this implementation is faster and more reliable.
-- New implementation on Windows, falling back to "move then
- remove" strategy when exponential backoff for `EBUSY` fails to
- resolve the situation.
-- Simplified implementation on POSIX, since the Windows
- affordances are not necessary there.
-- As of 4.3, return/resolve value is boolean instead of undefined.
-
-## API
-
-Hybrid module, load either with `import` or `require()`.
-
-```js
-// 'rimraf' export is the one you probably want, but other
-// strategies exported as well.
-import { rimraf, rimrafSync, native, nativeSync } from 'rimraf'
-// or
-const { rimraf, rimrafSync, native, nativeSync } = require('rimraf')
-```
-
-All removal functions return a boolean indicating that all
-entries were successfully removed.
-
-The only case in which this will not return `true` is if
-something was omitted from the removal via a `filter` option.
-
-### `rimraf(f, [opts]) -> Promise`
-
-This first parameter is a path or array of paths. The second
-argument is an options object.
-
-Options:
-
-- `preserveRoot`: If set to boolean `false`, then allow the
- recursive removal of the root directory. Otherwise, this is
- not allowed.
-- `tmp`: Windows only. Temp folder to place files and
- folders for the "move then remove" fallback. Must be on the
- same physical device as the path being deleted. Defaults to
- `os.tmpdir()` when that is on the same drive letter as the path
- being deleted, or `${drive}:\temp` if present, or `${drive}:\`
- if not.
-- `maxRetries`: Windows and Native only. Maximum number of
- retry attempts in case of `EBUSY`, `EMFILE`, and `ENFILE`
- errors. Default `10` for Windows implementation, `0` for Native
- implementation.
-- `backoff`: Windows only. Rate of exponential backoff for async
- removal in case of `EBUSY`, `EMFILE`, and `ENFILE` errors.
- Should be a number greater than 1. Default `1.2`
-- `maxBackoff`: Windows only. Maximum total backoff time in ms to
- attempt asynchronous retries in case of `EBUSY`, `EMFILE`, and
- `ENFILE` errors. Default `200`. With the default `1.2` backoff
- rate, this results in 14 retries, with the final retry being
- delayed 33ms.
-- `retryDelay`: Native only. Time to wait between retries, using
- linear backoff. Default `100`.
-- `signal` Pass in an AbortSignal to cancel the directory
- removal. This is useful when removing large folder structures,
- if you'd like to limit the time spent.
-
- Using a `signal` option prevents the use of Node's built-in
- `fs.rm` because that implementation does not support abort
- signals.
-
-- `glob` Boolean flag to treat path as glob pattern, or an object
- specifying [`glob` options](https://github.com/isaacs/node-glob).
-- `filter` Method that returns a boolean indicating whether that
- path should be deleted. With async `rimraf` methods, this may
- return a Promise that resolves to a boolean. (Since Promises
- are truthy, returning a Promise from a sync filter is the same
- as just not filtering anything.)
-
- The first argument to the filter is the path string. The
- second argument is either a `Dirent` or `Stats` object for that
- path. (The first path explored will be a `Stats`, the rest
- will be `Dirent`.)
-
- If a filter method is provided, it will _only_ remove entries
- if the filter returns (or resolves to) a truthy value. Omitting
- a directory will still allow its children to be removed, unless
- they are also filtered out, but any parents of a filtered entry
- will not be removed, since the directory will not be empty in
- that case.
-
- Using a filter method prevents the use of Node's built-in
- `fs.rm` because that implementation does not support filtering.
-
-Any other options are provided to the native Node.js `fs.rm` implementation
-when that is used.
-
-This will attempt to choose the best implementation, based on the Node.js
-version and `process.platform`. To force a specific implementation, use
-one of the other functions provided.
-
-### `rimraf.sync(f, [opts])` `rimraf.rimrafSync(f, [opts])`
-
-Synchronous form of `rimraf()`
-
-Note that, unlike many file system operations, the synchronous form will
-typically be significantly _slower_ than the async form, because recursive
-deletion is extremely parallelizable.
-
-### `rimraf.native(f, [opts])`
-
-Uses the built-in `fs.rm` implementation that Node.js provides. This is
-used by default on Node.js versions greater than or equal to `14.14.0`.
-
-### `rimraf.native.sync(f, [opts])` `rimraf.nativeSync(f, [opts])`
-
-Synchronous form of `rimraf.native`
-
-### `rimraf.manual(f, [opts])`
-
-Use the JavaScript implementation appropriate for your operating system.
-
-### `rimraf.manual.sync(f, [opts])` `rimraf.manualSync(f, opts)`
-
-Synchronous form of `rimraf.manual()`
-
-### `rimraf.windows(f, [opts])`
-
-JavaScript implementation of file removal appropriate for Windows
-platforms. Works around `unlink` and `rmdir` not being atomic
-operations, and `EPERM` when deleting files with certain
-permission modes.
-
-First deletes all non-directory files within the tree, and then
-removes all directories, which should ideally be empty by that
-time. When an `ENOTEMPTY` is raised in the second pass, falls
-back to the `rimraf.moveRemove` strategy as needed.
-
-### `rimraf.windows.sync(path, [opts])` `rimraf.windowsSync(path, [opts])`
-
-Synchronous form of `rimraf.windows()`
-
-### `rimraf.moveRemove(path, [opts])`
-
-Moves all files and folders to the parent directory of `path`
-with a temporary filename prior to attempting to remove them.
-
-Note that, in cases where the operation fails, this _may_ leave
-files lying around in the parent directory with names like
-`.file-basename.txt.0.123412341`. Until the Windows kernel
-provides a way to perform atomic `unlink` and `rmdir` operations,
-this is, unfortunately, unavoidable.
-
-To move files to a different temporary directory other than the
-parent, provide `opts.tmp`. Note that this _must_ be on the same
-physical device as the folder being deleted, or else the
-operation will fail.
-
-This is the slowest strategy, but most reliable on Windows
-platforms. Used as a last-ditch fallback by `rimraf.windows()`.
-
-### `rimraf.moveRemove.sync(path, [opts])` `rimraf.moveRemoveSync(path, [opts])`
-
-Synchronous form of `rimraf.moveRemove()`
-
-### Command Line Interface
-
-```
-rimraf version 4.3.0
-
-Usage: rimraf <path> [<path> ...]
-Deletes all files and folders at "path", recursively.
-
-Options:
- -- Treat all subsequent arguments as paths
- -h --help Display this usage info
- --preserve-root Do not remove '/' recursively (default)
- --no-preserve-root Do not treat '/' specially
- -G --no-glob Treat arguments as literal paths, not globs (default)
- -g --glob Treat arguments as glob patterns
- -v --verbose Be verbose when deleting files, showing them as
- they are removed. Not compatible with --impl=native
- -V --no-verbose Be silent when deleting files, showing nothing as
- they are removed (default)
- -i --interactive Ask for confirmation before deleting anything
- Not compatible with --impl=native
- -I --no-interactive Do not ask for confirmation before deleting
-
- --impl=<type> Specify the implementation to use:
- rimraf: choose the best option (default)
- native: the built-in implementation in Node.js
- manual: the platform-specific JS implementation
- posix: the Posix JS implementation
- windows: the Windows JS implementation (falls back to
- move-remove on ENOTEMPTY)
- move-remove: a slow reliable Windows fallback
-
-Implementation-specific options:
- --tmp=<path> Temp file folder for 'move-remove' implementation
- --max-retries=<n> maxRetries for 'native' and 'windows' implementations
- --retry-delay=<n> retryDelay for 'native' implementation, default 100
- --backoff=<n> Exponential backoff factor for retries (default: 1.2)
-```
-
-## mkdirp
-
-If you need to _create_ a directory recursively, check out
-[mkdirp](https://github.com/isaacs/node-mkdirp).
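The README deleted above documented rimraf's post-v4 API: named exports, boolean results, opt-in globbing, and a `filter` callback. A minimal sketch of calls in that style (the paths and glob pattern are illustrative):

```js
import { rimraf, rimrafSync } from 'rimraf'

// Resolves to true when everything was removed.
await rimraf('./build')

// Opt in to glob expansion; entries the filter rejects are kept.
await rimraf('./tmp/**/*.log', {
  glob: true,
  filter: path => !path.endsWith('keep.log'),
})

// Synchronous form; accepts a single path or an array of paths.
rimrafSync(['./dist', './coverage'])
```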
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/default-tmp.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/default-tmp.d.ts
deleted file mode 100644
index a68e925b249a8d..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/default-tmp.d.ts
+++ /dev/null
@@ -1,3 +0,0 @@
-export declare const defaultTmp: (path: string) => Promise<string>;
-export declare const defaultTmpSync: (path: string) => string;
-//# sourceMappingURL=default-tmp.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/default-tmp.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/default-tmp.d.ts.map
deleted file mode 100644
index d0b35f2786233b..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/default-tmp.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"default-tmp.d.ts","sourceRoot":"","sources":["../../src/default-tmp.ts"],"names":[],"mappings":"AAiEA,eAAO,MAAM,UAAU,SAnCc,MAAM,oBAoCe,CAAA;AAC1D,eAAO,MAAM,cAAc,SArBQ,MAAM,WAsByB,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/default-tmp.js b/deps/npm/node_modules/rimraf/dist/commonjs/default-tmp.js
deleted file mode 100644
index ae9087881962da..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/default-tmp.js
+++ /dev/null
@@ -1,61 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.defaultTmpSync = exports.defaultTmp = void 0;
-// The default temporary folder location for use in the windows algorithm.
-// It's TEMPting to use dirname(path), since that's guaranteed to be on the
-// same device. However, this means that:
-// rimraf(path).then(() => rimraf(dirname(path)))
-// will often fail with EBUSY, because the parent dir contains
-// marked-for-deletion directory entries (which do not show up in readdir).
-// The approach here is to use os.tmpdir() if it's on the same drive letter,
-// or resolve(path, '\\temp') if it exists, or the root of the drive if not.
-// On Posix (not that you'd be likely to use the windows algorithm there),
-// it uses os.tmpdir() always.
-const os_1 = require("os");
-const path_1 = require("path");
-const fs_js_1 = require("./fs.js");
-const platform_js_1 = __importDefault(require("./platform.js"));
-const { stat } = fs_js_1.promises;
-const isDirSync = (path) => {
- try {
- return (0, fs_js_1.statSync)(path).isDirectory();
- }
- catch (er) {
- return false;
- }
-};
-const isDir = (path) => stat(path).then(st => st.isDirectory(), () => false);
-const win32DefaultTmp = async (path) => {
- const { root } = (0, path_1.parse)(path);
- const tmp = (0, os_1.tmpdir)();
- const { root: tmpRoot } = (0, path_1.parse)(tmp);
- if (root.toLowerCase() === tmpRoot.toLowerCase()) {
- return tmp;
- }
- const driveTmp = (0, path_1.resolve)(root, '/temp');
- if (await isDir(driveTmp)) {
- return driveTmp;
- }
- return root;
-};
-const win32DefaultTmpSync = (path) => {
- const { root } = (0, path_1.parse)(path);
- const tmp = (0, os_1.tmpdir)();
- const { root: tmpRoot } = (0, path_1.parse)(tmp);
- if (root.toLowerCase() === tmpRoot.toLowerCase()) {
- return tmp;
- }
- const driveTmp = (0, path_1.resolve)(root, '/temp');
- if (isDirSync(driveTmp)) {
- return driveTmp;
- }
- return root;
-};
-const posixDefaultTmp = async () => (0, os_1.tmpdir)();
-const posixDefaultTmpSync = () => (0, os_1.tmpdir)();
-exports.defaultTmp = platform_js_1.default === 'win32' ? win32DefaultTmp : posixDefaultTmp;
-exports.defaultTmpSync = platform_js_1.default === 'win32' ? win32DefaultTmpSync : posixDefaultTmpSync;
-//# sourceMappingURL=default-tmp.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/default-tmp.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/default-tmp.js.map
deleted file mode 100644
index 4984afb1b21290..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/default-tmp.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"default-tmp.js","sourceRoot":"","sources":["../../src/default-tmp.ts"],"names":[],"mappings":";;;;;;AAAA,0EAA0E;AAC1E,2EAA2E;AAC3E,0CAA0C;AAC1C,iDAAiD;AACjD,8DAA8D;AAC9D,2EAA2E;AAC3E,4EAA4E;AAC5E,4EAA4E;AAC5E,0EAA0E;AAC1E,8BAA8B;AAC9B,2BAA2B;AAC3B,+BAAqC;AACrC,mCAA4C;AAC5C,gEAAoC;AACpC,MAAM,EAAE,IAAI,EAAE,GAAG,gBAAQ,CAAA;AAEzB,MAAM,SAAS,GAAG,CAAC,IAAY,EAAE,EAAE;IACjC,IAAI,CAAC;QACH,OAAO,IAAA,gBAAQ,EAAC,IAAI,CAAC,CAAC,WAAW,EAAE,CAAA;IACrC,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,OAAO,KAAK,CAAA;IACd,CAAC;AACH,CAAC,CAAA;AAED,MAAM,KAAK,GAAG,CAAC,IAAY,EAAE,EAAE,CAC7B,IAAI,CAAC,IAAI,CAAC,CAAC,IAAI,CACb,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,WAAW,EAAE,EACtB,GAAG,EAAE,CAAC,KAAK,CACZ,CAAA;AAEH,MAAM,eAAe,GAAG,KAAK,EAAE,IAAY,EAAE,EAAE;IAC7C,MAAM,EAAE,IAAI,EAAE,GAAG,IAAA,YAAK,EAAC,IAAI,CAAC,CAAA;IAC5B,MAAM,GAAG,GAAG,IAAA,WAAM,GAAE,CAAA;IACpB,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,GAAG,IAAA,YAAK,EAAC,GAAG,CAAC,CAAA;IACpC,IAAI,IAAI,CAAC,WAAW,EAAE,KAAK,OAAO,CAAC,WAAW,EAAE,EAAE,CAAC;QACjD,OAAO,GAAG,CAAA;IACZ,CAAC;IAED,MAAM,QAAQ,GAAG,IAAA,cAAO,EAAC,IAAI,EAAE,OAAO,CAAC,CAAA;IACvC,IAAI,MAAM,KAAK,CAAC,QAAQ,CAAC,EAAE,CAAC;QAC1B,OAAO,QAAQ,CAAA;IACjB,CAAC;IAED,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,mBAAmB,GAAG,CAAC,IAAY,EAAE,EAAE;IAC3C,MAAM,EAAE,IAAI,EAAE,GAAG,IAAA,YAAK,EAAC,IAAI,CAAC,CAAA;IAC5B,MAAM,GAAG,GAAG,IAAA,WAAM,GAAE,CAAA;IACpB,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,GAAG,IAAA,YAAK,EAAC,GAAG,CAAC,CAAA;IACpC,IAAI,IAAI,CAAC,WAAW,EAAE,KAAK,OAAO,CAAC,WAAW,EAAE,EAAE,CAAC;QACjD,OAAO,GAAG,CAAA;IACZ,CAAC;IAED,MAAM,QAAQ,GAAG,IAAA,cAAO,EAAC,IAAI,EAAE,OAAO,CAAC,CAAA;IACvC,IAAI,SAAS,CAAC,QAAQ,CAAC,EAAE,CAAC;QACxB,OAAO,QAAQ,CAAA;IACjB,CAAC;IAED,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,eAAe,GAAG,KAAK,IAAI,EAAE,CAAC,IAAA,WAAM,GAAE,CAAA;AAC5C,MAAM,mBAAmB,GAAG,GAAG,EAAE,CAAC,IAAA,WAAM,GAAE,CAAA;AAE7B,QAAA,UAAU,GACrB,qBAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,eAAe,CAAA;AAC7C,QAAA,cAAc,GACzB,qBAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,mBAAmB,CAAC,CAAC,CAAC,mBAAmB,CAAA","sourcesContent":["// The default temporary folder location for use in the windows algorithm.\n// It's TEMPting to use dirname(path), since that's guaranteed to be on the\n// same device. 
However, this means that:\n// rimraf(path).then(() => rimraf(dirname(path)))\n// will often fail with EBUSY, because the parent dir contains\n// marked-for-deletion directory entries (which do not show up in readdir).\n// The approach here is to use os.tmpdir() if it's on the same drive letter,\n// or resolve(path, '\\\\temp') if it exists, or the root of the drive if not.\n// On Posix (not that you'd be likely to use the windows algorithm there),\n// it uses os.tmpdir() always.\nimport { tmpdir } from 'os'\nimport { parse, resolve } from 'path'\nimport { promises, statSync } from './fs.js'\nimport platform from './platform.js'\nconst { stat } = promises\n\nconst isDirSync = (path: string) => {\n try {\n return statSync(path).isDirectory()\n } catch (er) {\n return false\n }\n}\n\nconst isDir = (path: string) =>\n stat(path).then(\n st => st.isDirectory(),\n () => false,\n )\n\nconst win32DefaultTmp = async (path: string) => {\n const { root } = parse(path)\n const tmp = tmpdir()\n const { root: tmpRoot } = parse(tmp)\n if (root.toLowerCase() === tmpRoot.toLowerCase()) {\n return tmp\n }\n\n const driveTmp = resolve(root, '/temp')\n if (await isDir(driveTmp)) {\n return driveTmp\n }\n\n return root\n}\n\nconst win32DefaultTmpSync = (path: string) => {\n const { root } = parse(path)\n const tmp = tmpdir()\n const { root: tmpRoot } = parse(tmp)\n if (root.toLowerCase() === tmpRoot.toLowerCase()) {\n return tmp\n }\n\n const driveTmp = resolve(root, '/temp')\n if (isDirSync(driveTmp)) {\n return driveTmp\n }\n\n return root\n}\n\nconst posixDefaultTmp = async () => tmpdir()\nconst posixDefaultTmpSync = () => tmpdir()\n\nexport const defaultTmp =\n platform === 'win32' ? win32DefaultTmp : posixDefaultTmp\nexport const defaultTmpSync =\n platform === 'win32' ? win32DefaultTmpSync : posixDefaultTmpSync\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/fix-eperm.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/fix-eperm.d.ts
deleted file mode 100644
index 20e76a82c4942e..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/fix-eperm.d.ts
+++ /dev/null
@@ -1,3 +0,0 @@
-export declare const fixEPERM: (fn: (path: string) => Promise<any>) => (path: string) => Promise<any>;
-export declare const fixEPERMSync: (fn: (path: string) => any) => (path: string) => any;
-//# sourceMappingURL=fix-eperm.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/fix-eperm.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/fix-eperm.d.ts.map
deleted file mode 100644
index ac17d6f4e060bb..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/fix-eperm.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"fix-eperm.d.ts","sourceRoot":"","sources":["../../src/fix-eperm.ts"],"names":[],"mappings":"AAGA,eAAO,MAAM,QAAQ,OACd,CAAC,IAAI,EAAE,MAAM,KAAK,OAAO,CAAC,GAAG,CAAC,YAAkB,MAAM,iBAsB1D,CAAA;AAEH,eAAO,MAAM,YAAY,OAAQ,CAAC,IAAI,EAAE,MAAM,KAAK,GAAG,YAAY,MAAM,QAsBvE,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/fix-eperm.js b/deps/npm/node_modules/rimraf/dist/commonjs/fix-eperm.js
deleted file mode 100644
index 7baecb7c9589bd..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/fix-eperm.js
+++ /dev/null
@@ -1,58 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.fixEPERMSync = exports.fixEPERM = void 0;
-const fs_js_1 = require("./fs.js");
-const { chmod } = fs_js_1.promises;
-const fixEPERM = (fn) => async (path) => {
- try {
- return await fn(path);
- }
- catch (er) {
- const fer = er;
- if (fer?.code === 'ENOENT') {
- return;
- }
- if (fer?.code === 'EPERM') {
- try {
- await chmod(path, 0o666);
- }
- catch (er2) {
- const fer2 = er2;
- if (fer2?.code === 'ENOENT') {
- return;
- }
- throw er;
- }
- return await fn(path);
- }
- throw er;
- }
-};
-exports.fixEPERM = fixEPERM;
-const fixEPERMSync = (fn) => (path) => {
- try {
- return fn(path);
- }
- catch (er) {
- const fer = er;
- if (fer?.code === 'ENOENT') {
- return;
- }
- if (fer?.code === 'EPERM') {
- try {
- (0, fs_js_1.chmodSync)(path, 0o666);
- }
- catch (er2) {
- const fer2 = er2;
- if (fer2?.code === 'ENOENT') {
- return;
- }
- throw er;
- }
- return fn(path);
- }
- throw er;
- }
-};
-exports.fixEPERMSync = fixEPERMSync;
-//# sourceMappingURL=fix-eperm.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/fix-eperm.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/fix-eperm.js.map
deleted file mode 100644
index 250ea5d5b4cbc6..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/fix-eperm.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"fix-eperm.js","sourceRoot":"","sources":["../../src/fix-eperm.ts"],"names":[],"mappings":";;;AAAA,mCAA6C;AAC7C,MAAM,EAAE,KAAK,EAAE,GAAG,gBAAQ,CAAA;AAEnB,MAAM,QAAQ,GACnB,CAAC,EAAkC,EAAE,EAAE,CAAC,KAAK,EAAE,IAAY,EAAE,EAAE;IAC7D,IAAI,CAAC;QACH,OAAO,MAAM,EAAE,CAAC,IAAI,CAAC,CAAA;IACvB,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,IAAI,GAAG,EAAE,IAAI,KAAK,QAAQ,EAAE,CAAC;YAC3B,OAAM;QACR,CAAC;QACD,IAAI,GAAG,EAAE,IAAI,KAAK,OAAO,EAAE,CAAC;YAC1B,IAAI,CAAC;gBACH,MAAM,KAAK,CAAC,IAAI,EAAE,KAAK,CAAC,CAAA;YAC1B,CAAC;YAAC,OAAO,GAAG,EAAE,CAAC;gBACb,MAAM,IAAI,GAAG,GAA4B,CAAA;gBACzC,IAAI,IAAI,EAAE,IAAI,KAAK,QAAQ,EAAE,CAAC;oBAC5B,OAAM;gBACR,CAAC;gBACD,MAAM,EAAE,CAAA;YACV,CAAC;YACD,OAAO,MAAM,EAAE,CAAC,IAAI,CAAC,CAAA;QACvB,CAAC;QACD,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAvBU,QAAA,QAAQ,YAuBlB;AAEI,MAAM,YAAY,GAAG,CAAC,EAAyB,EAAE,EAAE,CAAC,CAAC,IAAY,EAAE,EAAE;IAC1E,IAAI,CAAC;QACH,OAAO,EAAE,CAAC,IAAI,CAAC,CAAA;IACjB,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,IAAI,GAAG,EAAE,IAAI,KAAK,QAAQ,EAAE,CAAC;YAC3B,OAAM;QACR,CAAC;QACD,IAAI,GAAG,EAAE,IAAI,KAAK,OAAO,EAAE,CAAC;YAC1B,IAAI,CAAC;gBACH,IAAA,iBAAS,EAAC,IAAI,EAAE,KAAK,CAAC,CAAA;YACxB,CAAC;YAAC,OAAO,GAAG,EAAE,CAAC;gBACb,MAAM,IAAI,GAAG,GAA4B,CAAA;gBACzC,IAAI,IAAI,EAAE,IAAI,KAAK,QAAQ,EAAE,CAAC;oBAC5B,OAAM;gBACR,CAAC;gBACD,MAAM,EAAE,CAAA;YACV,CAAC;YACD,OAAO,EAAE,CAAC,IAAI,CAAC,CAAA;QACjB,CAAC;QACD,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAtBY,QAAA,YAAY,gBAsBxB","sourcesContent":["import { chmodSync, promises } from './fs.js'\nconst { chmod } = promises\n\nexport const fixEPERM =\n (fn: (path: string) => Promise) => async (path: string) => {\n try {\n return await fn(path)\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n if (fer?.code === 'ENOENT') {\n return\n }\n if (fer?.code === 'EPERM') {\n try {\n await chmod(path, 0o666)\n } catch (er2) {\n const fer2 = er2 as NodeJS.ErrnoException\n if (fer2?.code === 'ENOENT') {\n return\n }\n throw er\n }\n return await fn(path)\n }\n throw er\n }\n }\n\nexport const fixEPERMSync = (fn: (path: string) => any) => (path: string) => {\n try {\n return fn(path)\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n if (fer?.code === 'ENOENT') {\n return\n }\n if (fer?.code === 'EPERM') {\n try {\n chmodSync(path, 0o666)\n } catch (er2) {\n const fer2 = er2 as NodeJS.ErrnoException\n if (fer2?.code === 'ENOENT') {\n return\n }\n throw er\n }\n return fn(path)\n }\n throw er\n }\n}\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/fs.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/fs.d.ts
deleted file mode 100644
index 9e4e95b4e7a411..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/fs.d.ts
+++ /dev/null
@@ -1,17 +0,0 @@
-import fs, { Dirent } from 'fs';
-export { chmodSync, mkdirSync, renameSync, rmdirSync, rmSync, statSync, lstatSync, unlinkSync, } from 'fs';
-export declare const readdirSync: (path: fs.PathLike) => Dirent[];
-export declare const promises: {
- chmod: (path: fs.PathLike, mode: fs.Mode) => Promise<void>;
- mkdir: (path: fs.PathLike, options?: fs.Mode | (fs.MakeDirectoryOptions & {
- recursive?: boolean | null;
- }) | undefined | null) => Promise<string | undefined>;
- readdir: (path: fs.PathLike) => Promise<Dirent[]>;
- rename: (oldPath: fs.PathLike, newPath: fs.PathLike) => Promise<void>;
- rm: (path: fs.PathLike, options: fs.RmOptions) => Promise<void>;
- rmdir: (path: fs.PathLike) => Promise<void>;
- stat: (path: fs.PathLike) => Promise<fs.Stats>;
- lstat: (path: fs.PathLike) => Promise<fs.Stats>;
- unlink: (path: fs.PathLike) => Promise<void>;
-};
-//# sourceMappingURL=fs.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/fs.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/fs.d.ts.map
deleted file mode 100644
index 8c8b1034cbcd27..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/fs.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"fs.d.ts","sourceRoot":"","sources":["../../src/fs.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,EAAE,EAAE,MAAM,EAAE,MAAM,IAAI,CAAA;AAG/B,OAAO,EACL,SAAS,EACT,SAAS,EACT,UAAU,EACV,SAAS,EACT,MAAM,EACN,QAAQ,EACR,SAAS,EACT,UAAU,GACX,MAAM,IAAI,CAAA;AAGX,eAAO,MAAM,WAAW,SAAU,EAAE,CAAC,QAAQ,KAAG,MAAM,EACf,CAAA;AA+DvC,eAAO,MAAM,QAAQ;kBAxDA,EAAE,CAAC,QAAQ,QAAQ,EAAE,CAAC,IAAI,KAAG,OAAO,CAAC,IAAI,CAAC;kBAMvD,EAAE,CAAC,QAAQ,YAEb,EAAE,CAAC,IAAI,GACP,CAAC,EAAE,CAAC,oBAAoB,GAAG;QAAE,SAAS,CAAC,EAAE,OAAO,GAAG,IAAI,CAAA;KAAE,CAAC,GAC1D,SAAS,GACT,IAAI,KACP,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;oBAKP,EAAE,CAAC,QAAQ,KAAG,OAAO,CAAC,MAAM,EAAE,CAAC;sBAO7B,EAAE,CAAC,QAAQ,WAAW,EAAE,CAAC,QAAQ,KAAG,OAAO,CAAC,IAAI,CAAC;eAOxD,EAAE,CAAC,QAAQ,WAAW,EAAE,CAAC,SAAS,KAAG,OAAO,CAAC,IAAI,CAAC;kBAK/C,EAAE,CAAC,QAAQ,KAAG,OAAO,CAAC,IAAI,CAAC;iBAK5B,EAAE,CAAC,QAAQ,KAAG,OAAO,CAAC,EAAE,CAAC,KAAK,CAAC;kBAK9B,EAAE,CAAC,QAAQ,KAAG,OAAO,CAAC,EAAE,CAAC,KAAK,CAAC;mBAK9B,EAAE,CAAC,QAAQ,KAAG,OAAO,CAAC,IAAI,CAAC;CAehD,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/fs.js b/deps/npm/node_modules/rimraf/dist/commonjs/fs.js
deleted file mode 100644
index dba64c9830ed82..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/fs.js
+++ /dev/null
@@ -1,46 +0,0 @@
-"use strict";
-// promisify ourselves, because older nodes don't have fs.promises
-var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.promises = exports.readdirSync = exports.unlinkSync = exports.lstatSync = exports.statSync = exports.rmSync = exports.rmdirSync = exports.renameSync = exports.mkdirSync = exports.chmodSync = void 0;
-const fs_1 = __importDefault(require("fs"));
-// sync ones just take the sync version from node
-var fs_2 = require("fs");
-Object.defineProperty(exports, "chmodSync", { enumerable: true, get: function () { return fs_2.chmodSync; } });
-Object.defineProperty(exports, "mkdirSync", { enumerable: true, get: function () { return fs_2.mkdirSync; } });
-Object.defineProperty(exports, "renameSync", { enumerable: true, get: function () { return fs_2.renameSync; } });
-Object.defineProperty(exports, "rmdirSync", { enumerable: true, get: function () { return fs_2.rmdirSync; } });
-Object.defineProperty(exports, "rmSync", { enumerable: true, get: function () { return fs_2.rmSync; } });
-Object.defineProperty(exports, "statSync", { enumerable: true, get: function () { return fs_2.statSync; } });
-Object.defineProperty(exports, "lstatSync", { enumerable: true, get: function () { return fs_2.lstatSync; } });
-Object.defineProperty(exports, "unlinkSync", { enumerable: true, get: function () { return fs_2.unlinkSync; } });
-const fs_3 = require("fs");
-const readdirSync = (path) => (0, fs_3.readdirSync)(path, { withFileTypes: true });
-exports.readdirSync = readdirSync;
-// unrolled for better inlining, this seems to get better performance
-// than something like:
-// const makeCb = (res, rej) => (er, ...d) => er ? rej(er) : res(...d)
-// which would be a bit cleaner.
-const chmod = (path, mode) => new Promise((res, rej) => fs_1.default.chmod(path, mode, (er, ...d) => (er ? rej(er) : res(...d))));
-const mkdir = (path, options) => new Promise((res, rej) => fs_1.default.mkdir(path, options, (er, made) => (er ? rej(er) : res(made))));
-const readdir = (path) => new Promise((res, rej) => fs_1.default.readdir(path, { withFileTypes: true }, (er, data) => er ? rej(er) : res(data)));
-const rename = (oldPath, newPath) => new Promise((res, rej) => fs_1.default.rename(oldPath, newPath, (er, ...d) => er ? rej(er) : res(...d)));
-const rm = (path, options) => new Promise((res, rej) => fs_1.default.rm(path, options, (er, ...d) => (er ? rej(er) : res(...d))));
-const rmdir = (path) => new Promise((res, rej) => fs_1.default.rmdir(path, (er, ...d) => (er ? rej(er) : res(...d))));
-const stat = (path) => new Promise((res, rej) => fs_1.default.stat(path, (er, data) => (er ? rej(er) : res(data))));
-const lstat = (path) => new Promise((res, rej) => fs_1.default.lstat(path, (er, data) => (er ? rej(er) : res(data))));
-const unlink = (path) => new Promise((res, rej) => fs_1.default.unlink(path, (er, ...d) => (er ? rej(er) : res(...d))));
-exports.promises = {
- chmod,
- mkdir,
- readdir,
- rename,
- rm,
- rmdir,
- stat,
- lstat,
- unlink,
-};
-//# sourceMappingURL=fs.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/fs.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/fs.js.map
deleted file mode 100644
index 9d9e1fba4b2dc8..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/fs.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"fs.js","sourceRoot":"","sources":["../../src/fs.ts"],"names":[],"mappings":";AAAA,kEAAkE;;;;;;AAElE,4CAA+B;AAE/B,iDAAiD;AACjD,yBASW;AART,+FAAA,SAAS,OAAA;AACT,+FAAA,SAAS,OAAA;AACT,gGAAA,UAAU,OAAA;AACV,+FAAA,SAAS,OAAA;AACT,4FAAA,MAAM,OAAA;AACN,8FAAA,QAAQ,OAAA;AACR,+FAAA,SAAS,OAAA;AACT,gGAAA,UAAU,OAAA;AAGZ,2BAA0C;AACnC,MAAM,WAAW,GAAG,CAAC,IAAiB,EAAY,EAAE,CACzD,IAAA,gBAAM,EAAC,IAAI,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAA;AAD1B,QAAA,WAAW,eACe;AAEvC,qEAAqE;AACrE,uBAAuB;AACvB,sEAAsE;AACtE,gCAAgC;AAEhC,MAAM,KAAK,GAAG,CAAC,IAAiB,EAAE,IAAa,EAAiB,EAAE,CAChE,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,YAAE,CAAC,KAAK,CAAC,IAAI,EAAE,IAAI,EAAE,CAAC,EAAE,EAAE,GAAG,CAAQ,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CACtE,CAAA;AAEH,MAAM,KAAK,GAAG,CACZ,IAAiB,EACjB,OAIQ,EACqB,EAAE,CAC/B,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,YAAE,CAAC,KAAK,CAAC,IAAI,EAAE,OAAO,EAAE,CAAC,EAAE,EAAE,IAAI,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,CAClE,CAAA;AAEH,MAAM,OAAO,GAAG,CAAC,IAAiB,EAAqB,EAAE,CACvD,IAAI,OAAO,CAAW,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACjC,YAAE,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,EAAE,IAAI,EAAE,EAAE,CACrD,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CACzB,CACF,CAAA;AAEH,MAAM,MAAM,GAAG,CAAC,OAAoB,EAAE,OAAoB,EAAiB,EAAE,CAC3E,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,YAAE,CAAC,MAAM,CAAC,OAAO,EAAE,OAAO,EAAE,CAAC,EAAE,EAAE,GAAG,CAAQ,EAAE,EAAE,CAC9C,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CACzB,CACF,CAAA;AAEH,MAAM,EAAE,GAAG,CAAC,IAAiB,EAAE,OAAqB,EAAiB,EAAE,CACrE,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,YAAE,CAAC,EAAE,CAAC,IAAI,EAAE,OAAO,EAAE,CAAC,EAAE,EAAE,GAAG,CAAQ,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CACtE,CAAA;AAEH,MAAM,KAAK,GAAG,CAAC,IAAiB,EAAiB,EAAE,CACjD,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,YAAE,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,EAAE,EAAE,GAAG,CAAQ,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAChE,CAAA;AAEH,MAAM,IAAI,GAAG,CAAC,IAAiB,EAAqB,EAAE,CACpD,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,YAAE,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,EAAE,EAAE,IAAI,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,CACxD,CAAA;AAEH,MAAM,KAAK,GAAG,CAAC,IAAiB,EAAqB,EAAE,CACrD,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,YAAE,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,EAAE,EAAE,IAAI,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,CACzD,CAAA;AAEH,MAAM,MAAM,GAAG,CAAC,IAAiB,EAAiB,EAAE,CAClD,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,YAAE,CAAC,MAAM,CAAC,IAAI,EAAE,CAAC,EAAE,EAAE,GAAG,CAAQ,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CACjE,CAAA;AAEU,QAAA,QAAQ,GAAG;IACtB,KAAK;IACL,KAAK;IACL,OAAO;IACP,MAAM;IACN,EAAE;IACF,KAAK;IACL,IAAI;IACJ,KAAK;IACL,MAAM;CACP,CAAA","sourcesContent":["// promisify ourselves, because older nodes don't have fs.promises\n\nimport fs, { Dirent } from 'fs'\n\n// sync ones just take the sync version from node\nexport {\n chmodSync,\n mkdirSync,\n renameSync,\n rmdirSync,\n rmSync,\n statSync,\n lstatSync,\n unlinkSync,\n} from 'fs'\n\nimport { readdirSync as rdSync } from 'fs'\nexport const readdirSync = (path: 
fs.PathLike): Dirent[] =>\n rdSync(path, { withFileTypes: true })\n\n// unrolled for better inlining, this seems to get better performance\n// than something like:\n// const makeCb = (res, rej) => (er, ...d) => er ? rej(er) : res(...d)\n// which would be a bit cleaner.\n\nconst chmod = (path: fs.PathLike, mode: fs.Mode): Promise =>\n new Promise((res, rej) =>\n fs.chmod(path, mode, (er, ...d: any[]) => (er ? rej(er) : res(...d))),\n )\n\nconst mkdir = (\n path: fs.PathLike,\n options?:\n | fs.Mode\n | (fs.MakeDirectoryOptions & { recursive?: boolean | null })\n | undefined\n | null,\n): Promise =>\n new Promise((res, rej) =>\n fs.mkdir(path, options, (er, made) => (er ? rej(er) : res(made))),\n )\n\nconst readdir = (path: fs.PathLike): Promise =>\n new Promise((res, rej) =>\n fs.readdir(path, { withFileTypes: true }, (er, data) =>\n er ? rej(er) : res(data),\n ),\n )\n\nconst rename = (oldPath: fs.PathLike, newPath: fs.PathLike): Promise =>\n new Promise((res, rej) =>\n fs.rename(oldPath, newPath, (er, ...d: any[]) =>\n er ? rej(er) : res(...d),\n ),\n )\n\nconst rm = (path: fs.PathLike, options: fs.RmOptions): Promise =>\n new Promise((res, rej) =>\n fs.rm(path, options, (er, ...d: any[]) => (er ? rej(er) : res(...d))),\n )\n\nconst rmdir = (path: fs.PathLike): Promise =>\n new Promise((res, rej) =>\n fs.rmdir(path, (er, ...d: any[]) => (er ? rej(er) : res(...d))),\n )\n\nconst stat = (path: fs.PathLike): Promise =>\n new Promise((res, rej) =>\n fs.stat(path, (er, data) => (er ? rej(er) : res(data))),\n )\n\nconst lstat = (path: fs.PathLike): Promise =>\n new Promise((res, rej) =>\n fs.lstat(path, (er, data) => (er ? rej(er) : res(data))),\n )\n\nconst unlink = (path: fs.PathLike): Promise =>\n new Promise((res, rej) =>\n fs.unlink(path, (er, ...d: any[]) => (er ? rej(er) : res(...d))),\n )\n\nexport const promises = {\n chmod,\n mkdir,\n readdir,\n rename,\n rm,\n rmdir,\n stat,\n lstat,\n unlink,\n}\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/ignore-enoent.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/ignore-enoent.d.ts
deleted file mode 100644
index f158cc27025b16..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/ignore-enoent.d.ts
+++ /dev/null
@@ -1,3 +0,0 @@
-export declare const ignoreENOENT: (p: Promise<any>) => Promise<any>;
-export declare const ignoreENOENTSync: (fn: () => any) => any;
-//# sourceMappingURL=ignore-enoent.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/ignore-enoent.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/ignore-enoent.d.ts.map
deleted file mode 100644
index 2cfb3bbac5fab7..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/ignore-enoent.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"ignore-enoent.d.ts","sourceRoot":"","sources":["../../src/ignore-enoent.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,YAAY,MAAa,OAAO,CAAC,GAAG,CAAC,iBAK9C,CAAA;AAEJ,eAAO,MAAM,gBAAgB,OAAQ,MAAM,GAAG,QAQ7C,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/ignore-enoent.js b/deps/npm/node_modules/rimraf/dist/commonjs/ignore-enoent.js
deleted file mode 100644
index 02595342121f79..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/ignore-enoent.js
+++ /dev/null
@@ -1,21 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.ignoreENOENTSync = exports.ignoreENOENT = void 0;
-const ignoreENOENT = async (p) => p.catch(er => {
- if (er.code !== 'ENOENT') {
- throw er;
- }
-});
-exports.ignoreENOENT = ignoreENOENT;
-const ignoreENOENTSync = (fn) => {
- try {
- return fn();
- }
- catch (er) {
- if (er?.code !== 'ENOENT') {
- throw er;
- }
- }
-};
-exports.ignoreENOENTSync = ignoreENOENTSync;
-//# sourceMappingURL=ignore-enoent.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/ignore-enoent.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/ignore-enoent.js.map
deleted file mode 100644
index 7acf4c29d1e56f..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/ignore-enoent.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"ignore-enoent.js","sourceRoot":"","sources":["../../src/ignore-enoent.ts"],"names":[],"mappings":";;;AAAO,MAAM,YAAY,GAAG,KAAK,EAAE,CAAe,EAAE,EAAE,CACpD,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE;IACX,IAAI,EAAE,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;QACzB,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAC,CAAA;AALS,QAAA,YAAY,gBAKrB;AAEG,MAAM,gBAAgB,GAAG,CAAC,EAAa,EAAE,EAAE;IAChD,IAAI,CAAC;QACH,OAAO,EAAE,EAAE,CAAA;IACb,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ,EAAE,CAAC;YACrD,MAAM,EAAE,CAAA;QACV,CAAC;IACH,CAAC;AACH,CAAC,CAAA;AARY,QAAA,gBAAgB,oBAQ5B","sourcesContent":["export const ignoreENOENT = async (p: Promise) =>\n p.catch(er => {\n if (er.code !== 'ENOENT') {\n throw er\n }\n })\n\nexport const ignoreENOENTSync = (fn: () => any) => {\n try {\n return fn()\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code !== 'ENOENT') {\n throw er\n }\n }\n}\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/index.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/index.d.ts
deleted file mode 100644
index 9ec4a124ab613d..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/index.d.ts
+++ /dev/null
@@ -1,50 +0,0 @@
-import { RimrafAsyncOptions, RimrafSyncOptions } from './opt-arg.js';
-export { assertRimrafOptions, isRimrafOptions, type RimrafAsyncOptions, type RimrafOptions, type RimrafSyncOptions, } from './opt-arg.js';
-export declare const nativeSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
-export declare const native: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & {
- sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
-};
-export declare const manualSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
-export declare const manual: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & {
- sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
-};
-export declare const windowsSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
-export declare const windows: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & {
- sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
-};
-export declare const posixSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
-export declare const posix: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & {
- sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
-};
-export declare const moveRemoveSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
-export declare const moveRemove: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & {
- sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
-};
-export declare const rimrafSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
-export declare const sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
-export declare const rimraf: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & {
- rimraf: (path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>;
- sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
- rimrafSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
- manual: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & {
- sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
- };
- manualSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
- native: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & {
- sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
- };
- nativeSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
- posix: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & {
- sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
- };
- posixSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
- windows: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & {
- sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
- };
- windowsSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
- moveRemove: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & {
- sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
- };
- moveRemoveSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
-};
-//# sourceMappingURL=index.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/index.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/index.d.ts.map
deleted file mode 100644
index 0dc659ca730252..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/index.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AACA,OAAO,EAGL,kBAAkB,EAClB,iBAAiB,EAClB,MAAM,cAAc,CAAA;AASrB,OAAO,EACL,mBAAmB,EACnB,eAAe,EACf,KAAK,kBAAkB,EACvB,KAAK,aAAa,EAClB,KAAK,iBAAiB,GACvB,MAAM,cAAc,CAAA;AAqCrB,eAAO,MAAM,UAAU,SAdd,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAcF,CAAA;AACpD,eAAO,MAAM,MAAM,UAjCT,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;iBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;CAegB,CAAA;AAE7E,eAAO,MAAM,UAAU,SAjBd,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAiBF,CAAA;AACpD,eAAO,MAAM,MAAM,UApCT,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;iBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;CAkBgB,CAAA;AAE7E,eAAO,MAAM,WAAW,SApBf,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAoBA,CAAA;AACtD,eAAO,MAAM,OAAO,UAvCV,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;iBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;CAqBmB,CAAA;AAEhF,eAAO,MAAM,SAAS,SAvBb,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAuBJ,CAAA;AAClD,eAAO,MAAM,KAAK,UA1CR,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;iBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;CAwBa,CAAA;AAE1E,eAAO,MAAM,cAAc,SA1BlB,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OA0BM,CAAA;AAC5D,eAAO,MAAM,UAAU,UA7Cb,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;iBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;CA6B3D,CAAA;AAEF,eAAO,MAAM,UAAU,SA/Bd,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAmCrD,CAAA;AACD,eAAO,MAAM,IAAI,SApCR,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAoCxB,CAAA;AAK9B,eAAO,MAAM,MAAM,UA3DT,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;mBAFX,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;iBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;uBAApD,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;oBAlBnD,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;qBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;;uBAApD,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;oBAlBnD,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;qBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;;uBAApD,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;mBAlBnD,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;qBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;;sBAApD,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;qBAlBnD,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;qBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;;wBAApD,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;wBAlBnD,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;qBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;;2BAApD,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;CAuD3D,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/index.js b/deps/npm/node_modules/rimraf/dist/commonjs/index.js
deleted file mode 100644
index 09b5d9993c1e78..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/index.js
+++ /dev/null
@@ -1,78 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.rimraf = exports.sync = exports.rimrafSync = exports.moveRemove = exports.moveRemoveSync = exports.posix = exports.posixSync = exports.windows = exports.windowsSync = exports.manual = exports.manualSync = exports.native = exports.nativeSync = exports.isRimrafOptions = exports.assertRimrafOptions = void 0;
-const glob_1 = require("glob");
-const opt_arg_js_1 = require("./opt-arg.js");
-const path_arg_js_1 = __importDefault(require("./path-arg.js"));
-const rimraf_manual_js_1 = require("./rimraf-manual.js");
-const rimraf_move_remove_js_1 = require("./rimraf-move-remove.js");
-const rimraf_native_js_1 = require("./rimraf-native.js");
-const rimraf_posix_js_1 = require("./rimraf-posix.js");
-const rimraf_windows_js_1 = require("./rimraf-windows.js");
-const use_native_js_1 = require("./use-native.js");
-var opt_arg_js_2 = require("./opt-arg.js");
-Object.defineProperty(exports, "assertRimrafOptions", { enumerable: true, get: function () { return opt_arg_js_2.assertRimrafOptions; } });
-Object.defineProperty(exports, "isRimrafOptions", { enumerable: true, get: function () { return opt_arg_js_2.isRimrafOptions; } });
-const wrap = (fn) => async (path, opt) => {
- const options = (0, opt_arg_js_1.optArg)(opt);
- if (options.glob) {
- path = await (0, glob_1.glob)(path, options.glob);
- }
- if (Array.isArray(path)) {
- return !!(await Promise.all(path.map(p => fn((0, path_arg_js_1.default)(p, options), options)))).reduce((a, b) => a && b, true);
- }
- else {
- return !!(await fn((0, path_arg_js_1.default)(path, options), options));
- }
-};
-const wrapSync = (fn) => (path, opt) => {
- const options = (0, opt_arg_js_1.optArgSync)(opt);
- if (options.glob) {
- path = (0, glob_1.globSync)(path, options.glob);
- }
- if (Array.isArray(path)) {
- return !!path
- .map(p => fn((0, path_arg_js_1.default)(p, options), options))
- .reduce((a, b) => a && b, true);
- }
- else {
- return !!fn((0, path_arg_js_1.default)(path, options), options);
- }
-};
-exports.nativeSync = wrapSync(rimraf_native_js_1.rimrafNativeSync);
-exports.native = Object.assign(wrap(rimraf_native_js_1.rimrafNative), { sync: exports.nativeSync });
-exports.manualSync = wrapSync(rimraf_manual_js_1.rimrafManualSync);
-exports.manual = Object.assign(wrap(rimraf_manual_js_1.rimrafManual), { sync: exports.manualSync });
-exports.windowsSync = wrapSync(rimraf_windows_js_1.rimrafWindowsSync);
-exports.windows = Object.assign(wrap(rimraf_windows_js_1.rimrafWindows), { sync: exports.windowsSync });
-exports.posixSync = wrapSync(rimraf_posix_js_1.rimrafPosixSync);
-exports.posix = Object.assign(wrap(rimraf_posix_js_1.rimrafPosix), { sync: exports.posixSync });
-exports.moveRemoveSync = wrapSync(rimraf_move_remove_js_1.rimrafMoveRemoveSync);
-exports.moveRemove = Object.assign(wrap(rimraf_move_remove_js_1.rimrafMoveRemove), {
- sync: exports.moveRemoveSync,
-});
-exports.rimrafSync = wrapSync((path, opt) => (0, use_native_js_1.useNativeSync)(opt) ?
- (0, rimraf_native_js_1.rimrafNativeSync)(path, opt)
- : (0, rimraf_manual_js_1.rimrafManualSync)(path, opt));
-exports.sync = exports.rimrafSync;
-const rimraf_ = wrap((path, opt) => (0, use_native_js_1.useNative)(opt) ? (0, rimraf_native_js_1.rimrafNative)(path, opt) : (0, rimraf_manual_js_1.rimrafManual)(path, opt));
-exports.rimraf = Object.assign(rimraf_, {
- rimraf: rimraf_,
- sync: exports.rimrafSync,
- rimrafSync: exports.rimrafSync,
- manual: exports.manual,
- manualSync: exports.manualSync,
- native: exports.native,
- nativeSync: exports.nativeSync,
- posix: exports.posix,
- posixSync: exports.posixSync,
- windows: exports.windows,
- windowsSync: exports.windowsSync,
- moveRemove: exports.moveRemove,
- moveRemoveSync: exports.moveRemoveSync,
-});
-exports.rimraf.rimraf = exports.rimraf;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/index.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/index.js.map
deleted file mode 100644
index 5ed1978ae92f1f..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/index.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";;;;;;AAAA,+BAAqC;AACrC,6CAKqB;AACrB,gEAAmC;AACnC,yDAAmE;AACnE,mEAAgF;AAChF,yDAAmE;AACnE,uDAAgE;AAChE,2DAAsE;AACtE,mDAA0D;AAE1D,2CAMqB;AALnB,iHAAA,mBAAmB,OAAA;AACnB,6GAAA,eAAe,OAAA;AAMjB,MAAM,IAAI,GACR,CAAC,EAA0D,EAAE,EAAE,CAC/D,KAAK,EACH,IAAuB,EACvB,GAAwB,EACN,EAAE;IACpB,MAAM,OAAO,GAAG,IAAA,mBAAM,EAAC,GAAG,CAAC,CAAA;IAC3B,IAAI,OAAO,CAAC,IAAI,EAAE,CAAC;QACjB,IAAI,GAAG,MAAM,IAAA,WAAI,EAAC,IAAI,EAAE,OAAO,CAAC,IAAI,CAAC,CAAA;IACvC,CAAC;IACD,IAAI,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE,CAAC;QACxB,OAAO,CAAC,CAAC,CACP,MAAM,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,IAAA,qBAAO,EAAC,CAAC,EAAE,OAAO,CAAC,EAAE,OAAO,CAAC,CAAC,CAAC,CACnE,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,CAAA;IAClC,CAAC;SAAM,CAAC;QACN,OAAO,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,IAAA,qBAAO,EAAC,IAAI,EAAE,OAAO,CAAC,EAAE,OAAO,CAAC,CAAC,CAAA;IACtD,CAAC;AACH,CAAC,CAAA;AAEH,MAAM,QAAQ,GACZ,CAAC,EAAgD,EAAE,EAAE,CACrD,CAAC,IAAuB,EAAE,GAAuB,EAAW,EAAE;IAC5D,MAAM,OAAO,GAAG,IAAA,uBAAU,EAAC,GAAG,CAAC,CAAA;IAC/B,IAAI,OAAO,CAAC,IAAI,EAAE,CAAC;QACjB,IAAI,GAAG,IAAA,eAAQ,EAAC,IAAI,EAAE,OAAO,CAAC,IAAI,CAAC,CAAA;IACrC,CAAC;IACD,IAAI,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE,CAAC;QACxB,OAAO,CAAC,CAAC,IAAI;aACV,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,IAAA,qBAAO,EAAC,CAAC,EAAE,OAAO,CAAC,EAAE,OAAO,CAAC,CAAC;aAC1C,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,CAAA;IACnC,CAAC;SAAM,CAAC;QACN,OAAO,CAAC,CAAC,EAAE,CAAC,IAAA,qBAAO,EAAC,IAAI,EAAE,OAAO,CAAC,EAAE,OAAO,CAAC,CAAA;IAC9C,CAAC;AACH,CAAC,CAAA;AAEU,QAAA,UAAU,GAAG,QAAQ,CAAC,mCAAgB,CAAC,CAAA;AACvC,QAAA,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,+BAAY,CAAC,EAAE,EAAE,IAAI,EAAE,kBAAU,EAAE,CAAC,CAAA;AAEhE,QAAA,UAAU,GAAG,QAAQ,CAAC,mCAAgB,CAAC,CAAA;AACvC,QAAA,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,+BAAY,CAAC,EAAE,EAAE,IAAI,EAAE,kBAAU,EAAE,CAAC,CAAA;AAEhE,QAAA,WAAW,GAAG,QAAQ,CAAC,qCAAiB,CAAC,CAAA;AACzC,QAAA,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,iCAAa,CAAC,EAAE,EAAE,IAAI,EAAE,mBAAW,EAAE,CAAC,CAAA;AAEnE,QAAA,SAAS,GAAG,QAAQ,CAAC,iCAAe,CAAC,CAAA;AACrC,QAAA,KAAK,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,6BAAW,CAAC,EAAE,EAAE,IAAI,EAAE,iBAAS,EAAE,CAAC,CAAA;AAE7D,QAAA,cAAc,GAAG,QAAQ,CAAC,4CAAoB,CAAC,CAAA;AAC/C,QAAA,UAAU,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,wCAAgB,CAAC,EAAE;IAC9D,IAAI,EAAE,sBAAc;CACrB,CAAC,CAAA;AAEW,QAAA,UAAU,GAAG,QAAQ,CAAC,CAAC,IAAI,EAAE,GAAG,EAAE,EAAE,CAC/C,IAAA,6BAAa,EAAC,GAAG,CAAC,CAAC,CAAC;IAClB,IAAA,mCAAgB,EAAC,IAAI,EAAE,GAAG,CAAC;IAC7B,CAAC,CAAC,IAAA,mCAAgB,EAAC,IAAI,EAAE,GAAG,CAAC,CAC9B,CAAA;AACY,QAAA,IAAI,GAAG,kBAAU,CAAA;AAE9B,MAAM,OAAO,GAAG,IAAI,CAAC,CAAC,IAAI,EAAE,GAAG,EAAE,EAAE,CACjC,IAAA,yBAAS,EAAC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAA,+BAAY,EAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC,IAAA,+BAAY,EAAC,IAAI,EAAE,GAAG,CAAC,CACnE,CAAA;AACY,QAAA,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,OAAO,EAAE;IAC3C,MAAM,EAAE,OAAO;IACf,IAAI,EAAE,kBAAU;IAChB,UAAU,EAAE,kBAAU;IACtB,MAAM,EAAN,cAAM;IACN,UAAU,EAAV,kBAAU;IACV,MAAM,EAAN,cAAM;IACN,UAAU,EAAV,kBAAU;IACV,KAAK,EAAL,aAAK;IACL,SAAS,EAAT,iBAAS;IACT,OAAO,EAAP,eAAO;IACP,WAAW,EAAX,mBAAW;IACX,UAAU,EAAV,kBAAU;IACV,cAAc,EAAd,sBAAc;CACf,CAAC,CAAA;AACF,cAAM,CAAC,MAAM,GAAG,cAAM,CAAA","sourcesContent":["import { glob, globSync } from 'glob'\nimport {\n optArg,\n optArgSync,\n RimrafAsyncOptions,\n RimrafSyncOptions,\n} from './opt-arg.js'\nimport pathArg from './path-arg.js'\nimport { rimrafManual, rimrafManualSync } from './rimraf-manual.js'\nimport { rimrafMoveRemove, rimrafMoveRemoveSync } from 
'./rimraf-move-remove.js'\nimport { rimrafNative, rimrafNativeSync } from './rimraf-native.js'\nimport { rimrafPosix, rimrafPosixSync } from './rimraf-posix.js'\nimport { rimrafWindows, rimrafWindowsSync } from './rimraf-windows.js'\nimport { useNative, useNativeSync } from './use-native.js'\n\nexport {\n assertRimrafOptions,\n isRimrafOptions,\n type RimrafAsyncOptions,\n type RimrafOptions,\n type RimrafSyncOptions,\n} from './opt-arg.js'\n\nconst wrap =\n (fn: (p: string, o: RimrafAsyncOptions) => Promise) =>\n async (\n path: string | string[],\n opt?: RimrafAsyncOptions,\n ): Promise => {\n const options = optArg(opt)\n if (options.glob) {\n path = await glob(path, options.glob)\n }\n if (Array.isArray(path)) {\n return !!(\n await Promise.all(path.map(p => fn(pathArg(p, options), options)))\n ).reduce((a, b) => a && b, true)\n } else {\n return !!(await fn(pathArg(path, options), options))\n }\n }\n\nconst wrapSync =\n (fn: (p: string, o: RimrafSyncOptions) => boolean) =>\n (path: string | string[], opt?: RimrafSyncOptions): boolean => {\n const options = optArgSync(opt)\n if (options.glob) {\n path = globSync(path, options.glob)\n }\n if (Array.isArray(path)) {\n return !!path\n .map(p => fn(pathArg(p, options), options))\n .reduce((a, b) => a && b, true)\n } else {\n return !!fn(pathArg(path, options), options)\n }\n }\n\nexport const nativeSync = wrapSync(rimrafNativeSync)\nexport const native = Object.assign(wrap(rimrafNative), { sync: nativeSync })\n\nexport const manualSync = wrapSync(rimrafManualSync)\nexport const manual = Object.assign(wrap(rimrafManual), { sync: manualSync })\n\nexport const windowsSync = wrapSync(rimrafWindowsSync)\nexport const windows = Object.assign(wrap(rimrafWindows), { sync: windowsSync })\n\nexport const posixSync = wrapSync(rimrafPosixSync)\nexport const posix = Object.assign(wrap(rimrafPosix), { sync: posixSync })\n\nexport const moveRemoveSync = wrapSync(rimrafMoveRemoveSync)\nexport const moveRemove = Object.assign(wrap(rimrafMoveRemove), {\n sync: moveRemoveSync,\n})\n\nexport const rimrafSync = wrapSync((path, opt) =>\n useNativeSync(opt) ?\n rimrafNativeSync(path, opt)\n : rimrafManualSync(path, opt),\n)\nexport const sync = rimrafSync\n\nconst rimraf_ = wrap((path, opt) =>\n useNative(opt) ? rimrafNative(path, opt) : rimrafManual(path, opt),\n)\nexport const rimraf = Object.assign(rimraf_, {\n rimraf: rimraf_,\n sync: rimrafSync,\n rimrafSync: rimrafSync,\n manual,\n manualSync,\n native,\n nativeSync,\n posix,\n posixSync,\n windows,\n windowsSync,\n moveRemove,\n moveRemoveSync,\n})\nrimraf.rimraf = rimraf\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/opt-arg.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/opt-arg.d.ts
deleted file mode 100644
index c869d4ae85251b..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/opt-arg.d.ts
+++ /dev/null
@@ -1,34 +0,0 @@
-import { Dirent, Stats } from 'fs';
-import { GlobOptions } from 'glob';
-export declare const isRimrafOptions: (o: any) => o is RimrafOptions;
-export declare const assertRimrafOptions: (o: any) => void;
-export interface RimrafAsyncOptions {
- preserveRoot?: boolean;
- tmp?: string;
- maxRetries?: number;
- retryDelay?: number;
- backoff?: number;
- maxBackoff?: number;
- signal?: AbortSignal;
- glob?: boolean | GlobOptions;
- filter?: ((path: string, ent: Dirent | Stats) => boolean) | ((path: string, ent: Dirent | Stats) => Promise<boolean>);
-}
-export interface RimrafSyncOptions extends RimrafAsyncOptions {
- filter?: (path: string, ent: Dirent | Stats) => boolean;
-}
-export type RimrafOptions = RimrafSyncOptions | RimrafAsyncOptions;
-export declare const optArg: (opt?: RimrafAsyncOptions) => (RimrafAsyncOptions & {
- glob: GlobOptions & {
- withFileTypes: false;
- };
-}) | (RimrafAsyncOptions & {
- glob: undefined;
-});
-export declare const optArgSync: (opt?: RimrafSyncOptions) => (RimrafSyncOptions & {
- glob: GlobOptions & {
- withFileTypes: false;
- };
-}) | (RimrafSyncOptions & {
- glob: undefined;
-});
-//# sourceMappingURL=opt-arg.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/opt-arg.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/opt-arg.d.ts.map
deleted file mode 100644
index 89e83b205ac628..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/opt-arg.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"opt-arg.d.ts","sourceRoot":"","sources":["../../src/opt-arg.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,IAAI,CAAA;AAClC,OAAO,EAAE,WAAW,EAAE,MAAM,MAAM,CAAA;AAKlC,eAAO,MAAM,eAAe,MAAO,GAAG,KAAG,CAAC,IAAI,aAUX,CAAA;AAEnC,eAAO,MAAM,mBAAmB,EAAE,CAAC,CAAC,EAAE,GAAG,KAAK,IAM7C,CAAA;AAED,MAAM,WAAW,kBAAkB;IACjC,YAAY,CAAC,EAAE,OAAO,CAAA;IACtB,GAAG,CAAC,EAAE,MAAM,CAAA;IACZ,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,OAAO,CAAC,EAAE,MAAM,CAAA;IAChB,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,MAAM,CAAC,EAAE,WAAW,CAAA;IACpB,IAAI,CAAC,EAAE,OAAO,GAAG,WAAW,CAAA;IAC5B,MAAM,CAAC,EACH,CAAC,CAAC,IAAI,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,GAAG,KAAK,KAAK,OAAO,CAAC,GAChD,CAAC,CAAC,IAAI,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,GAAG,KAAK,KAAK,OAAO,CAAC,OAAO,CAAC,CAAC,CAAA;CAC9D;AAED,MAAM,WAAW,iBAAkB,SAAQ,kBAAkB;IAC3D,MAAM,CAAC,EAAE,CAAC,IAAI,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,GAAG,KAAK,KAAK,OAAO,CAAA;CACxD;AAED,MAAM,MAAM,aAAa,GAAG,iBAAiB,GAAG,kBAAkB,CAAA;AAqClE,eAAO,MAAM,MAAM,SAAS,kBAAkB;UA/BlC,WAAW,GAAG;QAAE,aAAa,EAAE,KAAK,CAAA;KAAE;;UAEjC,SAAS;EA6B0C,CAAA;AACpE,eAAO,MAAM,UAAU,SAAS,iBAAiB;UAhCrC,WAAW,GAAG;QAAE,aAAa,EAAE,KAAK,CAAA;KAAE;;UAEjC,SAAS;EA8B6C,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/opt-arg.js b/deps/npm/node_modules/rimraf/dist/commonjs/opt-arg.js
deleted file mode 100644
index 1d030a16d3c0f0..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/opt-arg.js
+++ /dev/null
@@ -1,53 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.optArgSync = exports.optArg = exports.assertRimrafOptions = exports.isRimrafOptions = void 0;
-const typeOrUndef = (val, t) => typeof val === 'undefined' || typeof val === t;
-const isRimrafOptions = (o) => !!o &&
- typeof o === 'object' &&
- typeOrUndef(o.preserveRoot, 'boolean') &&
- typeOrUndef(o.tmp, 'string') &&
- typeOrUndef(o.maxRetries, 'number') &&
- typeOrUndef(o.retryDelay, 'number') &&
- typeOrUndef(o.backoff, 'number') &&
- typeOrUndef(o.maxBackoff, 'number') &&
- (typeOrUndef(o.glob, 'boolean') || (o.glob && typeof o.glob === 'object')) &&
- typeOrUndef(o.filter, 'function');
-exports.isRimrafOptions = isRimrafOptions;
-const assertRimrafOptions = (o) => {
- if (!(0, exports.isRimrafOptions)(o)) {
- throw new Error('invalid rimraf options');
- }
-};
-exports.assertRimrafOptions = assertRimrafOptions;
-const optArgT = (opt) => {
- (0, exports.assertRimrafOptions)(opt);
- const { glob, ...options } = opt;
- if (!glob) {
- return options;
- }
- const globOpt = glob === true ?
- opt.signal ?
- { signal: opt.signal }
- : {}
- : opt.signal ?
- {
- signal: opt.signal,
- ...glob,
- }
- : glob;
- return {
- ...options,
- glob: {
- ...globOpt,
- // always get absolute paths from glob, to ensure
- // that we are referencing the correct thing.
- absolute: true,
- withFileTypes: false,
- },
- };
-};
-const optArg = (opt = {}) => optArgT(opt);
-exports.optArg = optArg;
-const optArgSync = (opt = {}) => optArgT(opt);
-exports.optArgSync = optArgSync;
-//# sourceMappingURL=opt-arg.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/opt-arg.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/opt-arg.js.map
deleted file mode 100644
index d815735d639a46..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/opt-arg.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"opt-arg.js","sourceRoot":"","sources":["../../src/opt-arg.ts"],"names":[],"mappings":";;;AAGA,MAAM,WAAW,GAAG,CAAC,GAAQ,EAAE,CAAS,EAAE,EAAE,CAC1C,OAAO,GAAG,KAAK,WAAW,IAAI,OAAO,GAAG,KAAK,CAAC,CAAA;AAEzC,MAAM,eAAe,GAAG,CAAC,CAAM,EAAsB,EAAE,CAC5D,CAAC,CAAC,CAAC;IACH,OAAO,CAAC,KAAK,QAAQ;IACrB,WAAW,CAAC,CAAC,CAAC,YAAY,EAAE,SAAS,CAAC;IACtC,WAAW,CAAC,CAAC,CAAC,GAAG,EAAE,QAAQ,CAAC;IAC5B,WAAW,CAAC,CAAC,CAAC,UAAU,EAAE,QAAQ,CAAC;IACnC,WAAW,CAAC,CAAC,CAAC,UAAU,EAAE,QAAQ,CAAC;IACnC,WAAW,CAAC,CAAC,CAAC,OAAO,EAAE,QAAQ,CAAC;IAChC,WAAW,CAAC,CAAC,CAAC,UAAU,EAAE,QAAQ,CAAC;IACnC,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,EAAE,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,IAAI,OAAO,CAAC,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAC;IAC1E,WAAW,CAAC,CAAC,CAAC,MAAM,EAAE,UAAU,CAAC,CAAA;AAVtB,QAAA,eAAe,mBAUO;AAE5B,MAAM,mBAAmB,GAAqB,CACnD,CAAM,EACsB,EAAE;IAC9B,IAAI,CAAC,IAAA,uBAAe,EAAC,CAAC,CAAC,EAAE,CAAC;QACxB,MAAM,IAAI,KAAK,CAAC,wBAAwB,CAAC,CAAA;IAC3C,CAAC;AACH,CAAC,CAAA;AANY,QAAA,mBAAmB,uBAM/B;AAsBD,MAAM,OAAO,GAAG,CACd,GAAM,EAKsB,EAAE;IAC9B,IAAA,2BAAmB,EAAC,GAAG,CAAC,CAAA;IACxB,MAAM,EAAE,IAAI,EAAE,GAAG,OAAO,EAAE,GAAG,GAAG,CAAA;IAChC,IAAI,CAAC,IAAI,EAAE,CAAC;QACV,OAAO,OAAkC,CAAA;IAC3C,CAAC;IACD,MAAM,OAAO,GACX,IAAI,KAAK,IAAI,CAAC,CAAC;QACb,GAAG,CAAC,MAAM,CAAC,CAAC;YACV,EAAE,MAAM,EAAE,GAAG,CAAC,MAAM,EAAE;YACxB,CAAC,CAAC,EAAE;QACN,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;YACZ;gBACE,MAAM,EAAE,GAAG,CAAC,MAAM;gBAClB,GAAG,IAAI;aACR;YACH,CAAC,CAAC,IAAI,CAAA;IACR,OAAO;QACL,GAAG,OAAO;QACV,IAAI,EAAE;YACJ,GAAG,OAAO;YACV,iDAAiD;YACjD,6CAA6C;YAC7C,QAAQ,EAAE,IAAI;YACd,aAAa,EAAE,KAAK;SACrB;KACsD,CAAA;AAC3D,CAAC,CAAA;AAEM,MAAM,MAAM,GAAG,CAAC,MAA0B,EAAE,EAAE,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAA;AAAvD,QAAA,MAAM,UAAiD;AAC7D,MAAM,UAAU,GAAG,CAAC,MAAyB,EAAE,EAAE,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAA;AAA1D,QAAA,UAAU,cAAgD","sourcesContent":["import { Dirent, Stats } from 'fs'\nimport { GlobOptions } from 'glob'\n\nconst typeOrUndef = (val: any, t: string) =>\n typeof val === 'undefined' || typeof val === t\n\nexport const isRimrafOptions = (o: any): o is RimrafOptions =>\n !!o &&\n typeof o === 'object' &&\n typeOrUndef(o.preserveRoot, 'boolean') &&\n typeOrUndef(o.tmp, 'string') &&\n typeOrUndef(o.maxRetries, 'number') &&\n typeOrUndef(o.retryDelay, 'number') &&\n typeOrUndef(o.backoff, 'number') &&\n typeOrUndef(o.maxBackoff, 'number') &&\n (typeOrUndef(o.glob, 'boolean') || (o.glob && typeof o.glob === 'object')) &&\n typeOrUndef(o.filter, 'function')\n\nexport const assertRimrafOptions: (o: any) => void = (\n o: any,\n): asserts o is RimrafOptions => {\n if (!isRimrafOptions(o)) {\n throw new Error('invalid rimraf options')\n }\n}\n\nexport interface RimrafAsyncOptions {\n preserveRoot?: boolean\n tmp?: string\n maxRetries?: number\n retryDelay?: number\n backoff?: number\n maxBackoff?: number\n signal?: AbortSignal\n glob?: boolean | GlobOptions\n filter?:\n | ((path: string, ent: Dirent | Stats) => boolean)\n | ((path: string, ent: Dirent | Stats) => Promise)\n}\n\nexport interface RimrafSyncOptions extends RimrafAsyncOptions {\n filter?: (path: string, ent: Dirent | Stats) => boolean\n}\n\nexport type RimrafOptions = RimrafSyncOptions | RimrafAsyncOptions\n\nconst optArgT = (\n opt: T,\n):\n | (T & {\n glob: GlobOptions & { withFileTypes: false }\n })\n | (T & { glob: undefined }) => {\n assertRimrafOptions(opt)\n const { glob, ...options } = opt\n if (!glob) {\n return options as T & { glob: undefined }\n }\n const globOpt =\n glob === true ?\n opt.signal ?\n { signal: opt.signal }\n : {}\n : opt.signal ?\n {\n signal: opt.signal,\n ...glob,\n }\n : glob\n 
return {\n ...options,\n glob: {\n ...globOpt,\n // always get absolute paths from glob, to ensure\n // that we are referencing the correct thing.\n absolute: true,\n withFileTypes: false,\n },\n } as T & { glob: GlobOptions & { withFileTypes: false } }\n}\n\nexport const optArg = (opt: RimrafAsyncOptions = {}) => optArgT(opt)\nexport const optArgSync = (opt: RimrafSyncOptions = {}) => optArgT(opt)\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/package.json b/deps/npm/node_modules/rimraf/dist/commonjs/package.json
deleted file mode 100644
index 5bbefffbabee39..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
- "type": "commonjs"
-}
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/path-arg.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/path-arg.d.ts
deleted file mode 100644
index c0b7e7cb4b15e3..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/path-arg.d.ts
+++ /dev/null
@@ -1,4 +0,0 @@
-import { RimrafAsyncOptions } from './index.js';
-declare const pathArg: (path: string, opt?: RimrafAsyncOptions) => string;
-export default pathArg;
-//# sourceMappingURL=path-arg.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/path-arg.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/path-arg.d.ts.map
deleted file mode 100644
index 4fe93c3a8aec47..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/path-arg.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"path-arg.d.ts","sourceRoot":"","sources":["../../src/path-arg.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,kBAAkB,EAAE,MAAM,YAAY,CAAA;AAG/C,QAAA,MAAM,OAAO,SAAU,MAAM,QAAO,kBAAkB,WAgDrD,CAAA;AAED,eAAe,OAAO,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/path-arg.js b/deps/npm/node_modules/rimraf/dist/commonjs/path-arg.js
deleted file mode 100644
index 8a4908aa08ef50..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/path-arg.js
+++ /dev/null
@@ -1,52 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const path_1 = require("path");
-const util_1 = require("util");
-const platform_js_1 = __importDefault(require("./platform.js"));
-const pathArg = (path, opt = {}) => {
- const type = typeof path;
- if (type !== 'string') {
- const ctor = path && type === 'object' && path.constructor;
- const received = ctor && ctor.name ? `an instance of ${ctor.name}`
- : type === 'object' ? (0, util_1.inspect)(path)
- : `type ${type} ${path}`;
- const msg = 'The "path" argument must be of type string. ' + `Received ${received}`;
- throw Object.assign(new TypeError(msg), {
- path,
- code: 'ERR_INVALID_ARG_TYPE',
- });
- }
- if (/\0/.test(path)) {
- // simulate same failure that node raises
- const msg = 'path must be a string without null bytes';
- throw Object.assign(new TypeError(msg), {
- path,
- code: 'ERR_INVALID_ARG_VALUE',
- });
- }
- path = (0, path_1.resolve)(path);
- const { root } = (0, path_1.parse)(path);
- if (path === root && opt.preserveRoot !== false) {
- const msg = 'refusing to remove root directory without preserveRoot:false';
- throw Object.assign(new Error(msg), {
- path,
- code: 'ERR_PRESERVE_ROOT',
- });
- }
- if (platform_js_1.default === 'win32') {
- const badWinChars = /[*|"<>?:]/;
- const { root } = (0, path_1.parse)(path);
- if (badWinChars.test(path.substring(root.length))) {
- throw Object.assign(new Error('Illegal characters in path.'), {
- path,
- code: 'EINVAL',
- });
- }
- }
- return path;
-};
-exports.default = pathArg;
-//# sourceMappingURL=path-arg.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/path-arg.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/path-arg.js.map
deleted file mode 100644
index 40e4a19e7003a0..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/path-arg.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"path-arg.js","sourceRoot":"","sources":["../../src/path-arg.ts"],"names":[],"mappings":";;;;;AAAA,+BAAqC;AACrC,+BAA8B;AAE9B,gEAAoC;AAEpC,MAAM,OAAO,GAAG,CAAC,IAAY,EAAE,MAA0B,EAAE,EAAE,EAAE;IAC7D,MAAM,IAAI,GAAG,OAAO,IAAI,CAAA;IACxB,IAAI,IAAI,KAAK,QAAQ,EAAE,CAAC;QACtB,MAAM,IAAI,GAAG,IAAI,IAAI,IAAI,KAAK,QAAQ,IAAI,IAAI,CAAC,WAAW,CAAA;QAC1D,MAAM,QAAQ,GACZ,IAAI,IAAI,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,kBAAkB,IAAI,CAAC,IAAI,EAAE;YACjD,CAAC,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAA,cAAO,EAAC,IAAI,CAAC;gBACnC,CAAC,CAAC,QAAQ,IAAI,IAAI,IAAI,EAAE,CAAA;QAC1B,MAAM,GAAG,GACP,8CAA8C,GAAG,YAAY,QAAQ,EAAE,CAAA;QACzE,MAAM,MAAM,CAAC,MAAM,CAAC,IAAI,SAAS,CAAC,GAAG,CAAC,EAAE;YACtC,IAAI;YACJ,IAAI,EAAE,sBAAsB;SAC7B,CAAC,CAAA;IACJ,CAAC;IAED,IAAI,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC;QACpB,yCAAyC;QACzC,MAAM,GAAG,GAAG,0CAA0C,CAAA;QACtD,MAAM,MAAM,CAAC,MAAM,CAAC,IAAI,SAAS,CAAC,GAAG,CAAC,EAAE;YACtC,IAAI;YACJ,IAAI,EAAE,uBAAuB;SAC9B,CAAC,CAAA;IACJ,CAAC;IAED,IAAI,GAAG,IAAA,cAAO,EAAC,IAAI,CAAC,CAAA;IACpB,MAAM,EAAE,IAAI,EAAE,GAAG,IAAA,YAAK,EAAC,IAAI,CAAC,CAAA;IAE5B,IAAI,IAAI,KAAK,IAAI,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,EAAE,CAAC;QAChD,MAAM,GAAG,GAAG,8DAA8D,CAAA;QAC1E,MAAM,MAAM,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,GAAG,CAAC,EAAE;YAClC,IAAI;YACJ,IAAI,EAAE,mBAAmB;SAC1B,CAAC,CAAA;IACJ,CAAC;IAED,IAAI,qBAAQ,KAAK,OAAO,EAAE,CAAC;QACzB,MAAM,WAAW,GAAG,WAAW,CAAA;QAC/B,MAAM,EAAE,IAAI,EAAE,GAAG,IAAA,YAAK,EAAC,IAAI,CAAC,CAAA;QAC5B,IAAI,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC;YAClD,MAAM,MAAM,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,6BAA6B,CAAC,EAAE;gBAC5D,IAAI;gBACJ,IAAI,EAAE,QAAQ;aACf,CAAC,CAAA;QACJ,CAAC;IACH,CAAC;IAED,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,kBAAe,OAAO,CAAA","sourcesContent":["import { parse, resolve } from 'path'\nimport { inspect } from 'util'\nimport { RimrafAsyncOptions } from './index.js'\nimport platform from './platform.js'\n\nconst pathArg = (path: string, opt: RimrafAsyncOptions = {}) => {\n const type = typeof path\n if (type !== 'string') {\n const ctor = path && type === 'object' && path.constructor\n const received =\n ctor && ctor.name ? `an instance of ${ctor.name}`\n : type === 'object' ? inspect(path)\n : `type ${type} ${path}`\n const msg =\n 'The \"path\" argument must be of type string. ' + `Received ${received}`\n throw Object.assign(new TypeError(msg), {\n path,\n code: 'ERR_INVALID_ARG_TYPE',\n })\n }\n\n if (/\\0/.test(path)) {\n // simulate same failure that node raises\n const msg = 'path must be a string without null bytes'\n throw Object.assign(new TypeError(msg), {\n path,\n code: 'ERR_INVALID_ARG_VALUE',\n })\n }\n\n path = resolve(path)\n const { root } = parse(path)\n\n if (path === root && opt.preserveRoot !== false) {\n const msg = 'refusing to remove root directory without preserveRoot:false'\n throw Object.assign(new Error(msg), {\n path,\n code: 'ERR_PRESERVE_ROOT',\n })\n }\n\n if (platform === 'win32') {\n const badWinChars = /[*|\"<>?:]/\n const { root } = parse(path)\n if (badWinChars.test(path.substring(root.length))) {\n throw Object.assign(new Error('Illegal characters in path.'), {\n path,\n code: 'EINVAL',\n })\n }\n }\n\n return path\n}\n\nexport default pathArg\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/platform.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/platform.d.ts
deleted file mode 100644
index e127a8e529ffd2..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/platform.d.ts
+++ /dev/null
@@ -1,3 +0,0 @@
-declare const _default: string;
-export default _default;
-//# sourceMappingURL=platform.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/platform.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/platform.d.ts.map
deleted file mode 100644
index ef2e6734f8cfbb..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/platform.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"platform.d.ts","sourceRoot":"","sources":["../../src/platform.ts"],"names":[],"mappings":";AAAA,wBAA0E"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/platform.js b/deps/npm/node_modules/rimraf/dist/commonjs/platform.js
deleted file mode 100644
index 58f197ffbf8249..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/platform.js
+++ /dev/null
@@ -1,4 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.default = process.env.__TESTING_RIMRAF_PLATFORM__ || process.platform;
-//# sourceMappingURL=platform.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/platform.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/platform.js.map
deleted file mode 100644
index 814cdb8c244c57..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/platform.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"platform.js","sourceRoot":"","sources":["../../src/platform.ts"],"names":[],"mappings":";;AAAA,kBAAe,OAAO,CAAC,GAAG,CAAC,2BAA2B,IAAI,OAAO,CAAC,QAAQ,CAAA","sourcesContent":["export default process.env.__TESTING_RIMRAF_PLATFORM__ || process.platform\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/readdir-or-error.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/readdir-or-error.d.ts
deleted file mode 100644
index cce73097f1681f..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/readdir-or-error.d.ts
+++ /dev/null
@@ -1,3 +0,0 @@
-export declare const readdirOrError: (path: string) => Promise<import("fs").Dirent[] | NodeJS.ErrnoException>;
-export declare const readdirOrErrorSync: (path: string) => import("fs").Dirent[] | NodeJS.ErrnoException;
-//# sourceMappingURL=readdir-or-error.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/readdir-or-error.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/readdir-or-error.d.ts.map
deleted file mode 100644
index 8a19f6bdfd0706..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/readdir-or-error.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"readdir-or-error.d.ts","sourceRoot":"","sources":["../../src/readdir-or-error.ts"],"names":[],"mappings":"AAIA,eAAO,MAAM,cAAc,SAAU,MAAM,2DACa,CAAA;AACxD,eAAO,MAAM,kBAAkB,SAAU,MAAM,kDAM9C,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/readdir-or-error.js b/deps/npm/node_modules/rimraf/dist/commonjs/readdir-or-error.js
deleted file mode 100644
index 75330cb3816c8b..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/readdir-or-error.js
+++ /dev/null
@@ -1,19 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.readdirOrErrorSync = exports.readdirOrError = void 0;
-// returns an array of entries if readdir() works,
-// or the error that readdir() raised if not.
-const fs_js_1 = require("./fs.js");
-const { readdir } = fs_js_1.promises;
-const readdirOrError = (path) => readdir(path).catch(er => er);
-exports.readdirOrError = readdirOrError;
-const readdirOrErrorSync = (path) => {
- try {
- return (0, fs_js_1.readdirSync)(path);
- }
- catch (er) {
- return er;
- }
-};
-exports.readdirOrErrorSync = readdirOrErrorSync;
-//# sourceMappingURL=readdir-or-error.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/readdir-or-error.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/readdir-or-error.js.map
deleted file mode 100644
index 61dbfe11956140..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/readdir-or-error.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"readdir-or-error.js","sourceRoot":"","sources":["../../src/readdir-or-error.ts"],"names":[],"mappings":";;;AAAA,kDAAkD;AAClD,6CAA6C;AAC7C,mCAA+C;AAC/C,MAAM,EAAE,OAAO,EAAE,GAAG,gBAAQ,CAAA;AACrB,MAAM,cAAc,GAAG,CAAC,IAAY,EAAE,EAAE,CAC7C,OAAO,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE,CAAC,EAA2B,CAAC,CAAA;AAD3C,QAAA,cAAc,kBAC6B;AACjD,MAAM,kBAAkB,GAAG,CAAC,IAAY,EAAE,EAAE;IACjD,IAAI,CAAC;QACH,OAAO,IAAA,mBAAW,EAAC,IAAI,CAAC,CAAA;IAC1B,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,OAAO,EAA2B,CAAA;IACpC,CAAC;AACH,CAAC,CAAA;AANY,QAAA,kBAAkB,sBAM9B","sourcesContent":["// returns an array of entries if readdir() works,\n// or the error that readdir() raised if not.\nimport { promises, readdirSync } from './fs.js'\nconst { readdir } = promises\nexport const readdirOrError = (path: string) =>\n readdir(path).catch(er => er as NodeJS.ErrnoException)\nexport const readdirOrErrorSync = (path: string) => {\n try {\n return readdirSync(path)\n } catch (er) {\n return er as NodeJS.ErrnoException\n }\n}\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/retry-busy.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/retry-busy.d.ts
deleted file mode 100644
index c0af0dd62f0df9..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/retry-busy.d.ts
+++ /dev/null
@@ -1,8 +0,0 @@
-import { RimrafAsyncOptions, RimrafOptions } from './index.js';
-export declare const MAXBACKOFF = 200;
-export declare const RATE = 1.2;
-export declare const MAXRETRIES = 10;
-export declare const codes: Set<string>;
-export declare const retryBusy: (fn: (path: string) => Promise<any>) => (path: string, opt: RimrafAsyncOptions, backoff?: number, total?: number) => Promise<any>;
-export declare const retryBusySync: (fn: (path: string) => any) => (path: string, opt: RimrafOptions) => any;
-//# sourceMappingURL=retry-busy.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/retry-busy.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/retry-busy.d.ts.map
deleted file mode 100644
index 21960c58914b4b..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/retry-busy.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"retry-busy.d.ts","sourceRoot":"","sources":["../../src/retry-busy.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,kBAAkB,EAAE,aAAa,EAAE,MAAM,YAAY,CAAA;AAE9D,eAAO,MAAM,UAAU,MAAM,CAAA;AAC7B,eAAO,MAAM,IAAI,MAAM,CAAA;AACvB,eAAO,MAAM,UAAU,KAAK,CAAA;AAC5B,eAAO,MAAM,KAAK,aAAyC,CAAA;AAE3D,eAAO,MAAM,SAAS,OAAQ,CAAC,IAAI,EAAE,MAAM,KAAK,OAAO,CAAC,GAAG,CAAC,YAElD,MAAM,OACP,kBAAkB,mDAkC1B,CAAA;AAGD,eAAO,MAAM,aAAa,OAAQ,CAAC,IAAI,EAAE,MAAM,KAAK,GAAG,YAC/B,MAAM,OAAO,aAAa,QAsBjD,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/retry-busy.js b/deps/npm/node_modules/rimraf/dist/commonjs/retry-busy.js
deleted file mode 100644
index 5f9d15252bb10f..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/retry-busy.js
+++ /dev/null
@@ -1,68 +0,0 @@
-"use strict";
-// note: max backoff is the maximum that any *single* backoff will do
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.retryBusySync = exports.retryBusy = exports.codes = exports.MAXRETRIES = exports.RATE = exports.MAXBACKOFF = void 0;
-exports.MAXBACKOFF = 200;
-exports.RATE = 1.2;
-exports.MAXRETRIES = 10;
-exports.codes = new Set(['EMFILE', 'ENFILE', 'EBUSY']);
-const retryBusy = (fn) => {
- const method = async (path, opt, backoff = 1, total = 0) => {
- const mbo = opt.maxBackoff || exports.MAXBACKOFF;
- const rate = opt.backoff || exports.RATE;
- const max = opt.maxRetries || exports.MAXRETRIES;
- let retries = 0;
- while (true) {
- try {
- return await fn(path);
- }
- catch (er) {
- const fer = er;
- if (fer?.path === path && fer?.code && exports.codes.has(fer.code)) {
- backoff = Math.ceil(backoff * rate);
- total = backoff + total;
- if (total < mbo) {
- return new Promise((res, rej) => {
- setTimeout(() => {
- method(path, opt, backoff, total).then(res, rej);
- }, backoff);
- });
- }
- if (retries < max) {
- retries++;
- continue;
- }
- }
- throw er;
- }
- }
- };
- return method;
-};
-exports.retryBusy = retryBusy;
-// just retries, no async so no backoff
-const retryBusySync = (fn) => {
- const method = (path, opt) => {
- const max = opt.maxRetries || exports.MAXRETRIES;
- let retries = 0;
- while (true) {
- try {
- return fn(path);
- }
- catch (er) {
- const fer = er;
- if (fer?.path === path &&
- fer?.code &&
- exports.codes.has(fer.code) &&
- retries < max) {
- retries++;
- continue;
- }
- throw er;
- }
- }
- };
- return method;
-};
-exports.retryBusySync = retryBusySync;
-//# sourceMappingURL=retry-busy.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/retry-busy.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/retry-busy.js.map
deleted file mode 100644
index 1f1051d2f115fc..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/retry-busy.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"retry-busy.js","sourceRoot":"","sources":["../../src/retry-busy.ts"],"names":[],"mappings":";AAAA,qEAAqE;;;AAIxD,QAAA,UAAU,GAAG,GAAG,CAAA;AAChB,QAAA,IAAI,GAAG,GAAG,CAAA;AACV,QAAA,UAAU,GAAG,EAAE,CAAA;AACf,QAAA,KAAK,GAAG,IAAI,GAAG,CAAC,CAAC,QAAQ,EAAE,QAAQ,EAAE,OAAO,CAAC,CAAC,CAAA;AAEpD,MAAM,SAAS,GAAG,CAAC,EAAkC,EAAE,EAAE;IAC9D,MAAM,MAAM,GAAG,KAAK,EAClB,IAAY,EACZ,GAAuB,EACvB,OAAO,GAAG,CAAC,EACX,KAAK,GAAG,CAAC,EACT,EAAE;QACF,MAAM,GAAG,GAAG,GAAG,CAAC,UAAU,IAAI,kBAAU,CAAA;QACxC,MAAM,IAAI,GAAG,GAAG,CAAC,OAAO,IAAI,YAAI,CAAA;QAChC,MAAM,GAAG,GAAG,GAAG,CAAC,UAAU,IAAI,kBAAU,CAAA;QACxC,IAAI,OAAO,GAAG,CAAC,CAAA;QACf,OAAO,IAAI,EAAE,CAAC;YACZ,IAAI,CAAC;gBACH,OAAO,MAAM,EAAE,CAAC,IAAI,CAAC,CAAA;YACvB,CAAC;YAAC,OAAO,EAAE,EAAE,CAAC;gBACZ,MAAM,GAAG,GAAG,EAA2B,CAAA;gBACvC,IAAI,GAAG,EAAE,IAAI,KAAK,IAAI,IAAI,GAAG,EAAE,IAAI,IAAI,aAAK,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC;oBAC3D,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,CAAA;oBACnC,KAAK,GAAG,OAAO,GAAG,KAAK,CAAA;oBACvB,IAAI,KAAK,GAAG,GAAG,EAAE,CAAC;wBAChB,OAAO,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE;4BAC9B,UAAU,CAAC,GAAG,EAAE;gCACd,MAAM,CAAC,IAAI,EAAE,GAAG,EAAE,OAAO,EAAE,KAAK,CAAC,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,CAAC,CAAA;4BAClD,CAAC,EAAE,OAAO,CAAC,CAAA;wBACb,CAAC,CAAC,CAAA;oBACJ,CAAC;oBACD,IAAI,OAAO,GAAG,GAAG,EAAE,CAAC;wBAClB,OAAO,EAAE,CAAA;wBACT,SAAQ;oBACV,CAAC;gBACH,CAAC;gBACD,MAAM,EAAE,CAAA;YACV,CAAC;QACH,CAAC;IACH,CAAC,CAAA;IAED,OAAO,MAAM,CAAA;AACf,CAAC,CAAA;AArCY,QAAA,SAAS,aAqCrB;AAED,uCAAuC;AAChC,MAAM,aAAa,GAAG,CAAC,EAAyB,EAAE,EAAE;IACzD,MAAM,MAAM,GAAG,CAAC,IAAY,EAAE,GAAkB,EAAE,EAAE;QAClD,MAAM,GAAG,GAAG,GAAG,CAAC,UAAU,IAAI,kBAAU,CAAA;QACxC,IAAI,OAAO,GAAG,CAAC,CAAA;QACf,OAAO,IAAI,EAAE,CAAC;YACZ,IAAI,CAAC;gBACH,OAAO,EAAE,CAAC,IAAI,CAAC,CAAA;YACjB,CAAC;YAAC,OAAO,EAAE,EAAE,CAAC;gBACZ,MAAM,GAAG,GAAG,EAA2B,CAAA;gBACvC,IACE,GAAG,EAAE,IAAI,KAAK,IAAI;oBAClB,GAAG,EAAE,IAAI;oBACT,aAAK,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI,CAAC;oBACnB,OAAO,GAAG,GAAG,EACb,CAAC;oBACD,OAAO,EAAE,CAAA;oBACT,SAAQ;gBACV,CAAC;gBACD,MAAM,EAAE,CAAA;YACV,CAAC;QACH,CAAC;IACH,CAAC,CAAA;IACD,OAAO,MAAM,CAAA;AACf,CAAC,CAAA;AAvBY,QAAA,aAAa,iBAuBzB","sourcesContent":["// note: max backoff is the maximum that any *single* backoff will do\n\nimport { RimrafAsyncOptions, RimrafOptions } from './index.js'\n\nexport const MAXBACKOFF = 200\nexport const RATE = 1.2\nexport const MAXRETRIES = 10\nexport const codes = new Set(['EMFILE', 'ENFILE', 'EBUSY'])\n\nexport const retryBusy = (fn: (path: string) => Promise) => {\n const method = async (\n path: string,\n opt: RimrafAsyncOptions,\n backoff = 1,\n total = 0,\n ) => {\n const mbo = opt.maxBackoff || MAXBACKOFF\n const rate = opt.backoff || RATE\n const max = opt.maxRetries || MAXRETRIES\n let retries = 0\n while (true) {\n try {\n return await fn(path)\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n if (fer?.path === path && fer?.code && codes.has(fer.code)) {\n backoff = Math.ceil(backoff * rate)\n total = backoff + total\n if (total < mbo) {\n return new Promise((res, rej) => {\n setTimeout(() => {\n method(path, opt, backoff, total).then(res, rej)\n }, backoff)\n })\n }\n if (retries < max) {\n retries++\n continue\n }\n }\n throw er\n }\n }\n }\n\n return method\n}\n\n// just retries, no async so no backoff\nexport const retryBusySync = (fn: (path: string) => any) => {\n const method = (path: string, opt: RimrafOptions) => {\n const max = opt.maxRetries || MAXRETRIES\n let retries = 0\n while (true) {\n try {\n return fn(path)\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n if (\n fer?.path === 
path &&\n fer?.code &&\n codes.has(fer.code) &&\n retries < max\n ) {\n retries++\n continue\n }\n throw er\n }\n }\n }\n return method\n}\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-manual.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-manual.d.ts
deleted file mode 100644
index 35c5c86844c7fa..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-manual.d.ts
+++ /dev/null
@@ -1,3 +0,0 @@
-export declare const rimrafManual: (path: string, opt: import("./opt-arg.js").RimrafAsyncOptions) => Promise<boolean>;
-export declare const rimrafManualSync: (path: string, opt: import("./opt-arg.js").RimrafSyncOptions) => boolean;
-//# sourceMappingURL=rimraf-manual.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-manual.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-manual.d.ts.map
deleted file mode 100644
index 19bd25149ceb07..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-manual.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"rimraf-manual.d.ts","sourceRoot":"","sources":["../../src/rimraf-manual.ts"],"names":[],"mappings":"AAKA,eAAO,MAAM,YAAY,oFAAqD,CAAA;AAC9E,eAAO,MAAM,gBAAgB,0EAC+B,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-manual.js b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-manual.js
deleted file mode 100644
index 1c95ae23bb98b1..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-manual.js
+++ /dev/null
@@ -1,12 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.rimrafManualSync = exports.rimrafManual = void 0;
-const platform_js_1 = __importDefault(require("./platform.js"));
-const rimraf_posix_js_1 = require("./rimraf-posix.js");
-const rimraf_windows_js_1 = require("./rimraf-windows.js");
-exports.rimrafManual = platform_js_1.default === 'win32' ? rimraf_windows_js_1.rimrafWindows : rimraf_posix_js_1.rimrafPosix;
-exports.rimrafManualSync = platform_js_1.default === 'win32' ? rimraf_windows_js_1.rimrafWindowsSync : rimraf_posix_js_1.rimrafPosixSync;
-//# sourceMappingURL=rimraf-manual.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-manual.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-manual.js.map
deleted file mode 100644
index e26e44577d9f0d..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-manual.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"rimraf-manual.js","sourceRoot":"","sources":["../../src/rimraf-manual.ts"],"names":[],"mappings":";;;;;;AAAA,gEAAoC;AAEpC,uDAAgE;AAChE,2DAAsE;AAEzD,QAAA,YAAY,GAAG,qBAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,iCAAa,CAAC,CAAC,CAAC,6BAAW,CAAA;AACjE,QAAA,gBAAgB,GAC3B,qBAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,qCAAiB,CAAC,CAAC,CAAC,iCAAe,CAAA","sourcesContent":["import platform from './platform.js'\n\nimport { rimrafPosix, rimrafPosixSync } from './rimraf-posix.js'\nimport { rimrafWindows, rimrafWindowsSync } from './rimraf-windows.js'\n\nexport const rimrafManual = platform === 'win32' ? rimrafWindows : rimrafPosix\nexport const rimrafManualSync =\n platform === 'win32' ? rimrafWindowsSync : rimrafPosixSync\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-move-remove.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-move-remove.d.ts
deleted file mode 100644
index 5d41d40825e4c7..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-move-remove.d.ts
+++ /dev/null
@@ -1,4 +0,0 @@
-import { RimrafAsyncOptions, RimrafSyncOptions } from './index.js';
-export declare const rimrafMoveRemove: (path: string, opt: RimrafAsyncOptions) => Promise<boolean>;
-export declare const rimrafMoveRemoveSync: (path: string, opt: RimrafSyncOptions) => boolean;
-//# sourceMappingURL=rimraf-move-remove.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-move-remove.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-move-remove.d.ts.map
deleted file mode 100644
index 4062eaebbb1302..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-move-remove.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"rimraf-move-remove.d.ts","sourceRoot":"","sources":["../../src/rimraf-move-remove.ts"],"names":[],"mappings":"AA6BA,OAAO,EAAE,kBAAkB,EAAE,iBAAiB,EAAE,MAAM,YAAY,CAAA;AA4ClE,eAAO,MAAM,gBAAgB,SACrB,MAAM,OACP,kBAAkB,qBAWxB,CAAA;AA4ED,eAAO,MAAM,oBAAoB,SAAU,MAAM,OAAO,iBAAiB,YAUxE,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-move-remove.js b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-move-remove.js
deleted file mode 100644
index ac668d1c9dbbae..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-move-remove.js
+++ /dev/null
@@ -1,192 +0,0 @@
-"use strict";
-// https://youtu.be/uhRWMGBjlO8?t=537
-//
-// 1. readdir
-// 2. for each entry
-// a. if a non-empty directory, recurse
-// b. if an empty directory, move to random hidden file name in $TEMP
-// c. unlink/rmdir $TEMP
-//
-// This works around the fact that unlink/rmdir is non-atomic and takes
-// a non-deterministic amount of time to complete.
-//
-// However, it is HELLA SLOW, like 2-10x slower than a naive recursive rm.
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.rimrafMoveRemoveSync = exports.rimrafMoveRemove = void 0;
-const path_1 = require("path");
-const default_tmp_js_1 = require("./default-tmp.js");
-const ignore_enoent_js_1 = require("./ignore-enoent.js");
-const fs_js_1 = require("./fs.js");
-const { lstat, rename, unlink, rmdir, chmod } = fs_js_1.promises;
-const readdir_or_error_js_1 = require("./readdir-or-error.js");
-// crypto.randomBytes is much slower, and Math.random() is enough here
-const uniqueFilename = (path) => `.${(0, path_1.basename)(path)}.${Math.random()}`;
-const unlinkFixEPERM = async (path) => unlink(path).catch((er) => {
- if (er.code === 'EPERM') {
- return chmod(path, 0o666).then(() => unlink(path), er2 => {
- if (er2.code === 'ENOENT') {
- return;
- }
- throw er;
- });
- }
- else if (er.code === 'ENOENT') {
- return;
- }
- throw er;
-});
-const unlinkFixEPERMSync = (path) => {
- try {
- (0, fs_js_1.unlinkSync)(path);
- }
- catch (er) {
- if (er?.code === 'EPERM') {
- try {
- return (0, fs_js_1.chmodSync)(path, 0o666);
- }
- catch (er2) {
- if (er2?.code === 'ENOENT') {
- return;
- }
- throw er;
- }
- }
- else if (er?.code === 'ENOENT') {
- return;
- }
- throw er;
- }
-};
-const rimrafMoveRemove = async (path, opt) => {
- if (opt?.signal?.aborted) {
- throw opt.signal.reason;
- }
- try {
- return await rimrafMoveRemoveDir(path, opt, await lstat(path));
- }
- catch (er) {
- if (er?.code === 'ENOENT')
- return true;
- throw er;
- }
-};
-exports.rimrafMoveRemove = rimrafMoveRemove;
-const rimrafMoveRemoveDir = async (path, opt, ent) => {
- if (opt?.signal?.aborted) {
- throw opt.signal.reason;
- }
- if (!opt.tmp) {
- return rimrafMoveRemoveDir(path, { ...opt, tmp: await (0, default_tmp_js_1.defaultTmp)(path) }, ent);
- }
- if (path === opt.tmp && (0, path_1.parse)(path).root !== path) {
- throw new Error('cannot delete temp directory used for deletion');
- }
- const entries = ent.isDirectory() ? await (0, readdir_or_error_js_1.readdirOrError)(path) : null;
- if (!Array.isArray(entries)) {
- // this can only happen if lstat/readdir lied, or if the dir was
- // swapped out with a file at just the right moment.
- /* c8 ignore start */
- if (entries) {
- if (entries.code === 'ENOENT') {
- return true;
- }
- if (entries.code !== 'ENOTDIR') {
- throw entries;
- }
- }
- /* c8 ignore stop */
- if (opt.filter && !(await opt.filter(path, ent))) {
- return false;
- }
- await (0, ignore_enoent_js_1.ignoreENOENT)(tmpUnlink(path, opt.tmp, unlinkFixEPERM));
- return true;
- }
- const removedAll = (await Promise.all(entries.map(ent => rimrafMoveRemoveDir((0, path_1.resolve)(path, ent.name), opt, ent)))).reduce((a, b) => a && b, true);
- if (!removedAll) {
- return false;
- }
- // we don't ever ACTUALLY try to unlink /, because that can never work
- // but when preserveRoot is false, we could be operating on it.
- // No need to check if preserveRoot is not false.
- if (opt.preserveRoot === false && path === (0, path_1.parse)(path).root) {
- return false;
- }
- if (opt.filter && !(await opt.filter(path, ent))) {
- return false;
- }
- await (0, ignore_enoent_js_1.ignoreENOENT)(tmpUnlink(path, opt.tmp, rmdir));
- return true;
-};
-const tmpUnlink = async (path, tmp, rm) => {
- const tmpFile = (0, path_1.resolve)(tmp, uniqueFilename(path));
- await rename(path, tmpFile);
- return await rm(tmpFile);
-};
-const rimrafMoveRemoveSync = (path, opt) => {
- if (opt?.signal?.aborted) {
- throw opt.signal.reason;
- }
- try {
- return rimrafMoveRemoveDirSync(path, opt, (0, fs_js_1.lstatSync)(path));
- }
- catch (er) {
- if (er?.code === 'ENOENT')
- return true;
- throw er;
- }
-};
-exports.rimrafMoveRemoveSync = rimrafMoveRemoveSync;
-const rimrafMoveRemoveDirSync = (path, opt, ent) => {
- if (opt?.signal?.aborted) {
- throw opt.signal.reason;
- }
- if (!opt.tmp) {
- return rimrafMoveRemoveDirSync(path, { ...opt, tmp: (0, default_tmp_js_1.defaultTmpSync)(path) }, ent);
- }
- const tmp = opt.tmp;
- if (path === opt.tmp && (0, path_1.parse)(path).root !== path) {
- throw new Error('cannot delete temp directory used for deletion');
- }
- const entries = ent.isDirectory() ? (0, readdir_or_error_js_1.readdirOrErrorSync)(path) : null;
- if (!Array.isArray(entries)) {
- // this can only happen if lstat/readdir lied, or if the dir was
- // swapped out with a file at just the right moment.
- /* c8 ignore start */
- if (entries) {
- if (entries.code === 'ENOENT') {
- return true;
- }
- if (entries.code !== 'ENOTDIR') {
- throw entries;
- }
- }
- /* c8 ignore stop */
- if (opt.filter && !opt.filter(path, ent)) {
- return false;
- }
- (0, ignore_enoent_js_1.ignoreENOENTSync)(() => tmpUnlinkSync(path, tmp, unlinkFixEPERMSync));
- return true;
- }
- let removedAll = true;
- for (const ent of entries) {
- const p = (0, path_1.resolve)(path, ent.name);
- removedAll = rimrafMoveRemoveDirSync(p, opt, ent) && removedAll;
- }
- if (!removedAll) {
- return false;
- }
- if (opt.preserveRoot === false && path === (0, path_1.parse)(path).root) {
- return false;
- }
- if (opt.filter && !opt.filter(path, ent)) {
- return false;
- }
- (0, ignore_enoent_js_1.ignoreENOENTSync)(() => tmpUnlinkSync(path, tmp, fs_js_1.rmdirSync));
- return true;
-};
-const tmpUnlinkSync = (path, tmp, rmSync) => {
- const tmpFile = (0, path_1.resolve)(tmp, uniqueFilename(path));
- (0, fs_js_1.renameSync)(path, tmpFile);
- return rmSync(tmpFile);
-};
-//# sourceMappingURL=rimraf-move-remove.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-move-remove.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-move-remove.js.map
deleted file mode 100644
index 44602502b90b2d..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-move-remove.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"rimraf-move-remove.js","sourceRoot":"","sources":["../../src/rimraf-move-remove.ts"],"names":[],"mappings":";AAAA,qCAAqC;AACrC,EAAE;AACF,aAAa;AACb,oBAAoB;AACpB,yCAAyC;AACzC,uEAAuE;AACvE,0BAA0B;AAC1B,EAAE;AACF,uEAAuE;AACvE,kDAAkD;AAClD,EAAE;AACF,0EAA0E;;;AAE1E,+BAA+C;AAC/C,qDAA6D;AAE7D,yDAAmE;AAEnE,mCAOgB;AAChB,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE,GAAG,gBAAU,CAAA;AAI1D,+DAA0E;AAE1E,sEAAsE;AACtE,MAAM,cAAc,GAAG,CAAC,IAAY,EAAE,EAAE,CAAC,IAAI,IAAA,eAAQ,EAAC,IAAI,CAAC,IAAI,IAAI,CAAC,MAAM,EAAE,EAAE,CAAA;AAE9E,MAAM,cAAc,GAAG,KAAK,EAAE,IAAY,EAAE,EAAE,CAC5C,MAAM,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,CAAC,EAA6B,EAAE,EAAE;IACnD,IAAI,EAAE,CAAC,IAAI,KAAK,OAAO,EAAE,CAAC;QACxB,OAAO,KAAK,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC,IAAI,CAC5B,GAAG,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,EAClB,GAAG,CAAC,EAAE;YACJ,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAC1B,OAAM;YACR,CAAC;YACD,MAAM,EAAE,CAAA;QACV,CAAC,CACF,CAAA;IACH,CAAC;SAAM,IAAI,EAAE,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;QAChC,OAAM;IACR,CAAC;IACD,MAAM,EAAE,CAAA;AACV,CAAC,CAAC,CAAA;AAEJ,MAAM,kBAAkB,GAAG,CAAC,IAAY,EAAE,EAAE;IAC1C,IAAI,CAAC;QACH,IAAA,kBAAU,EAAC,IAAI,CAAC,CAAA;IAClB,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,OAAO,EAAE,CAAC;YACpD,IAAI,CAAC;gBACH,OAAO,IAAA,iBAAS,EAAC,IAAI,EAAE,KAAK,CAAC,CAAA;YAC/B,CAAC;YAAC,OAAO,GAAG,EAAE,CAAC;gBACb,IAAK,GAA6B,EAAE,IAAI,KAAK,QAAQ,EAAE,CAAC;oBACtD,OAAM;gBACR,CAAC;gBACD,MAAM,EAAE,CAAA;YACV,CAAC;QACH,CAAC;aAAM,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ,EAAE,CAAC;YAC5D,OAAM;QACR,CAAC;QACD,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAEM,MAAM,gBAAgB,GAAG,KAAK,EACnC,IAAY,EACZ,GAAuB,EACvB,EAAE;IACF,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC;QACH,OAAO,MAAM,mBAAmB,CAAC,IAAI,EAAE,GAAG,EAAE,MAAM,KAAK,CAAC,IAAI,CAAC,CAAC,CAAA;IAChE,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ;YAAE,OAAO,IAAI,CAAA;QACjE,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAbY,QAAA,gBAAgB,oBAa5B;AAED,MAAM,mBAAmB,GAAG,KAAK,EAC/B,IAAY,EACZ,GAAuB,EACvB,GAAmB,EACD,EAAE;IACpB,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE,CAAC;QACb,OAAO,mBAAmB,CACxB,IAAI,EACJ,EAAE,GAAG,GAAG,EAAE,GAAG,EAAE,MAAM,IAAA,2BAAU,EAAC,IAAI,CAAC,EAAE,EACvC,GAAG,CACJ,CAAA;IACH,CAAC;IACD,IAAI,IAAI,KAAK,GAAG,CAAC,GAAG,IAAI,IAAA,YAAK,EAAC,IAAI,CAAC,CAAC,IAAI,KAAK,IAAI,EAAE,CAAC;QAClD,MAAM,IAAI,KAAK,CAAC,gDAAgD,CAAC,CAAA;IACnE,CAAC;IAED,MAAM,OAAO,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,MAAM,IAAA,oCAAc,EAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAA;IACrE,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;QAC5B,gEAAgE;QAChE,oDAAoD;QACpD,qBAAqB;QACrB,IAAI,OAAO,EAAE,CAAC;YACZ,IAAI,OAAO,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAC9B,OAAO,IAAI,CAAA;YACb,CAAC;YACD,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBAC/B,MAAM,OAAO,CAAA;YACf,CAAC;QACH,CAAC;QACD,oBAAoB;QACpB,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC;YACjD,OAAO,KAAK,CAAA;QACd,CAAC;QACD,MAAM,IAAA,+BAAY,EAAC,SAAS,CAAC,IAAI,EAAE,GAAG,CAAC,GAAG,EAAE,cAAc,CAAC,CAAC,CAAA;QAC5D,OAAO,IAAI,CAAA;IACb,CAAC;IAED,MAAM,UAAU,GAAG,CACjB,MAAM,OAAO,CAAC,GAAG,CACf,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAChB,mBAAmB,CAAC,IAAA,cAAO,EAAC,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,EAAE,GAAG,EAAE,GAAG,CAAC,CACvD,CACF,CACF,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,CAAA;IAChC,IAAI,CAAC,UAAU,EAAE,CAAC;QAChB,OAAO,KAAK,CAAA;IACd,CAAC;IAED,sEAAsE;IACtE,+DAA+D;IAC/D,iDAAiD;IACjD,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,IAAI,IAAI,KAAK,IAAA,YAAK,EAAC,IAAI,CAAC,CAA
C,IAAI,EAAE,CAAC;QAC5D,OAAO,KAAK,CAAA;IACd,CAAC;IACD,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC;QACjD,OAAO,KAAK,CAAA;IACd,CAAC;IACD,MAAM,IAAA,+BAAY,EAAC,SAAS,CAAC,IAAI,EAAE,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC,CAAA;IACnD,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,SAAS,GAAG,KAAK,EACrB,IAAY,EACZ,GAAW,EACX,EAA+B,EAC/B,EAAE;IACF,MAAM,OAAO,GAAG,IAAA,cAAO,EAAC,GAAG,EAAE,cAAc,CAAC,IAAI,CAAC,CAAC,CAAA;IAClD,MAAM,MAAM,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;IAC3B,OAAO,MAAM,EAAE,CAAC,OAAO,CAAC,CAAA;AAC1B,CAAC,CAAA;AAEM,MAAM,oBAAoB,GAAG,CAAC,IAAY,EAAE,GAAsB,EAAE,EAAE;IAC3E,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC;QACH,OAAO,uBAAuB,CAAC,IAAI,EAAE,GAAG,EAAE,IAAA,iBAAS,EAAC,IAAI,CAAC,CAAC,CAAA;IAC5D,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ;YAAE,OAAO,IAAI,CAAA;QACjE,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAVY,QAAA,oBAAoB,wBAUhC;AAED,MAAM,uBAAuB,GAAG,CAC9B,IAAY,EACZ,GAAsB,EACtB,GAAmB,EACV,EAAE;IACX,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE,CAAC;QACb,OAAO,uBAAuB,CAC5B,IAAI,EACJ,EAAE,GAAG,GAAG,EAAE,GAAG,EAAE,IAAA,+BAAc,EAAC,IAAI,CAAC,EAAE,EACrC,GAAG,CACJ,CAAA;IACH,CAAC;IACD,MAAM,GAAG,GAAW,GAAG,CAAC,GAAG,CAAA;IAE3B,IAAI,IAAI,KAAK,GAAG,CAAC,GAAG,IAAI,IAAA,YAAK,EAAC,IAAI,CAAC,CAAC,IAAI,KAAK,IAAI,EAAE,CAAC;QAClD,MAAM,IAAI,KAAK,CAAC,gDAAgD,CAAC,CAAA;IACnE,CAAC;IAED,MAAM,OAAO,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,IAAA,wCAAkB,EAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAA;IACnE,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;QAC5B,gEAAgE;QAChE,oDAAoD;QACpD,qBAAqB;QACrB,IAAI,OAAO,EAAE,CAAC;YACZ,IAAI,OAAO,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAC9B,OAAO,IAAI,CAAA;YACb,CAAC;YACD,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBAC/B,MAAM,OAAO,CAAA;YACf,CAAC;QACH,CAAC;QACD,oBAAoB;QACpB,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE,CAAC;YACzC,OAAO,KAAK,CAAA;QACd,CAAC;QACD,IAAA,mCAAgB,EAAC,GAAG,EAAE,CAAC,aAAa,CAAC,IAAI,EAAE,GAAG,EAAE,kBAAkB,CAAC,CAAC,CAAA;QACpE,OAAO,IAAI,CAAA;IACb,CAAC;IAED,IAAI,UAAU,GAAG,IAAI,CAAA;IACrB,KAAK,MAAM,GAAG,IAAI,OAAO,EAAE,CAAC;QAC1B,MAAM,CAAC,GAAG,IAAA,cAAO,EAAC,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,CAAA;QACjC,UAAU,GAAG,uBAAuB,CAAC,CAAC,EAAE,GAAG,EAAE,GAAG,CAAC,IAAI,UAAU,CAAA;IACjE,CAAC;IACD,IAAI,CAAC,UAAU,EAAE,CAAC;QAChB,OAAO,KAAK,CAAA;IACd,CAAC;IACD,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,IAAI,IAAI,KAAK,IAAA,YAAK,EAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC;QAC5D,OAAO,KAAK,CAAA;IACd,CAAC;IACD,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE,CAAC;QACzC,OAAO,KAAK,CAAA;IACd,CAAC;IACD,IAAA,mCAAgB,EAAC,GAAG,EAAE,CAAC,aAAa,CAAC,IAAI,EAAE,GAAG,EAAE,iBAAS,CAAC,CAAC,CAAA;IAC3D,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,aAAa,GAAG,CACpB,IAAY,EACZ,GAAW,EACX,MAA2B,EAC3B,EAAE;IACF,MAAM,OAAO,GAAG,IAAA,cAAO,EAAC,GAAG,EAAE,cAAc,CAAC,IAAI,CAAC,CAAC,CAAA;IAClD,IAAA,kBAAU,EAAC,IAAI,EAAE,OAAO,CAAC,CAAA;IACzB,OAAO,MAAM,CAAC,OAAO,CAAC,CAAA;AACxB,CAAC,CAAA","sourcesContent":["// https://youtu.be/uhRWMGBjlO8?t=537\n//\n// 1. readdir\n// 2. for each entry\n// a. if a non-empty directory, recurse\n// b. if an empty directory, move to random hidden file name in $TEMP\n// c. 
unlink/rmdir $TEMP\n//\n// This works around the fact that unlink/rmdir is non-atomic and takes\n// a non-deterministic amount of time to complete.\n//\n// However, it is HELLA SLOW, like 2-10x slower than a naive recursive rm.\n\nimport { basename, parse, resolve } from 'path'\nimport { defaultTmp, defaultTmpSync } from './default-tmp.js'\n\nimport { ignoreENOENT, ignoreENOENTSync } from './ignore-enoent.js'\n\nimport {\n chmodSync,\n lstatSync,\n promises as fsPromises,\n renameSync,\n rmdirSync,\n unlinkSync,\n} from './fs.js'\nconst { lstat, rename, unlink, rmdir, chmod } = fsPromises\n\nimport { Dirent, Stats } from 'fs'\nimport { RimrafAsyncOptions, RimrafSyncOptions } from './index.js'\nimport { readdirOrError, readdirOrErrorSync } from './readdir-or-error.js'\n\n// crypto.randomBytes is much slower, and Math.random() is enough here\nconst uniqueFilename = (path: string) => `.${basename(path)}.${Math.random()}`\n\nconst unlinkFixEPERM = async (path: string) =>\n unlink(path).catch((er: Error & { code?: string }) => {\n if (er.code === 'EPERM') {\n return chmod(path, 0o666).then(\n () => unlink(path),\n er2 => {\n if (er2.code === 'ENOENT') {\n return\n }\n throw er\n },\n )\n } else if (er.code === 'ENOENT') {\n return\n }\n throw er\n })\n\nconst unlinkFixEPERMSync = (path: string) => {\n try {\n unlinkSync(path)\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'EPERM') {\n try {\n return chmodSync(path, 0o666)\n } catch (er2) {\n if ((er2 as NodeJS.ErrnoException)?.code === 'ENOENT') {\n return\n }\n throw er\n }\n } else if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') {\n return\n }\n throw er\n }\n}\n\nexport const rimrafMoveRemove = async (\n path: string,\n opt: RimrafAsyncOptions,\n) => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n try {\n return await rimrafMoveRemoveDir(path, opt, await lstat(path))\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') return true\n throw er\n }\n}\n\nconst rimrafMoveRemoveDir = async (\n path: string,\n opt: RimrafAsyncOptions,\n ent: Dirent | Stats,\n): Promise => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n if (!opt.tmp) {\n return rimrafMoveRemoveDir(\n path,\n { ...opt, tmp: await defaultTmp(path) },\n ent,\n )\n }\n if (path === opt.tmp && parse(path).root !== path) {\n throw new Error('cannot delete temp directory used for deletion')\n }\n\n const entries = ent.isDirectory() ? 
await readdirOrError(path) : null\n if (!Array.isArray(entries)) {\n // this can only happen if lstat/readdir lied, or if the dir was\n // swapped out with a file at just the right moment.\n /* c8 ignore start */\n if (entries) {\n if (entries.code === 'ENOENT') {\n return true\n }\n if (entries.code !== 'ENOTDIR') {\n throw entries\n }\n }\n /* c8 ignore stop */\n if (opt.filter && !(await opt.filter(path, ent))) {\n return false\n }\n await ignoreENOENT(tmpUnlink(path, opt.tmp, unlinkFixEPERM))\n return true\n }\n\n const removedAll = (\n await Promise.all(\n entries.map(ent =>\n rimrafMoveRemoveDir(resolve(path, ent.name), opt, ent),\n ),\n )\n ).reduce((a, b) => a && b, true)\n if (!removedAll) {\n return false\n }\n\n // we don't ever ACTUALLY try to unlink /, because that can never work\n // but when preserveRoot is false, we could be operating on it.\n // No need to check if preserveRoot is not false.\n if (opt.preserveRoot === false && path === parse(path).root) {\n return false\n }\n if (opt.filter && !(await opt.filter(path, ent))) {\n return false\n }\n await ignoreENOENT(tmpUnlink(path, opt.tmp, rmdir))\n return true\n}\n\nconst tmpUnlink = async (\n path: string,\n tmp: string,\n rm: (p: string) => Promise,\n) => {\n const tmpFile = resolve(tmp, uniqueFilename(path))\n await rename(path, tmpFile)\n return await rm(tmpFile)\n}\n\nexport const rimrafMoveRemoveSync = (path: string, opt: RimrafSyncOptions) => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n try {\n return rimrafMoveRemoveDirSync(path, opt, lstatSync(path))\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') return true\n throw er\n }\n}\n\nconst rimrafMoveRemoveDirSync = (\n path: string,\n opt: RimrafSyncOptions,\n ent: Dirent | Stats,\n): boolean => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n if (!opt.tmp) {\n return rimrafMoveRemoveDirSync(\n path,\n { ...opt, tmp: defaultTmpSync(path) },\n ent,\n )\n }\n const tmp: string = opt.tmp\n\n if (path === opt.tmp && parse(path).root !== path) {\n throw new Error('cannot delete temp directory used for deletion')\n }\n\n const entries = ent.isDirectory() ? readdirOrErrorSync(path) : null\n if (!Array.isArray(entries)) {\n // this can only happen if lstat/readdir lied, or if the dir was\n // swapped out with a file at just the right moment.\n /* c8 ignore start */\n if (entries) {\n if (entries.code === 'ENOENT') {\n return true\n }\n if (entries.code !== 'ENOTDIR') {\n throw entries\n }\n }\n /* c8 ignore stop */\n if (opt.filter && !opt.filter(path, ent)) {\n return false\n }\n ignoreENOENTSync(() => tmpUnlinkSync(path, tmp, unlinkFixEPERMSync))\n return true\n }\n\n let removedAll = true\n for (const ent of entries) {\n const p = resolve(path, ent.name)\n removedAll = rimrafMoveRemoveDirSync(p, opt, ent) && removedAll\n }\n if (!removedAll) {\n return false\n }\n if (opt.preserveRoot === false && path === parse(path).root) {\n return false\n }\n if (opt.filter && !opt.filter(path, ent)) {\n return false\n }\n ignoreENOENTSync(() => tmpUnlinkSync(path, tmp, rmdirSync))\n return true\n}\n\nconst tmpUnlinkSync = (\n path: string,\n tmp: string,\n rmSync: (p: string) => void,\n) => {\n const tmpFile = resolve(tmp, uniqueFilename(path))\n renameSync(path, tmpFile)\n return rmSync(tmpFile)\n}\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-native.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-native.d.ts
deleted file mode 100644
index cc84bf7ffd34d0..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-native.d.ts
+++ /dev/null
@@ -1,4 +0,0 @@
-import { RimrafAsyncOptions, RimrafSyncOptions } from './index.js';
-export declare const rimrafNative: (path: string, opt: RimrafAsyncOptions) => Promise<boolean>;
-export declare const rimrafNativeSync: (path: string, opt: RimrafSyncOptions) => boolean;
-//# sourceMappingURL=rimraf-native.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-native.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-native.d.ts.map
deleted file mode 100644
index bea6b79965192f..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-native.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"rimraf-native.d.ts","sourceRoot":"","sources":["../../src/rimraf-native.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,kBAAkB,EAAE,iBAAiB,EAAE,MAAM,YAAY,CAAA;AAIlE,eAAO,MAAM,YAAY,SACjB,MAAM,OACP,kBAAkB,KACtB,OAAO,CAAC,OAAO,CAOjB,CAAA;AAED,eAAO,MAAM,gBAAgB,SACrB,MAAM,OACP,iBAAiB,KACrB,OAOF,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-native.js b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-native.js
deleted file mode 100644
index ab9f633d7ca157..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-native.js
+++ /dev/null
@@ -1,24 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.rimrafNativeSync = exports.rimrafNative = void 0;
-const fs_js_1 = require("./fs.js");
-const { rm } = fs_js_1.promises;
-const rimrafNative = async (path, opt) => {
- await rm(path, {
- ...opt,
- force: true,
- recursive: true,
- });
- return true;
-};
-exports.rimrafNative = rimrafNative;
-const rimrafNativeSync = (path, opt) => {
- (0, fs_js_1.rmSync)(path, {
- ...opt,
- force: true,
- recursive: true,
- });
- return true;
-};
-exports.rimrafNativeSync = rimrafNativeSync;
-//# sourceMappingURL=rimraf-native.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-native.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-native.js.map
deleted file mode 100644
index 6eddd444e49a13..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-native.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"rimraf-native.js","sourceRoot":"","sources":["../../src/rimraf-native.ts"],"names":[],"mappings":";;;AACA,mCAA0C;AAC1C,MAAM,EAAE,EAAE,EAAE,GAAG,gBAAQ,CAAA;AAEhB,MAAM,YAAY,GAAG,KAAK,EAC/B,IAAY,EACZ,GAAuB,EACL,EAAE;IACpB,MAAM,EAAE,CAAC,IAAI,EAAE;QACb,GAAG,GAAG;QACN,KAAK,EAAE,IAAI;QACX,SAAS,EAAE,IAAI;KAChB,CAAC,CAAA;IACF,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAVY,QAAA,YAAY,gBAUxB;AAEM,MAAM,gBAAgB,GAAG,CAC9B,IAAY,EACZ,GAAsB,EACb,EAAE;IACX,IAAA,cAAM,EAAC,IAAI,EAAE;QACX,GAAG,GAAG;QACN,KAAK,EAAE,IAAI;QACX,SAAS,EAAE,IAAI;KAChB,CAAC,CAAA;IACF,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAVY,QAAA,gBAAgB,oBAU5B","sourcesContent":["import { RimrafAsyncOptions, RimrafSyncOptions } from './index.js'\nimport { promises, rmSync } from './fs.js'\nconst { rm } = promises\n\nexport const rimrafNative = async (\n path: string,\n opt: RimrafAsyncOptions,\n): Promise => {\n await rm(path, {\n ...opt,\n force: true,\n recursive: true,\n })\n return true\n}\n\nexport const rimrafNativeSync = (\n path: string,\n opt: RimrafSyncOptions,\n): boolean => {\n rmSync(path, {\n ...opt,\n force: true,\n recursive: true,\n })\n return true\n}\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-posix.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-posix.d.ts
deleted file mode 100644
index 8e532efe9aba21..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-posix.d.ts
+++ /dev/null
@@ -1,4 +0,0 @@
-import { RimrafAsyncOptions, RimrafSyncOptions } from './index.js';
-export declare const rimrafPosix: (path: string, opt: RimrafAsyncOptions) => Promise;
-export declare const rimrafPosixSync: (path: string, opt: RimrafSyncOptions) => boolean;
-//# sourceMappingURL=rimraf-posix.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-posix.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-posix.d.ts.map
deleted file mode 100644
index 3f9b8084ed470b..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-posix.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"rimraf-posix.d.ts","sourceRoot":"","sources":["../../src/rimraf-posix.ts"],"names":[],"mappings":"AAcA,OAAO,EAAE,kBAAkB,EAAE,iBAAiB,EAAE,MAAM,YAAY,CAAA;AAGlE,eAAO,MAAM,WAAW,SAAgB,MAAM,OAAO,kBAAkB,qBAUtE,CAAA;AAED,eAAO,MAAM,eAAe,SAAU,MAAM,OAAO,iBAAiB,YAUnE,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-posix.js b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-posix.js
deleted file mode 100644
index eb0e7f11680107..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-posix.js
+++ /dev/null
@@ -1,123 +0,0 @@
-"use strict";
-// the simple recursive removal, where unlink and rmdir are atomic
-// Note that this approach does NOT work on Windows!
-// We stat first and only unlink if the Dirent isn't a directory,
-// because sunos will let root unlink a directory, and some
-// SUPER weird breakage happens as a result.
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.rimrafPosixSync = exports.rimrafPosix = void 0;
-const fs_js_1 = require("./fs.js");
-const { lstat, rmdir, unlink } = fs_js_1.promises;
-const path_1 = require("path");
-const readdir_or_error_js_1 = require("./readdir-or-error.js");
-const ignore_enoent_js_1 = require("./ignore-enoent.js");
-const rimrafPosix = async (path, opt) => {
- if (opt?.signal?.aborted) {
- throw opt.signal.reason;
- }
- try {
- return await rimrafPosixDir(path, opt, await lstat(path));
- }
- catch (er) {
- if (er?.code === 'ENOENT')
- return true;
- throw er;
- }
-};
-exports.rimrafPosix = rimrafPosix;
-const rimrafPosixSync = (path, opt) => {
- if (opt?.signal?.aborted) {
- throw opt.signal.reason;
- }
- try {
- return rimrafPosixDirSync(path, opt, (0, fs_js_1.lstatSync)(path));
- }
- catch (er) {
- if (er?.code === 'ENOENT')
- return true;
- throw er;
- }
-};
-exports.rimrafPosixSync = rimrafPosixSync;
-const rimrafPosixDir = async (path, opt, ent) => {
- if (opt?.signal?.aborted) {
- throw opt.signal.reason;
- }
- const entries = ent.isDirectory() ? await (0, readdir_or_error_js_1.readdirOrError)(path) : null;
- if (!Array.isArray(entries)) {
- // this can only happen if lstat/readdir lied, or if the dir was
- // swapped out with a file at just the right moment.
- /* c8 ignore start */
- if (entries) {
- if (entries.code === 'ENOENT') {
- return true;
- }
- if (entries.code !== 'ENOTDIR') {
- throw entries;
- }
- }
- /* c8 ignore stop */
- if (opt.filter && !(await opt.filter(path, ent))) {
- return false;
- }
- await (0, ignore_enoent_js_1.ignoreENOENT)(unlink(path));
- return true;
- }
- const removedAll = (await Promise.all(entries.map(ent => rimrafPosixDir((0, path_1.resolve)(path, ent.name), opt, ent)))).reduce((a, b) => a && b, true);
- if (!removedAll) {
- return false;
- }
- // we don't ever ACTUALLY try to unlink /, because that can never work
- // but when preserveRoot is false, we could be operating on it.
- // No need to check if preserveRoot is not false.
- if (opt.preserveRoot === false && path === (0, path_1.parse)(path).root) {
- return false;
- }
- if (opt.filter && !(await opt.filter(path, ent))) {
- return false;
- }
- await (0, ignore_enoent_js_1.ignoreENOENT)(rmdir(path));
- return true;
-};
-const rimrafPosixDirSync = (path, opt, ent) => {
- if (opt?.signal?.aborted) {
- throw opt.signal.reason;
- }
- const entries = ent.isDirectory() ? (0, readdir_or_error_js_1.readdirOrErrorSync)(path) : null;
- if (!Array.isArray(entries)) {
- // this can only happen if lstat/readdir lied, or if the dir was
- // swapped out with a file at just the right moment.
- /* c8 ignore start */
- if (entries) {
- if (entries.code === 'ENOENT') {
- return true;
- }
- if (entries.code !== 'ENOTDIR') {
- throw entries;
- }
- }
- /* c8 ignore stop */
- if (opt.filter && !opt.filter(path, ent)) {
- return false;
- }
- (0, ignore_enoent_js_1.ignoreENOENTSync)(() => (0, fs_js_1.unlinkSync)(path));
- return true;
- }
- let removedAll = true;
- for (const ent of entries) {
- const p = (0, path_1.resolve)(path, ent.name);
- removedAll = rimrafPosixDirSync(p, opt, ent) && removedAll;
- }
- if (opt.preserveRoot === false && path === (0, path_1.parse)(path).root) {
- return false;
- }
- if (!removedAll) {
- return false;
- }
- if (opt.filter && !opt.filter(path, ent)) {
- return false;
- }
- (0, ignore_enoent_js_1.ignoreENOENTSync)(() => (0, fs_js_1.rmdirSync)(path));
- return true;
-};
-//# sourceMappingURL=rimraf-posix.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-posix.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-posix.js.map
deleted file mode 100644
index 32a366a54f7e3c..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-posix.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"rimraf-posix.js","sourceRoot":"","sources":["../../src/rimraf-posix.ts"],"names":[],"mappings":";AAAA,kEAAkE;AAClE,oDAAoD;AACpD,iEAAiE;AACjE,2DAA2D;AAC3D,4CAA4C;;;AAE5C,mCAAoE;AACpE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,GAAG,gBAAQ,CAAA;AAEzC,+BAAqC;AAErC,+DAA0E;AAI1E,yDAAmE;AAE5D,MAAM,WAAW,GAAG,KAAK,EAAE,IAAY,EAAE,GAAuB,EAAE,EAAE;IACzE,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC;QACH,OAAO,MAAM,cAAc,CAAC,IAAI,EAAE,GAAG,EAAE,MAAM,KAAK,CAAC,IAAI,CAAC,CAAC,CAAA;IAC3D,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ;YAAE,OAAO,IAAI,CAAA;QACjE,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAVY,QAAA,WAAW,eAUvB;AAEM,MAAM,eAAe,GAAG,CAAC,IAAY,EAAE,GAAsB,EAAE,EAAE;IACtE,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC;QACH,OAAO,kBAAkB,CAAC,IAAI,EAAE,GAAG,EAAE,IAAA,iBAAS,EAAC,IAAI,CAAC,CAAC,CAAA;IACvD,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ;YAAE,OAAO,IAAI,CAAA;QACjE,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAVY,QAAA,eAAe,mBAU3B;AAED,MAAM,cAAc,GAAG,KAAK,EAC1B,IAAY,EACZ,GAAuB,EACvB,GAAmB,EACD,EAAE;IACpB,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,MAAM,OAAO,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,MAAM,IAAA,oCAAc,EAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAA;IACrE,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;QAC5B,gEAAgE;QAChE,oDAAoD;QACpD,qBAAqB;QACrB,IAAI,OAAO,EAAE,CAAC;YACZ,IAAI,OAAO,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAC9B,OAAO,IAAI,CAAA;YACb,CAAC;YACD,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBAC/B,MAAM,OAAO,CAAA;YACf,CAAC;QACH,CAAC;QACD,oBAAoB;QACpB,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC;YACjD,OAAO,KAAK,CAAA;QACd,CAAC;QACD,MAAM,IAAA,+BAAY,EAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAA;QAChC,OAAO,IAAI,CAAA;IACb,CAAC;IAED,MAAM,UAAU,GAAG,CACjB,MAAM,OAAO,CAAC,GAAG,CACf,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,cAAc,CAAC,IAAA,cAAO,EAAC,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC,CACtE,CACF,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,CAAA;IAEhC,IAAI,CAAC,UAAU,EAAE,CAAC;QAChB,OAAO,KAAK,CAAA;IACd,CAAC;IAED,sEAAsE;IACtE,+DAA+D;IAC/D,iDAAiD;IACjD,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,IAAI,IAAI,KAAK,IAAA,YAAK,EAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC;QAC5D,OAAO,KAAK,CAAA;IACd,CAAC;IAED,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC;QACjD,OAAO,KAAK,CAAA;IACd,CAAC;IAED,MAAM,IAAA,+BAAY,EAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAA;IAC/B,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,kBAAkB,GAAG,CACzB,IAAY,EACZ,GAAsB,EACtB,GAAmB,EACV,EAAE;IACX,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,MAAM,OAAO,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,IAAA,wCAAkB,EAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAA;IACnE,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;QAC5B,gEAAgE;QAChE,oDAAoD;QACpD,qBAAqB;QACrB,IAAI,OAAO,EAAE,CAAC;YACZ,IAAI,OAAO,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAC9B,OAAO,IAAI,CAAA;YACb,CAAC;YACD,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBAC/B,MAAM,OAAO,CAAA;YACf,CAAC;QACH,CAAC;QACD,oBAAoB;QACpB,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE,CAAC;YACzC,OAAO,KAAK,CAAA;QACd,CAAC;QACD,IAAA,mCAAgB,EAAC,GAAG,EAAE,CAAC,IAAA,kBAAU,EAAC,IAAI,CAAC,CAAC,CAAA;QACxC,OAAO,IAAI,CAAA;IACb,CAAC;IACD,IAAI,UAAU,GAAY,IAAI,CAAA;IAC9B,KAAK,MAAM,GAAG,IAAI,OAAO,EAAE,CAAC;QAC1B,MAAM,CAAC,GAAG,IAAA,cAAO,EAAC,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,CAAA
;QACjC,UAAU,GAAG,kBAAkB,CAAC,CAAC,EAAE,GAAG,EAAE,GAAG,CAAC,IAAI,UAAU,CAAA;IAC5D,CAAC;IACD,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,IAAI,IAAI,KAAK,IAAA,YAAK,EAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC;QAC5D,OAAO,KAAK,CAAA;IACd,CAAC;IAED,IAAI,CAAC,UAAU,EAAE,CAAC;QAChB,OAAO,KAAK,CAAA;IACd,CAAC;IAED,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE,CAAC;QACzC,OAAO,KAAK,CAAA;IACd,CAAC;IAED,IAAA,mCAAgB,EAAC,GAAG,EAAE,CAAC,IAAA,iBAAS,EAAC,IAAI,CAAC,CAAC,CAAA;IACvC,OAAO,IAAI,CAAA;AACb,CAAC,CAAA","sourcesContent":["// the simple recursive removal, where unlink and rmdir are atomic\n// Note that this approach does NOT work on Windows!\n// We stat first and only unlink if the Dirent isn't a directory,\n// because sunos will let root unlink a directory, and some\n// SUPER weird breakage happens as a result.\n\nimport { lstatSync, promises, rmdirSync, unlinkSync } from './fs.js'\nconst { lstat, rmdir, unlink } = promises\n\nimport { parse, resolve } from 'path'\n\nimport { readdirOrError, readdirOrErrorSync } from './readdir-or-error.js'\n\nimport { Dirent, Stats } from 'fs'\nimport { RimrafAsyncOptions, RimrafSyncOptions } from './index.js'\nimport { ignoreENOENT, ignoreENOENTSync } from './ignore-enoent.js'\n\nexport const rimrafPosix = async (path: string, opt: RimrafAsyncOptions) => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n try {\n return await rimrafPosixDir(path, opt, await lstat(path))\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') return true\n throw er\n }\n}\n\nexport const rimrafPosixSync = (path: string, opt: RimrafSyncOptions) => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n try {\n return rimrafPosixDirSync(path, opt, lstatSync(path))\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') return true\n throw er\n }\n}\n\nconst rimrafPosixDir = async (\n path: string,\n opt: RimrafAsyncOptions,\n ent: Dirent | Stats,\n): Promise => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n const entries = ent.isDirectory() ? await readdirOrError(path) : null\n if (!Array.isArray(entries)) {\n // this can only happen if lstat/readdir lied, or if the dir was\n // swapped out with a file at just the right moment.\n /* c8 ignore start */\n if (entries) {\n if (entries.code === 'ENOENT') {\n return true\n }\n if (entries.code !== 'ENOTDIR') {\n throw entries\n }\n }\n /* c8 ignore stop */\n if (opt.filter && !(await opt.filter(path, ent))) {\n return false\n }\n await ignoreENOENT(unlink(path))\n return true\n }\n\n const removedAll = (\n await Promise.all(\n entries.map(ent => rimrafPosixDir(resolve(path, ent.name), opt, ent)),\n )\n ).reduce((a, b) => a && b, true)\n\n if (!removedAll) {\n return false\n }\n\n // we don't ever ACTUALLY try to unlink /, because that can never work\n // but when preserveRoot is false, we could be operating on it.\n // No need to check if preserveRoot is not false.\n if (opt.preserveRoot === false && path === parse(path).root) {\n return false\n }\n\n if (opt.filter && !(await opt.filter(path, ent))) {\n return false\n }\n\n await ignoreENOENT(rmdir(path))\n return true\n}\n\nconst rimrafPosixDirSync = (\n path: string,\n opt: RimrafSyncOptions,\n ent: Dirent | Stats,\n): boolean => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n const entries = ent.isDirectory() ? 
readdirOrErrorSync(path) : null\n if (!Array.isArray(entries)) {\n // this can only happen if lstat/readdir lied, or if the dir was\n // swapped out with a file at just the right moment.\n /* c8 ignore start */\n if (entries) {\n if (entries.code === 'ENOENT') {\n return true\n }\n if (entries.code !== 'ENOTDIR') {\n throw entries\n }\n }\n /* c8 ignore stop */\n if (opt.filter && !opt.filter(path, ent)) {\n return false\n }\n ignoreENOENTSync(() => unlinkSync(path))\n return true\n }\n let removedAll: boolean = true\n for (const ent of entries) {\n const p = resolve(path, ent.name)\n removedAll = rimrafPosixDirSync(p, opt, ent) && removedAll\n }\n if (opt.preserveRoot === false && path === parse(path).root) {\n return false\n }\n\n if (!removedAll) {\n return false\n }\n\n if (opt.filter && !opt.filter(path, ent)) {\n return false\n }\n\n ignoreENOENTSync(() => rmdirSync(path))\n return true\n}\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-windows.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-windows.d.ts
deleted file mode 100644
index 555689073ffe75..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-windows.d.ts
+++ /dev/null
@@ -1,4 +0,0 @@
-import { RimrafAsyncOptions, RimrafSyncOptions } from './index.js';
-export declare const rimrafWindows: (path: string, opt: RimrafAsyncOptions) => Promise;
-export declare const rimrafWindowsSync: (path: string, opt: RimrafSyncOptions) => boolean;
-//# sourceMappingURL=rimraf-windows.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-windows.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-windows.d.ts.map
deleted file mode 100644
index 56f00d9f2e3d13..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-windows.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"rimraf-windows.d.ts","sourceRoot":"","sources":["../../src/rimraf-windows.ts"],"names":[],"mappings":"AAYA,OAAO,EAAE,kBAAkB,EAAE,iBAAiB,EAAE,MAAM,YAAY,CAAA;AA2DlE,eAAO,MAAM,aAAa,SAAgB,MAAM,OAAO,kBAAkB,qBAUxE,CAAA;AAED,eAAO,MAAM,iBAAiB,SAAU,MAAM,OAAO,iBAAiB,YAUrE,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-windows.js b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-windows.js
deleted file mode 100644
index 8d19f98f963606..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-windows.js
+++ /dev/null
@@ -1,182 +0,0 @@
-"use strict";
-// This is the same as rimrafPosix, with the following changes:
-//
-// 1. EBUSY, ENFILE, EMFILE trigger retries and/or exponential backoff
-// 2. All non-directories are removed first and then all directories are
-// removed in a second sweep.
-// 3. If we hit ENOTEMPTY in the second sweep, fall back to move-remove on
-// the that folder.
-//
-// Note: "move then remove" is 2-10 times slower, and just as unreliable.
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.rimrafWindowsSync = exports.rimrafWindows = void 0;
-const path_1 = require("path");
-const fix_eperm_js_1 = require("./fix-eperm.js");
-const fs_js_1 = require("./fs.js");
-const ignore_enoent_js_1 = require("./ignore-enoent.js");
-const readdir_or_error_js_1 = require("./readdir-or-error.js");
-const retry_busy_js_1 = require("./retry-busy.js");
-const rimraf_move_remove_js_1 = require("./rimraf-move-remove.js");
-const { unlink, rmdir, lstat } = fs_js_1.promises;
-const rimrafWindowsFile = (0, retry_busy_js_1.retryBusy)((0, fix_eperm_js_1.fixEPERM)(unlink));
-const rimrafWindowsFileSync = (0, retry_busy_js_1.retryBusySync)((0, fix_eperm_js_1.fixEPERMSync)(fs_js_1.unlinkSync));
-const rimrafWindowsDirRetry = (0, retry_busy_js_1.retryBusy)((0, fix_eperm_js_1.fixEPERM)(rmdir));
-const rimrafWindowsDirRetrySync = (0, retry_busy_js_1.retryBusySync)((0, fix_eperm_js_1.fixEPERMSync)(fs_js_1.rmdirSync));
-const rimrafWindowsDirMoveRemoveFallback = async (path, opt) => {
- /* c8 ignore start */
- if (opt?.signal?.aborted) {
- throw opt.signal.reason;
- }
- /* c8 ignore stop */
- // already filtered, remove from options so we don't call unnecessarily
- const { filter, ...options } = opt;
- try {
- return await rimrafWindowsDirRetry(path, options);
- }
- catch (er) {
- if (er?.code === 'ENOTEMPTY') {
- return await (0, rimraf_move_remove_js_1.rimrafMoveRemove)(path, options);
- }
- throw er;
- }
-};
-const rimrafWindowsDirMoveRemoveFallbackSync = (path, opt) => {
- if (opt?.signal?.aborted) {
- throw opt.signal.reason;
- }
- // already filtered, remove from options so we don't call unnecessarily
- const { filter, ...options } = opt;
- try {
- return rimrafWindowsDirRetrySync(path, options);
- }
- catch (er) {
- const fer = er;
- if (fer?.code === 'ENOTEMPTY') {
- return (0, rimraf_move_remove_js_1.rimrafMoveRemoveSync)(path, options);
- }
- throw er;
- }
-};
-const START = Symbol('start');
-const CHILD = Symbol('child');
-const FINISH = Symbol('finish');
-const rimrafWindows = async (path, opt) => {
- if (opt?.signal?.aborted) {
- throw opt.signal.reason;
- }
- try {
- return await rimrafWindowsDir(path, opt, await lstat(path), START);
- }
- catch (er) {
- if (er?.code === 'ENOENT')
- return true;
- throw er;
- }
-};
-exports.rimrafWindows = rimrafWindows;
-const rimrafWindowsSync = (path, opt) => {
- if (opt?.signal?.aborted) {
- throw opt.signal.reason;
- }
- try {
- return rimrafWindowsDirSync(path, opt, (0, fs_js_1.lstatSync)(path), START);
- }
- catch (er) {
- if (er?.code === 'ENOENT')
- return true;
- throw er;
- }
-};
-exports.rimrafWindowsSync = rimrafWindowsSync;
-const rimrafWindowsDir = async (path, opt, ent, state = START) => {
- if (opt?.signal?.aborted) {
- throw opt.signal.reason;
- }
- const entries = ent.isDirectory() ? await (0, readdir_or_error_js_1.readdirOrError)(path) : null;
- if (!Array.isArray(entries)) {
- // this can only happen if lstat/readdir lied, or if the dir was
- // swapped out with a file at just the right moment.
- /* c8 ignore start */
- if (entries) {
- if (entries.code === 'ENOENT') {
- return true;
- }
- if (entries.code !== 'ENOTDIR') {
- throw entries;
- }
- }
- /* c8 ignore stop */
- if (opt.filter && !(await opt.filter(path, ent))) {
- return false;
- }
- // is a file
- await (0, ignore_enoent_js_1.ignoreENOENT)(rimrafWindowsFile(path, opt));
- return true;
- }
- const s = state === START ? CHILD : state;
- const removedAll = (await Promise.all(entries.map(ent => rimrafWindowsDir((0, path_1.resolve)(path, ent.name), opt, ent, s)))).reduce((a, b) => a && b, true);
- if (state === START) {
- return rimrafWindowsDir(path, opt, ent, FINISH);
- }
- else if (state === FINISH) {
- if (opt.preserveRoot === false && path === (0, path_1.parse)(path).root) {
- return false;
- }
- if (!removedAll) {
- return false;
- }
- if (opt.filter && !(await opt.filter(path, ent))) {
- return false;
- }
- await (0, ignore_enoent_js_1.ignoreENOENT)(rimrafWindowsDirMoveRemoveFallback(path, opt));
- }
- return true;
-};
-const rimrafWindowsDirSync = (path, opt, ent, state = START) => {
- const entries = ent.isDirectory() ? (0, readdir_or_error_js_1.readdirOrErrorSync)(path) : null;
- if (!Array.isArray(entries)) {
- // this can only happen if lstat/readdir lied, or if the dir was
- // swapped out with a file at just the right moment.
- /* c8 ignore start */
- if (entries) {
- if (entries.code === 'ENOENT') {
- return true;
- }
- if (entries.code !== 'ENOTDIR') {
- throw entries;
- }
- }
- /* c8 ignore stop */
- if (opt.filter && !opt.filter(path, ent)) {
- return false;
- }
- // is a file
- (0, ignore_enoent_js_1.ignoreENOENTSync)(() => rimrafWindowsFileSync(path, opt));
- return true;
- }
- let removedAll = true;
- for (const ent of entries) {
- const s = state === START ? CHILD : state;
- const p = (0, path_1.resolve)(path, ent.name);
- removedAll = rimrafWindowsDirSync(p, opt, ent, s) && removedAll;
- }
- if (state === START) {
- return rimrafWindowsDirSync(path, opt, ent, FINISH);
- }
- else if (state === FINISH) {
- if (opt.preserveRoot === false && path === (0, path_1.parse)(path).root) {
- return false;
- }
- if (!removedAll) {
- return false;
- }
- if (opt.filter && !opt.filter(path, ent)) {
- return false;
- }
- (0, ignore_enoent_js_1.ignoreENOENTSync)(() => {
- rimrafWindowsDirMoveRemoveFallbackSync(path, opt);
- });
- }
- return true;
-};
-//# sourceMappingURL=rimraf-windows.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-windows.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-windows.js.map
deleted file mode 100644
index 50a97f890d84aa..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-windows.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"rimraf-windows.js","sourceRoot":"","sources":["../../src/rimraf-windows.ts"],"names":[],"mappings":";AAAA,+DAA+D;AAC/D,EAAE;AACF,sEAAsE;AACtE,wEAAwE;AACxE,gCAAgC;AAChC,0EAA0E;AAC1E,sBAAsB;AACtB,EAAE;AACF,yEAAyE;;;AAGzE,+BAAqC;AAErC,iDAAuD;AACvD,mCAAoE;AACpE,yDAAmE;AACnE,+DAA0E;AAC1E,mDAA0D;AAC1D,mEAAgF;AAChF,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE,GAAG,gBAAQ,CAAA;AAEzC,MAAM,iBAAiB,GAAG,IAAA,yBAAS,EAAC,IAAA,uBAAQ,EAAC,MAAM,CAAC,CAAC,CAAA;AACrD,MAAM,qBAAqB,GAAG,IAAA,6BAAa,EAAC,IAAA,2BAAY,EAAC,kBAAU,CAAC,CAAC,CAAA;AACrE,MAAM,qBAAqB,GAAG,IAAA,yBAAS,EAAC,IAAA,uBAAQ,EAAC,KAAK,CAAC,CAAC,CAAA;AACxD,MAAM,yBAAyB,GAAG,IAAA,6BAAa,EAAC,IAAA,2BAAY,EAAC,iBAAS,CAAC,CAAC,CAAA;AAExE,MAAM,kCAAkC,GAAG,KAAK,EAC9C,IAAY,EACZ,GAAuB,EACL,EAAE;IACpB,qBAAqB;IACrB,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,oBAAoB;IACpB,uEAAuE;IACvE,MAAM,EAAE,MAAM,EAAE,GAAG,OAAO,EAAE,GAAG,GAAG,CAAA;IAClC,IAAI,CAAC;QACH,OAAO,MAAM,qBAAqB,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;IACnD,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,WAAW,EAAE,CAAC;YACxD,OAAO,MAAM,IAAA,wCAAgB,EAAC,IAAI,EAAE,OAAO,CAAC,CAAA;QAC9C,CAAC;QACD,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAED,MAAM,sCAAsC,GAAG,CAC7C,IAAY,EACZ,GAAsB,EACb,EAAE;IACX,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,uEAAuE;IACvE,MAAM,EAAE,MAAM,EAAE,GAAG,OAAO,EAAE,GAAG,GAAG,CAAA;IAClC,IAAI,CAAC;QACH,OAAO,yBAAyB,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;IACjD,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,IAAI,GAAG,EAAE,IAAI,KAAK,WAAW,EAAE,CAAC;YAC9B,OAAO,IAAA,4CAAoB,EAAC,IAAI,EAAE,OAAO,CAAC,CAAA;QAC5C,CAAC;QACD,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAED,MAAM,KAAK,GAAG,MAAM,CAAC,OAAO,CAAC,CAAA;AAC7B,MAAM,KAAK,GAAG,MAAM,CAAC,OAAO,CAAC,CAAA;AAC7B,MAAM,MAAM,GAAG,MAAM,CAAC,QAAQ,CAAC,CAAA;AAExB,MAAM,aAAa,GAAG,KAAK,EAAE,IAAY,EAAE,GAAuB,EAAE,EAAE;IAC3E,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC;QACH,OAAO,MAAM,gBAAgB,CAAC,IAAI,EAAE,GAAG,EAAE,MAAM,KAAK,CAAC,IAAI,CAAC,EAAE,KAAK,CAAC,CAAA;IACpE,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ;YAAE,OAAO,IAAI,CAAA;QACjE,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAVY,QAAA,aAAa,iBAUzB;AAEM,MAAM,iBAAiB,GAAG,CAAC,IAAY,EAAE,GAAsB,EAAE,EAAE;IACxE,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC;QACH,OAAO,oBAAoB,CAAC,IAAI,EAAE,GAAG,EAAE,IAAA,iBAAS,EAAC,IAAI,CAAC,EAAE,KAAK,CAAC,CAAA;IAChE,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ;YAAE,OAAO,IAAI,CAAA;QACjE,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAVY,QAAA,iBAAiB,qBAU7B;AAED,MAAM,gBAAgB,GAAG,KAAK,EAC5B,IAAY,EACZ,GAAuB,EACvB,GAAmB,EACnB,KAAK,GAAG,KAAK,EACK,EAAE;IACpB,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IAED,MAAM,OAAO,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,MAAM,IAAA,oCAAc,EAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAA;IACrE,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;QAC5B,gEAAgE;QAChE,oDAAoD;QACpD,qBAAqB;QACrB,IAAI,OAAO,EAAE,CAAC;YACZ,IAAI,OAAO,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAC9B,OAAO,IAAI,CAAA;YACb,CAAC;YACD,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBAC/B,MAAM,OAAO,CAAA;YACf,CAAC;QACH,CAAC;QACD,oBAAoB;QACpB,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC;YACjD,OAAO,KAAK,CAAA;QACd,CAAC;QACD,YAAY;QACZ,MAAM,IAAA,+BAAY,EAAC,iBAAiB,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAA;QAChD,OAAO,IAAI,CAAA;IACb,CAAC;IAED,MAAM,CAAC,GAAG,KAAK,KAAK,K
AAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAA;IACzC,MAAM,UAAU,GAAG,CACjB,MAAM,OAAO,CAAC,GAAG,CACf,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAChB,gBAAgB,CAAC,IAAA,cAAO,EAAC,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,EAAE,GAAG,EAAE,GAAG,EAAE,CAAC,CAAC,CACvD,CACF,CACF,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,CAAA;IAEhC,IAAI,KAAK,KAAK,KAAK,EAAE,CAAC;QACpB,OAAO,gBAAgB,CAAC,IAAI,EAAE,GAAG,EAAE,GAAG,EAAE,MAAM,CAAC,CAAA;IACjD,CAAC;SAAM,IAAI,KAAK,KAAK,MAAM,EAAE,CAAC;QAC5B,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,IAAI,IAAI,KAAK,IAAA,YAAK,EAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC;YAC5D,OAAO,KAAK,CAAA;QACd,CAAC;QACD,IAAI,CAAC,UAAU,EAAE,CAAC;YAChB,OAAO,KAAK,CAAA;QACd,CAAC;QACD,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC;YACjD,OAAO,KAAK,CAAA;QACd,CAAC;QACD,MAAM,IAAA,+BAAY,EAAC,kCAAkC,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAA;IACnE,CAAC;IACD,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,oBAAoB,GAAG,CAC3B,IAAY,EACZ,GAAsB,EACtB,GAAmB,EACnB,KAAK,GAAG,KAAK,EACJ,EAAE;IACX,MAAM,OAAO,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,IAAA,wCAAkB,EAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAA;IACnE,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;QAC5B,gEAAgE;QAChE,oDAAoD;QACpD,qBAAqB;QACrB,IAAI,OAAO,EAAE,CAAC;YACZ,IAAI,OAAO,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAC9B,OAAO,IAAI,CAAA;YACb,CAAC;YACD,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBAC/B,MAAM,OAAO,CAAA;YACf,CAAC;QACH,CAAC;QACD,oBAAoB;QACpB,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE,CAAC;YACzC,OAAO,KAAK,CAAA;QACd,CAAC;QACD,YAAY;QACZ,IAAA,mCAAgB,EAAC,GAAG,EAAE,CAAC,qBAAqB,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAA;QACxD,OAAO,IAAI,CAAA;IACb,CAAC;IAED,IAAI,UAAU,GAAG,IAAI,CAAA;IACrB,KAAK,MAAM,GAAG,IAAI,OAAO,EAAE,CAAC;QAC1B,MAAM,CAAC,GAAG,KAAK,KAAK,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAA;QACzC,MAAM,CAAC,GAAG,IAAA,cAAO,EAAC,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,CAAA;QACjC,UAAU,GAAG,oBAAoB,CAAC,CAAC,EAAE,GAAG,EAAE,GAAG,EAAE,CAAC,CAAC,IAAI,UAAU,CAAA;IACjE,CAAC;IAED,IAAI,KAAK,KAAK,KAAK,EAAE,CAAC;QACpB,OAAO,oBAAoB,CAAC,IAAI,EAAE,GAAG,EAAE,GAAG,EAAE,MAAM,CAAC,CAAA;IACrD,CAAC;SAAM,IAAI,KAAK,KAAK,MAAM,EAAE,CAAC;QAC5B,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,IAAI,IAAI,KAAK,IAAA,YAAK,EAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC;YAC5D,OAAO,KAAK,CAAA;QACd,CAAC;QACD,IAAI,CAAC,UAAU,EAAE,CAAC;YAChB,OAAO,KAAK,CAAA;QACd,CAAC;QACD,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE,CAAC;YACzC,OAAO,KAAK,CAAA;QACd,CAAC;QACD,IAAA,mCAAgB,EAAC,GAAG,EAAE;YACpB,sCAAsC,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;QACnD,CAAC,CAAC,CAAA;IACJ,CAAC;IACD,OAAO,IAAI,CAAA;AACb,CAAC,CAAA","sourcesContent":["// This is the same as rimrafPosix, with the following changes:\n//\n// 1. EBUSY, ENFILE, EMFILE trigger retries and/or exponential backoff\n// 2. All non-directories are removed first and then all directories are\n// removed in a second sweep.\n// 3. 
If we hit ENOTEMPTY in the second sweep, fall back to move-remove on\n// the that folder.\n//\n// Note: \"move then remove\" is 2-10 times slower, and just as unreliable.\n\nimport { Dirent, Stats } from 'fs'\nimport { parse, resolve } from 'path'\nimport { RimrafAsyncOptions, RimrafSyncOptions } from './index.js'\nimport { fixEPERM, fixEPERMSync } from './fix-eperm.js'\nimport { lstatSync, promises, rmdirSync, unlinkSync } from './fs.js'\nimport { ignoreENOENT, ignoreENOENTSync } from './ignore-enoent.js'\nimport { readdirOrError, readdirOrErrorSync } from './readdir-or-error.js'\nimport { retryBusy, retryBusySync } from './retry-busy.js'\nimport { rimrafMoveRemove, rimrafMoveRemoveSync } from './rimraf-move-remove.js'\nconst { unlink, rmdir, lstat } = promises\n\nconst rimrafWindowsFile = retryBusy(fixEPERM(unlink))\nconst rimrafWindowsFileSync = retryBusySync(fixEPERMSync(unlinkSync))\nconst rimrafWindowsDirRetry = retryBusy(fixEPERM(rmdir))\nconst rimrafWindowsDirRetrySync = retryBusySync(fixEPERMSync(rmdirSync))\n\nconst rimrafWindowsDirMoveRemoveFallback = async (\n path: string,\n opt: RimrafAsyncOptions,\n): Promise => {\n /* c8 ignore start */\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n /* c8 ignore stop */\n // already filtered, remove from options so we don't call unnecessarily\n const { filter, ...options } = opt\n try {\n return await rimrafWindowsDirRetry(path, options)\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'ENOTEMPTY') {\n return await rimrafMoveRemove(path, options)\n }\n throw er\n }\n}\n\nconst rimrafWindowsDirMoveRemoveFallbackSync = (\n path: string,\n opt: RimrafSyncOptions,\n): boolean => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n // already filtered, remove from options so we don't call unnecessarily\n const { filter, ...options } = opt\n try {\n return rimrafWindowsDirRetrySync(path, options)\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n if (fer?.code === 'ENOTEMPTY') {\n return rimrafMoveRemoveSync(path, options)\n }\n throw er\n }\n}\n\nconst START = Symbol('start')\nconst CHILD = Symbol('child')\nconst FINISH = Symbol('finish')\n\nexport const rimrafWindows = async (path: string, opt: RimrafAsyncOptions) => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n try {\n return await rimrafWindowsDir(path, opt, await lstat(path), START)\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') return true\n throw er\n }\n}\n\nexport const rimrafWindowsSync = (path: string, opt: RimrafSyncOptions) => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n try {\n return rimrafWindowsDirSync(path, opt, lstatSync(path), START)\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') return true\n throw er\n }\n}\n\nconst rimrafWindowsDir = async (\n path: string,\n opt: RimrafAsyncOptions,\n ent: Dirent | Stats,\n state = START,\n): Promise => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n\n const entries = ent.isDirectory() ? 
await readdirOrError(path) : null\n if (!Array.isArray(entries)) {\n // this can only happen if lstat/readdir lied, or if the dir was\n // swapped out with a file at just the right moment.\n /* c8 ignore start */\n if (entries) {\n if (entries.code === 'ENOENT') {\n return true\n }\n if (entries.code !== 'ENOTDIR') {\n throw entries\n }\n }\n /* c8 ignore stop */\n if (opt.filter && !(await opt.filter(path, ent))) {\n return false\n }\n // is a file\n await ignoreENOENT(rimrafWindowsFile(path, opt))\n return true\n }\n\n const s = state === START ? CHILD : state\n const removedAll = (\n await Promise.all(\n entries.map(ent =>\n rimrafWindowsDir(resolve(path, ent.name), opt, ent, s),\n ),\n )\n ).reduce((a, b) => a && b, true)\n\n if (state === START) {\n return rimrafWindowsDir(path, opt, ent, FINISH)\n } else if (state === FINISH) {\n if (opt.preserveRoot === false && path === parse(path).root) {\n return false\n }\n if (!removedAll) {\n return false\n }\n if (opt.filter && !(await opt.filter(path, ent))) {\n return false\n }\n await ignoreENOENT(rimrafWindowsDirMoveRemoveFallback(path, opt))\n }\n return true\n}\n\nconst rimrafWindowsDirSync = (\n path: string,\n opt: RimrafSyncOptions,\n ent: Dirent | Stats,\n state = START,\n): boolean => {\n const entries = ent.isDirectory() ? readdirOrErrorSync(path) : null\n if (!Array.isArray(entries)) {\n // this can only happen if lstat/readdir lied, or if the dir was\n // swapped out with a file at just the right moment.\n /* c8 ignore start */\n if (entries) {\n if (entries.code === 'ENOENT') {\n return true\n }\n if (entries.code !== 'ENOTDIR') {\n throw entries\n }\n }\n /* c8 ignore stop */\n if (opt.filter && !opt.filter(path, ent)) {\n return false\n }\n // is a file\n ignoreENOENTSync(() => rimrafWindowsFileSync(path, opt))\n return true\n }\n\n let removedAll = true\n for (const ent of entries) {\n const s = state === START ? CHILD : state\n const p = resolve(path, ent.name)\n removedAll = rimrafWindowsDirSync(p, opt, ent, s) && removedAll\n }\n\n if (state === START) {\n return rimrafWindowsDirSync(path, opt, ent, FINISH)\n } else if (state === FINISH) {\n if (opt.preserveRoot === false && path === parse(path).root) {\n return false\n }\n if (!removedAll) {\n return false\n }\n if (opt.filter && !opt.filter(path, ent)) {\n return false\n }\n ignoreENOENTSync(() => {\n rimrafWindowsDirMoveRemoveFallbackSync(path, opt)\n })\n }\n return true\n}\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/use-native.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/use-native.d.ts
deleted file mode 100644
index e191fd90da93d3..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/use-native.d.ts
+++ /dev/null
@@ -1,4 +0,0 @@
-import { RimrafAsyncOptions, RimrafOptions } from './index.js';
-export declare const useNative: (opt?: RimrafAsyncOptions) => boolean;
-export declare const useNativeSync: (opt?: RimrafOptions) => boolean;
-//# sourceMappingURL=use-native.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/use-native.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/use-native.d.ts.map
deleted file mode 100644
index b182beb1707a7d..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/use-native.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"use-native.d.ts","sourceRoot":"","sources":["../../src/use-native.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,kBAAkB,EAAE,aAAa,EAAE,MAAM,YAAY,CAAA;AAa9D,eAAO,MAAM,SAAS,EAAE,CAAC,GAAG,CAAC,EAAE,kBAAkB,KAAK,OAGf,CAAA;AACvC,eAAO,MAAM,aAAa,EAAE,CAAC,GAAG,CAAC,EAAE,aAAa,KAAK,OAGd,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/use-native.js b/deps/npm/node_modules/rimraf/dist/commonjs/use-native.js
deleted file mode 100644
index 1f668768d96bcb..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/use-native.js
+++ /dev/null
@@ -1,22 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.useNativeSync = exports.useNative = void 0;
-const platform_js_1 = __importDefault(require("./platform.js"));
-const version = process.env.__TESTING_RIMRAF_NODE_VERSION__ || process.version;
-const versArr = version.replace(/^v/, '').split('.');
-/* c8 ignore start */
-const [major = 0, minor = 0] = versArr.map(v => parseInt(v, 10));
-/* c8 ignore stop */
-const hasNative = major > 14 || (major === 14 && minor >= 14);
-// we do NOT use native by default on Windows, because Node's native
-// rm implementation is less advanced. Change this code if that changes.
-exports.useNative = !hasNative || platform_js_1.default === 'win32' ?
- () => false
- : opt => !opt?.signal && !opt?.filter;
-exports.useNativeSync = !hasNative || platform_js_1.default === 'win32' ?
- () => false
- : opt => !opt?.signal && !opt?.filter;
-//# sourceMappingURL=use-native.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/use-native.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/use-native.js.map
deleted file mode 100644
index a89b8db7e68352..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/commonjs/use-native.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"use-native.js","sourceRoot":"","sources":["../../src/use-native.ts"],"names":[],"mappings":";;;;;;AACA,gEAAoC;AAEpC,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,+BAA+B,IAAI,OAAO,CAAC,OAAO,CAAA;AAC9E,MAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;AAEpD,qBAAqB;AACrB,MAAM,CAAC,KAAK,GAAG,CAAC,EAAE,KAAK,GAAG,CAAC,CAAC,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAA;AAChE,oBAAoB;AACpB,MAAM,SAAS,GAAG,KAAK,GAAG,EAAE,IAAI,CAAC,KAAK,KAAK,EAAE,IAAI,KAAK,IAAI,EAAE,CAAC,CAAA;AAE7D,oEAAoE;AACpE,yEAAyE;AAC5D,QAAA,SAAS,GACpB,CAAC,SAAS,IAAI,qBAAQ,KAAK,OAAO,CAAC,CAAC;IAClC,GAAG,EAAE,CAAC,KAAK;IACb,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,GAAG,EAAE,MAAM,IAAI,CAAC,GAAG,EAAE,MAAM,CAAA;AAC1B,QAAA,aAAa,GACxB,CAAC,SAAS,IAAI,qBAAQ,KAAK,OAAO,CAAC,CAAC;IAClC,GAAG,EAAE,CAAC,KAAK;IACb,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,GAAG,EAAE,MAAM,IAAI,CAAC,GAAG,EAAE,MAAM,CAAA","sourcesContent":["import { RimrafAsyncOptions, RimrafOptions } from './index.js'\nimport platform from './platform.js'\n\nconst version = process.env.__TESTING_RIMRAF_NODE_VERSION__ || process.version\nconst versArr = version.replace(/^v/, '').split('.')\n\n/* c8 ignore start */\nconst [major = 0, minor = 0] = versArr.map(v => parseInt(v, 10))\n/* c8 ignore stop */\nconst hasNative = major > 14 || (major === 14 && minor >= 14)\n\n// we do NOT use native by default on Windows, because Node's native\n// rm implementation is less advanced. Change this code if that changes.\nexport const useNative: (opt?: RimrafAsyncOptions) => boolean =\n !hasNative || platform === 'win32' ?\n () => false\n : opt => !opt?.signal && !opt?.filter\nexport const useNativeSync: (opt?: RimrafOptions) => boolean =\n !hasNative || platform === 'win32' ?\n () => false\n : opt => !opt?.signal && !opt?.filter\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/bin.d.mts b/deps/npm/node_modules/rimraf/dist/esm/bin.d.mts
deleted file mode 100644
index 5600d7c766e6d6..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/bin.d.mts
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/usr/bin/env node
-export declare const help: string;
-declare const main: {
- (...args: string[]): Promise<1 | 0>;
- help: string;
-};
-export default main;
-//# sourceMappingURL=bin.d.mts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/bin.d.mts.map b/deps/npm/node_modules/rimraf/dist/esm/bin.d.mts.map
deleted file mode 100644
index a5f1ec2cb6c8e8..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/bin.d.mts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"bin.d.mts","sourceRoot":"","sources":["../../src/bin.mts"],"names":[],"mappings":";AAcA,eAAO,MAAM,IAAI,QAkChB,CAAA;AA8ED,QAAA,MAAM,IAAI;cAAmB,MAAM,EAAE;;CAoIpC,CAAA;AAGD,eAAe,IAAI,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/bin.mjs b/deps/npm/node_modules/rimraf/dist/esm/bin.mjs
deleted file mode 100755
index 4aea35e9c43256..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/bin.mjs
+++ /dev/null
@@ -1,256 +0,0 @@
-#!/usr/bin/env node
-import { readFile } from 'fs/promises';
-import { rimraf } from './index.js';
-const pj = fileURLToPath(new URL('../package.json', import.meta.url));
-const pjDist = fileURLToPath(new URL('../../package.json', import.meta.url));
-const { version } = JSON.parse(await readFile(pjDist, 'utf8').catch(() => readFile(pj, 'utf8')));
-const runHelpForUsage = () => console.error('run `rimraf --help` for usage information');
-export const help = `rimraf version ${version}
-
-Usage: rimraf [ ...]
-Deletes all files and folders at "path", recursively.
-
-Options:
- -- Treat all subsequent arguments as paths
- -h --help Display this usage info
- --preserve-root Do not remove '/' recursively (default)
- --no-preserve-root Do not treat '/' specially
- -G --no-glob Treat arguments as literal paths, not globs (default)
- -g --glob Treat arguments as glob patterns
- -v --verbose Be verbose when deleting files, showing them as
- they are removed. Not compatible with --impl=native
- -V --no-verbose Be silent when deleting files, showing nothing as
- they are removed (default)
- -i --interactive Ask for confirmation before deleting anything
- Not compatible with --impl=native
- -I --no-interactive Do not ask for confirmation before deleting
-
- --impl= Specify the implementation to use:
- rimraf: choose the best option (default)
- native: the built-in implementation in Node.js
- manual: the platform-specific JS implementation
- posix: the Posix JS implementation
- windows: the Windows JS implementation (falls back to
- move-remove on ENOTEMPTY)
- move-remove: a slow reliable Windows fallback
-
-Implementation-specific options:
- --tmp= Temp file folder for 'move-remove' implementation
- --max-retries= maxRetries for 'native' and 'windows' implementations
- --retry-delay= retryDelay for 'native' implementation, default 100
- --backoff= Exponential backoff factor for retries (default: 1.2)
-`;
-import { parse, relative, resolve } from 'path';
-const cwd = process.cwd();
-import { createInterface } from 'readline';
-import { fileURLToPath } from 'url';
-const prompt = async (rl, q) => new Promise(res => rl.question(q, res));
-const interactiveRimraf = async (impl, paths, opt) => {
- const existingFilter = opt.filter || (() => true);
- let allRemaining = false;
- let noneRemaining = false;
- const queue = [];
- let processing = false;
- const processQueue = async () => {
- if (processing)
- return;
- processing = true;
- let next;
- while ((next = queue.shift())) {
- await next();
- }
- processing = false;
- };
- const oneAtATime = (fn) => async (s, e) => {
- const p = new Promise(res => {
- queue.push(async () => {
- const result = await fn(s, e);
- res(result);
- return result;
- });
- });
- processQueue();
- return p;
- };
- const rl = createInterface({
- input: process.stdin,
- output: process.stdout,
- });
- opt.filter = oneAtATime(async (path, ent) => {
- if (noneRemaining) {
- return false;
- }
- while (!allRemaining) {
- const a = (await prompt(rl, `rm? ${relative(cwd, path)}\n[(Yes)/No/All/Quit] > `)).trim();
- if (/^n/i.test(a)) {
- return false;
- }
- else if (/^a/i.test(a)) {
- allRemaining = true;
- break;
- }
- else if (/^q/i.test(a)) {
- noneRemaining = true;
- return false;
- }
- else if (a === '' || /^y/i.test(a)) {
- break;
- }
- else {
- continue;
- }
- }
- return existingFilter(path, ent);
- });
- await impl(paths, opt);
- rl.close();
-};
-const main = async (...args) => {
- const verboseFilter = (s) => {
- console.log(relative(cwd, s));
- return true;
- };
- if (process.env.__RIMRAF_TESTING_BIN_FAIL__ === '1') {
- throw new Error('simulated rimraf failure');
- }
- const opt = {};
- const paths = [];
- let dashdash = false;
- let impl = rimraf;
- let interactive = false;
- for (const arg of args) {
- if (dashdash) {
- paths.push(arg);
- continue;
- }
- if (arg === '--') {
- dashdash = true;
- continue;
- }
- else if (arg === '-rf' || arg === '-fr') {
- // this never did anything, but people put it there I guess
- continue;
- }
- else if (arg === '-h' || arg === '--help') {
- console.log(help);
- return 0;
- }
- else if (arg === '--interactive' || arg === '-i') {
- interactive = true;
- continue;
- }
- else if (arg === '--no-interactive' || arg === '-I') {
- interactive = false;
- continue;
- }
- else if (arg === '--verbose' || arg === '-v') {
- opt.filter = verboseFilter;
- continue;
- }
- else if (arg === '--no-verbose' || arg === '-V') {
- opt.filter = undefined;
- continue;
- }
- else if (arg === '-g' || arg === '--glob') {
- opt.glob = true;
- continue;
- }
- else if (arg === '-G' || arg === '--no-glob') {
- opt.glob = false;
- continue;
- }
- else if (arg === '--preserve-root') {
- opt.preserveRoot = true;
- continue;
- }
- else if (arg === '--no-preserve-root') {
- opt.preserveRoot = false;
- continue;
- }
- else if (/^--tmp=/.test(arg)) {
- const val = arg.substring('--tmp='.length);
- opt.tmp = val;
- continue;
- }
- else if (/^--max-retries=/.test(arg)) {
- const val = +arg.substring('--max-retries='.length);
- opt.maxRetries = val;
- continue;
- }
- else if (/^--retry-delay=/.test(arg)) {
- const val = +arg.substring('--retry-delay='.length);
- opt.retryDelay = val;
- continue;
- }
- else if (/^--backoff=/.test(arg)) {
- const val = +arg.substring('--backoff='.length);
- opt.backoff = val;
- continue;
- }
- else if (/^--impl=/.test(arg)) {
- const val = arg.substring('--impl='.length);
- switch (val) {
- case 'rimraf':
- impl = rimraf;
- continue;
- case 'native':
- case 'manual':
- case 'posix':
- case 'windows':
- impl = rimraf[val];
- continue;
- case 'move-remove':
- impl = rimraf.moveRemove;
- continue;
- default:
- console.error(`unknown implementation: ${val}`);
- runHelpForUsage();
- return 1;
- }
- }
- else if (/^-/.test(arg)) {
- console.error(`unknown option: ${arg}`);
- runHelpForUsage();
- return 1;
- }
- else {
- paths.push(arg);
- }
- }
- if (opt.preserveRoot !== false) {
- for (const path of paths.map(p => resolve(p))) {
- if (path === parse(path).root) {
- console.error(`rimraf: it is dangerous to operate recursively on '/'`);
- console.error('use --no-preserve-root to override this failsafe');
- return 1;
- }
- }
- }
- if (!paths.length) {
- console.error('rimraf: must provide a path to remove');
- runHelpForUsage();
- return 1;
- }
- if (impl === rimraf.native && (interactive || opt.filter)) {
- console.error('native implementation does not support -v or -i');
- runHelpForUsage();
- return 1;
- }
- if (interactive) {
- await interactiveRimraf(impl, paths, opt);
- }
- else {
- await impl(paths, opt);
- }
- return 0;
-};
-main.help = help;
-export default main;
-if (process.env.__TESTING_RIMRAF_BIN__ !== '1') {
- const args = process.argv.slice(2);
- main(...args).then(code => process.exit(code), er => {
- console.error(er);
- process.exit(1);
- });
-}
-//# sourceMappingURL=bin.mjs.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/bin.mjs.map b/deps/npm/node_modules/rimraf/dist/esm/bin.mjs.map
deleted file mode 100644
index 163fc96df5b6a2..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/bin.mjs.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"bin.mjs","sourceRoot":"","sources":["../../src/bin.mts"],"names":[],"mappings":";AACA,OAAO,EAAE,QAAQ,EAAE,MAAM,aAAa,CAAA;AAEtC,OAAO,EAAE,MAAM,EAAE,MAAM,YAAY,CAAA;AAEnC,MAAM,EAAE,GAAG,aAAa,CAAC,IAAI,GAAG,CAAC,iBAAiB,EAAE,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAA;AACrE,MAAM,MAAM,GAAG,aAAa,CAAC,IAAI,GAAG,CAAC,oBAAoB,EAAE,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAA;AAC5E,MAAM,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,KAAK,CAC5B,MAAM,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,QAAQ,CAAC,EAAE,EAAE,MAAM,CAAC,CAAC,CAC1C,CAAA;AAExB,MAAM,eAAe,GAAG,GAAG,EAAE,CAC3B,OAAO,CAAC,KAAK,CAAC,2CAA2C,CAAC,CAAA;AAE5D,MAAM,CAAC,MAAM,IAAI,GAAG,kBAAkB,OAAO;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAkC5C,CAAA;AAED,OAAO,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,MAAM,CAAA;AAC/C,MAAM,GAAG,GAAG,OAAO,CAAC,GAAG,EAAE,CAAA;AAGzB,OAAO,EAAE,eAAe,EAAa,MAAM,UAAU,CAAA;AACrD,OAAO,EAAE,aAAa,EAAE,MAAM,KAAK,CAAA;AAEnC,MAAM,MAAM,GAAG,KAAK,EAAE,EAAa,EAAE,CAAS,EAAE,EAAE,CAChD,IAAI,OAAO,CAAS,GAAG,CAAC,EAAE,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAA;AAEjD,MAAM,iBAAiB,GAAG,KAAK,EAC7B,IAA6E,EAC7E,KAAe,EACf,GAAuB,EACvB,EAAE;IACF,MAAM,cAAc,GAAG,GAAG,CAAC,MAAM,IAAI,CAAC,GAAG,EAAE,CAAC,IAAI,CAAC,CAAA;IACjD,IAAI,YAAY,GAAG,KAAK,CAAA;IACxB,IAAI,aAAa,GAAG,KAAK,CAAA;IACzB,MAAM,KAAK,GAA+B,EAAE,CAAA;IAC5C,IAAI,UAAU,GAAG,KAAK,CAAA;IACtB,MAAM,YAAY,GAAG,KAAK,IAAI,EAAE;QAC9B,IAAI,UAAU;YAAE,OAAM;QACtB,UAAU,GAAG,IAAI,CAAA;QACjB,IAAI,IAA0C,CAAA;QAC9C,OAAO,CAAC,IAAI,GAAG,KAAK,CAAC,KAAK,EAAE,CAAC,EAAE,CAAC;YAC9B,MAAM,IAAI,EAAE,CAAA;QACd,CAAC;QACD,UAAU,GAAG,KAAK,CAAA;IACpB,CAAC,CAAA;IACD,MAAM,UAAU,GACd,CAAC,EAAsD,EAAE,EAAE,CAC3D,KAAK,EAAE,CAAS,EAAE,CAAiB,EAAoB,EAAE;QACvD,MAAM,CAAC,GAAG,IAAI,OAAO,CAAU,GAAG,CAAC,EAAE;YACnC,KAAK,CAAC,IAAI,CAAC,KAAK,IAAI,EAAE;gBACpB,MAAM,MAAM,GAAG,MAAM,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAA;gBAC7B,GAAG,CAAC,MAAM,CAAC,CAAA;gBACX,OAAO,MAAM,CAAA;YACf,CAAC,CAAC,CAAA;QACJ,CAAC,CAAC,CAAA;QACF,YAAY,EAAE,CAAA;QACd,OAAO,CAAC,CAAA;IACV,CAAC,CAAA;IACH,MAAM,EAAE,GAAG,eAAe,CAAC;QACzB,KAAK,EAAE,OAAO,CAAC,KAAK;QACpB,MAAM,EAAE,OAAO,CAAC,MAAM;KACvB,CAAC,CAAA;IACF,GAAG,CAAC,MAAM,GAAG,UAAU,CACrB,KAAK,EAAE,IAAY,EAAE,GAAmB,EAAoB,EAAE;QAC5D,IAAI,aAAa,EAAE,CAAC;YAClB,OAAO,KAAK,CAAA;QACd,CAAC;QACD,OAAO,CAAC,YAAY,EAAE,CAAC;YACrB,MAAM,CAAC,GAAG,CACR,MAAM,MAAM,CAAC,EAAE,EAAE,OAAO,QAAQ,CAAC,GAAG,EAAE,IAAI,CAAC,0BAA0B,CAAC,CACvE,CAAC,IAAI,EAAE,CAAA;YACR,IAAI,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC;gBAClB,OAAO,KAAK,CAAA;YACd,CAAC;iBAAM,IAAI,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC;gBACzB,YAAY,GAAG,IAAI,CAAA;gBACnB,MAAK;YACP,CAAC;iBAAM,IAAI,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC;gBACzB,aAAa,GAAG,IAAI,CAAA;gBACpB,OAAO,KAAK,CAAA;YACd,CAAC;iBAAM,IAAI,CAAC,KAAK,EAAE,IAAI,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC;gBACrC,MAAK;YACP,CAAC;iBAAM,CAAC;gBACN,SAAQ;YACV,CAAC;QACH,CAAC;QACD,OAAO,cAAc,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;IAClC,CAAC,CACF,CAAA;IACD,MAAM,IAAI,CAAC,KAAK,EAAE,GAAG,CAAC,CAAA;IACtB,EAAE,CAAC,KAAK,EAAE,CAAA;AACZ,CAAC,CAAA;AAED,MAAM,IAAI,GAAG,KAAK,EAAE,GAAG,IAAc,EAAE,EAAE;IACvC,MAAM,aAAa,GAAG,CAAC,CAAS,EAAE,EAAE;QAClC,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,CAAA;QAC7B,OAAO,IAAI,CAAA;IACb,CAAC,CAAA;IAED,IAAI,OAAO,CAAC,GAAG,CAAC,2BAA2B,KAAK,GAAG,EAAE,CAAC;QACpD,MAAM,IAAI,KAAK,CAAC,0BAA0B,CAAC,CAAA;IAC7C,CAAC;IAED,MAAM,GAAG,GAAuB,EAAE,CAAA;IAClC,MAAM,KAAK,GAAa,EAAE,CAAA;IAC1B,IAAI,QAAQ,GAAG,KAAK,CAAA;IACpB,IAAI,IAAI,GAGgB,MAAM,CAAA;IAE9B,IAAI,WAAW,GAAG,KAAK,CAAA;IAEvB,KAAK,MAAM,GAAG,IAAI,IAAI,EAAE,CAAC;QACvB,IAAI,QAAQ,EAAE,CAAC;YACb,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;YACf,SAAQ;QACV,CAAC;QACD,IAAI,GAAG,KAAK,IAAI,EAAE,CAAC;YACjB,QAAQ
,GAAG,IAAI,CAAA;YACf,SAAQ;QACV,CAAC;aAAM,IAAI,GAAG,KAAK,KAAK,IAAI,GAAG,KAAK,KAAK,EAAE,CAAC;YAC1C,2DAA2D;YAC3D,SAAQ;QACV,CAAC;aAAM,IAAI,GAAG,KAAK,IAAI,IAAI,GAAG,KAAK,QAAQ,EAAE,CAAC;YAC5C,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,CAAA;YACjB,OAAO,CAAC,CAAA;QACV,CAAC;aAAM,IAAI,GAAG,KAAK,eAAe,IAAI,GAAG,KAAK,IAAI,EAAE,CAAC;YACnD,WAAW,GAAG,IAAI,CAAA;YAClB,SAAQ;QACV,CAAC;aAAM,IAAI,GAAG,KAAK,kBAAkB,IAAI,GAAG,KAAK,IAAI,EAAE,CAAC;YACtD,WAAW,GAAG,KAAK,CAAA;YACnB,SAAQ;QACV,CAAC;aAAM,IAAI,GAAG,KAAK,WAAW,IAAI,GAAG,KAAK,IAAI,EAAE,CAAC;YAC/C,GAAG,CAAC,MAAM,GAAG,aAAa,CAAA;YAC1B,SAAQ;QACV,CAAC;aAAM,IAAI,GAAG,KAAK,cAAc,IAAI,GAAG,KAAK,IAAI,EAAE,CAAC;YAClD,GAAG,CAAC,MAAM,GAAG,SAAS,CAAA;YACtB,SAAQ;QACV,CAAC;aAAM,IAAI,GAAG,KAAK,IAAI,IAAI,GAAG,KAAK,QAAQ,EAAE,CAAC;YAC5C,GAAG,CAAC,IAAI,GAAG,IAAI,CAAA;YACf,SAAQ;QACV,CAAC;aAAM,IAAI,GAAG,KAAK,IAAI,IAAI,GAAG,KAAK,WAAW,EAAE,CAAC;YAC/C,GAAG,CAAC,IAAI,GAAG,KAAK,CAAA;YAChB,SAAQ;QACV,CAAC;aAAM,IAAI,GAAG,KAAK,iBAAiB,EAAE,CAAC;YACrC,GAAG,CAAC,YAAY,GAAG,IAAI,CAAA;YACvB,SAAQ;QACV,CAAC;aAAM,IAAI,GAAG,KAAK,oBAAoB,EAAE,CAAC;YACxC,GAAG,CAAC,YAAY,GAAG,KAAK,CAAA;YACxB,SAAQ;QACV,CAAC;aAAM,IAAI,SAAS,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC;YAC/B,MAAM,GAAG,GAAG,GAAG,CAAC,SAAS,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAA;YAC1C,GAAG,CAAC,GAAG,GAAG,GAAG,CAAA;YACb,SAAQ;QACV,CAAC;aAAM,IAAI,iBAAiB,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC;YACvC,MAAM,GAAG,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,gBAAgB,CAAC,MAAM,CAAC,CAAA;YACnD,GAAG,CAAC,UAAU,GAAG,GAAG,CAAA;YACpB,SAAQ;QACV,CAAC;aAAM,IAAI,iBAAiB,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC;YACvC,MAAM,GAAG,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,gBAAgB,CAAC,MAAM,CAAC,CAAA;YACnD,GAAG,CAAC,UAAU,GAAG,GAAG,CAAA;YACpB,SAAQ;QACV,CAAC;aAAM,IAAI,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC;YACnC,MAAM,GAAG,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,YAAY,CAAC,MAAM,CAAC,CAAA;YAC/C,GAAG,CAAC,OAAO,GAAG,GAAG,CAAA;YACjB,SAAQ;QACV,CAAC;aAAM,IAAI,UAAU,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC;YAChC,MAAM,GAAG,GAAG,GAAG,CAAC,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAA;YAC3C,QAAQ,GAAG,EAAE,CAAC;gBACZ,KAAK,QAAQ;oBACX,IAAI,GAAG,MAAM,CAAA;oBACb,SAAQ;gBACV,KAAK,QAAQ,CAAC;gBACd,KAAK,QAAQ,CAAC;gBACd,KAAK,OAAO,CAAC;gBACb,KAAK,SAAS;oBACZ,IAAI,GAAG,MAAM,CAAC,GAAG,CAAC,CAAA;oBAClB,SAAQ;gBACV,KAAK,aAAa;oBAChB,IAAI,GAAG,MAAM,CAAC,UAAU,CAAA;oBACxB,SAAQ;gBACV;oBACE,OAAO,CAAC,KAAK,CAAC,2BAA2B,GAAG,EAAE,CAAC,CAAA;oBAC/C,eAAe,EAAE,CAAA;oBACjB,OAAO,CAAC,CAAA;YACZ,CAAC;QACH,CAAC;aAAM,IAAI,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC;YAC1B,OAAO,CAAC,KAAK,CAAC,mBAAmB,GAAG,EAAE,CAAC,CAAA;YACvC,eAAe,EAAE,CAAA;YACjB,OAAO,CAAC,CAAA;QACV,CAAC;aAAM,CAAC;YACN,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;QACjB,CAAC;IACH,CAAC;IAED,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,EAAE,CAAC;QAC/B,KAAK,MAAM,IAAI,IAAI,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;YAC9C,IAAI,IAAI,KAAK,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC;gBAC9B,OAAO,CAAC,KAAK,CAAC,uDAAuD,CAAC,CAAA;gBACtE,OAAO,CAAC,KAAK,CAAC,kDAAkD,CAAC,CAAA;gBACjE,OAAO,CAAC,CAAA;YACV,CAAC;QACH,CAAC;IACH,CAAC;IAED,IAAI,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC;QAClB,OAAO,CAAC,KAAK,CAAC,uCAAuC,CAAC,CAAA;QACtD,eAAe,EAAE,CAAA;QACjB,OAAO,CAAC,CAAA;IACV,CAAC;IAED,IAAI,IAAI,KAAK,MAAM,CAAC,MAAM,IAAI,CAAC,WAAW,IAAI,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC;QAC1D,OAAO,CAAC,KAAK,CAAC,iDAAiD,CAAC,CAAA;QAChE,eAAe,EAAE,CAAA;QACjB,OAAO,CAAC,CAAA;IACV,CAAC;IAED,IAAI,WAAW,EAAE,CAAC;QAChB,MAAM,iBAAiB,CAAC,IAAI,EAAE,KAAK,EAAE,GAAG,CAAC,CAAA;IAC3C,CAAC;SAAM,CAAC;QACN,MAAM,IAAI,CAAC,KAAK,EAAE,GAAG,CAAC,CAAA;IACxB,CAAC;IAED,OAAO,CAAC,CAAA;AACV,CAAC,CAAA;AACD,IAAI,CAAC,IAAI,GAAG,IAAI,CAAA;AAEhB,eAAe,IAAI,CAAA;AAEnB,IAAI,OAAO,CAAC,GAAG,CAAC,sBAAsB,KAAK,GAAG,EAAE,CAAC;IAC/C,MAAM,IAAI,GAAG,OAAO,CAAC,IAAI,
CAAC,KAAK,CAAC,CAAC,CAAC,CAAA;IAClC,IAAI,CAAC,GAAG,IAAI,CAAC,CAAC,IAAI,CAChB,IAAI,CAAC,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,EAC1B,EAAE,CAAC,EAAE;QACH,OAAO,CAAC,KAAK,CAAC,EAAE,CAAC,CAAA;QACjB,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;IACjB,CAAC,CACF,CAAA;AACH,CAAC","sourcesContent":["#!/usr/bin/env node\nimport { readFile } from 'fs/promises'\nimport type { RimrafAsyncOptions } from './index.js'\nimport { rimraf } from './index.js'\n\nconst pj = fileURLToPath(new URL('../package.json', import.meta.url))\nconst pjDist = fileURLToPath(new URL('../../package.json', import.meta.url))\nconst { version } = JSON.parse(\n await readFile(pjDist, 'utf8').catch(() => readFile(pj, 'utf8')),\n) as { version: string }\n\nconst runHelpForUsage = () =>\n console.error('run `rimraf --help` for usage information')\n\nexport const help = `rimraf version ${version}\n\nUsage: rimraf [ ...]\nDeletes all files and folders at \"path\", recursively.\n\nOptions:\n -- Treat all subsequent arguments as paths\n -h --help Display this usage info\n --preserve-root Do not remove '/' recursively (default)\n --no-preserve-root Do not treat '/' specially\n -G --no-glob Treat arguments as literal paths, not globs (default)\n -g --glob Treat arguments as glob patterns\n -v --verbose Be verbose when deleting files, showing them as\n they are removed. Not compatible with --impl=native\n -V --no-verbose Be silent when deleting files, showing nothing as\n they are removed (default)\n -i --interactive Ask for confirmation before deleting anything\n Not compatible with --impl=native\n -I --no-interactive Do not ask for confirmation before deleting\n\n --impl= Specify the implementation to use:\n rimraf: choose the best option (default)\n native: the built-in implementation in Node.js\n manual: the platform-specific JS implementation\n posix: the Posix JS implementation\n windows: the Windows JS implementation (falls back to\n move-remove on ENOTEMPTY)\n move-remove: a slow reliable Windows fallback\n\nImplementation-specific options:\n --tmp= Temp file folder for 'move-remove' implementation\n --max-retries= maxRetries for 'native' and 'windows' implementations\n --retry-delay= retryDelay for 'native' implementation, default 100\n --backoff= Exponential backoff factor for retries (default: 1.2)\n`\n\nimport { parse, relative, resolve } from 'path'\nconst cwd = process.cwd()\n\nimport { Dirent, Stats } from 'fs'\nimport { createInterface, Interface } from 'readline'\nimport { fileURLToPath } from 'url'\n\nconst prompt = async (rl: Interface, q: string) =>\n new Promise(res => rl.question(q, res))\n\nconst interactiveRimraf = async (\n impl: (path: string | string[], opt?: RimrafAsyncOptions) => Promise,\n paths: string[],\n opt: RimrafAsyncOptions,\n) => {\n const existingFilter = opt.filter || (() => true)\n let allRemaining = false\n let noneRemaining = false\n const queue: (() => Promise)[] = []\n let processing = false\n const processQueue = async () => {\n if (processing) return\n processing = true\n let next: (() => Promise) | undefined\n while ((next = queue.shift())) {\n await next()\n }\n processing = false\n }\n const oneAtATime =\n (fn: (s: string, e: Dirent | Stats) => Promise) =>\n async (s: string, e: Dirent | Stats): Promise => {\n const p = new Promise(res => {\n queue.push(async () => {\n const result = await fn(s, e)\n res(result)\n return result\n })\n })\n processQueue()\n return p\n }\n const rl = createInterface({\n input: process.stdin,\n output: process.stdout,\n })\n opt.filter = oneAtATime(\n async (path: 
string, ent: Dirent | Stats): Promise => {\n if (noneRemaining) {\n return false\n }\n while (!allRemaining) {\n const a = (\n await prompt(rl, `rm? ${relative(cwd, path)}\\n[(Yes)/No/All/Quit] > `)\n ).trim()\n if (/^n/i.test(a)) {\n return false\n } else if (/^a/i.test(a)) {\n allRemaining = true\n break\n } else if (/^q/i.test(a)) {\n noneRemaining = true\n return false\n } else if (a === '' || /^y/i.test(a)) {\n break\n } else {\n continue\n }\n }\n return existingFilter(path, ent)\n },\n )\n await impl(paths, opt)\n rl.close()\n}\n\nconst main = async (...args: string[]) => {\n const verboseFilter = (s: string) => {\n console.log(relative(cwd, s))\n return true\n }\n\n if (process.env.__RIMRAF_TESTING_BIN_FAIL__ === '1') {\n throw new Error('simulated rimraf failure')\n }\n\n const opt: RimrafAsyncOptions = {}\n const paths: string[] = []\n let dashdash = false\n let impl: (\n path: string | string[],\n opt?: RimrafAsyncOptions,\n ) => Promise = rimraf\n\n let interactive = false\n\n for (const arg of args) {\n if (dashdash) {\n paths.push(arg)\n continue\n }\n if (arg === '--') {\n dashdash = true\n continue\n } else if (arg === '-rf' || arg === '-fr') {\n // this never did anything, but people put it there I guess\n continue\n } else if (arg === '-h' || arg === '--help') {\n console.log(help)\n return 0\n } else if (arg === '--interactive' || arg === '-i') {\n interactive = true\n continue\n } else if (arg === '--no-interactive' || arg === '-I') {\n interactive = false\n continue\n } else if (arg === '--verbose' || arg === '-v') {\n opt.filter = verboseFilter\n continue\n } else if (arg === '--no-verbose' || arg === '-V') {\n opt.filter = undefined\n continue\n } else if (arg === '-g' || arg === '--glob') {\n opt.glob = true\n continue\n } else if (arg === '-G' || arg === '--no-glob') {\n opt.glob = false\n continue\n } else if (arg === '--preserve-root') {\n opt.preserveRoot = true\n continue\n } else if (arg === '--no-preserve-root') {\n opt.preserveRoot = false\n continue\n } else if (/^--tmp=/.test(arg)) {\n const val = arg.substring('--tmp='.length)\n opt.tmp = val\n continue\n } else if (/^--max-retries=/.test(arg)) {\n const val = +arg.substring('--max-retries='.length)\n opt.maxRetries = val\n continue\n } else if (/^--retry-delay=/.test(arg)) {\n const val = +arg.substring('--retry-delay='.length)\n opt.retryDelay = val\n continue\n } else if (/^--backoff=/.test(arg)) {\n const val = +arg.substring('--backoff='.length)\n opt.backoff = val\n continue\n } else if (/^--impl=/.test(arg)) {\n const val = arg.substring('--impl='.length)\n switch (val) {\n case 'rimraf':\n impl = rimraf\n continue\n case 'native':\n case 'manual':\n case 'posix':\n case 'windows':\n impl = rimraf[val]\n continue\n case 'move-remove':\n impl = rimraf.moveRemove\n continue\n default:\n console.error(`unknown implementation: ${val}`)\n runHelpForUsage()\n return 1\n }\n } else if (/^-/.test(arg)) {\n console.error(`unknown option: ${arg}`)\n runHelpForUsage()\n return 1\n } else {\n paths.push(arg)\n }\n }\n\n if (opt.preserveRoot !== false) {\n for (const path of paths.map(p => resolve(p))) {\n if (path === parse(path).root) {\n console.error(`rimraf: it is dangerous to operate recursively on '/'`)\n console.error('use --no-preserve-root to override this failsafe')\n return 1\n }\n }\n }\n\n if (!paths.length) {\n console.error('rimraf: must provide a path to remove')\n runHelpForUsage()\n return 1\n }\n\n if (impl === rimraf.native && (interactive || opt.filter)) {\n console.error('native 
implementation does not support -v or -i')\n runHelpForUsage()\n return 1\n }\n\n if (interactive) {\n await interactiveRimraf(impl, paths, opt)\n } else {\n await impl(paths, opt)\n }\n\n return 0\n}\nmain.help = help\n\nexport default main\n\nif (process.env.__TESTING_RIMRAF_BIN__ !== '1') {\n const args = process.argv.slice(2)\n main(...args).then(\n code => process.exit(code),\n er => {\n console.error(er)\n process.exit(1)\n },\n )\n}\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/default-tmp.d.ts b/deps/npm/node_modules/rimraf/dist/esm/default-tmp.d.ts
deleted file mode 100644
index a68e925b249a8d..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/default-tmp.d.ts
+++ /dev/null
@@ -1,3 +0,0 @@
-export declare const defaultTmp: (path: string) => Promise<string>;
-export declare const defaultTmpSync: (path: string) => string;
-//# sourceMappingURL=default-tmp.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/default-tmp.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/default-tmp.d.ts.map
deleted file mode 100644
index d0b35f2786233b..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/default-tmp.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"default-tmp.d.ts","sourceRoot":"","sources":["../../src/default-tmp.ts"],"names":[],"mappings":"AAiEA,eAAO,MAAM,UAAU,SAnCc,MAAM,oBAoCe,CAAA;AAC1D,eAAO,MAAM,cAAc,SArBQ,MAAM,WAsByB,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/default-tmp.js b/deps/npm/node_modules/rimraf/dist/esm/default-tmp.js
deleted file mode 100644
index fb0846af5c3acf..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/default-tmp.js
+++ /dev/null
@@ -1,55 +0,0 @@
-// The default temporary folder location for use in the windows algorithm.
-// It's TEMPting to use dirname(path), since that's guaranteed to be on the
-// same device. However, this means that:
-// rimraf(path).then(() => rimraf(dirname(path)))
-// will often fail with EBUSY, because the parent dir contains
-// marked-for-deletion directory entries (which do not show up in readdir).
-// The approach here is to use os.tmpdir() if it's on the same drive letter,
-// or resolve(path, '\\temp') if it exists, or the root of the drive if not.
-// On Posix (not that you'd be likely to use the windows algorithm there),
-// it uses os.tmpdir() always.
-import { tmpdir } from 'os';
-import { parse, resolve } from 'path';
-import { promises, statSync } from './fs.js';
-import platform from './platform.js';
-const { stat } = promises;
-const isDirSync = (path) => {
- try {
- return statSync(path).isDirectory();
- }
- catch (er) {
- return false;
- }
-};
-const isDir = (path) => stat(path).then(st => st.isDirectory(), () => false);
-const win32DefaultTmp = async (path) => {
- const { root } = parse(path);
- const tmp = tmpdir();
- const { root: tmpRoot } = parse(tmp);
- if (root.toLowerCase() === tmpRoot.toLowerCase()) {
- return tmp;
- }
- const driveTmp = resolve(root, '/temp');
- if (await isDir(driveTmp)) {
- return driveTmp;
- }
- return root;
-};
-const win32DefaultTmpSync = (path) => {
- const { root } = parse(path);
- const tmp = tmpdir();
- const { root: tmpRoot } = parse(tmp);
- if (root.toLowerCase() === tmpRoot.toLowerCase()) {
- return tmp;
- }
- const driveTmp = resolve(root, '/temp');
- if (isDirSync(driveTmp)) {
- return driveTmp;
- }
- return root;
-};
-const posixDefaultTmp = async () => tmpdir();
-const posixDefaultTmpSync = () => tmpdir();
-export const defaultTmp = platform === 'win32' ? win32DefaultTmp : posixDefaultTmp;
-export const defaultTmpSync = platform === 'win32' ? win32DefaultTmpSync : posixDefaultTmpSync;
-//# sourceMappingURL=default-tmp.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/default-tmp.js.map b/deps/npm/node_modules/rimraf/dist/esm/default-tmp.js.map
deleted file mode 100644
index ea6af37802aa83..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/default-tmp.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"default-tmp.js","sourceRoot":"","sources":["../../src/default-tmp.ts"],"names":[],"mappings":"AAAA,0EAA0E;AAC1E,2EAA2E;AAC3E,0CAA0C;AAC1C,iDAAiD;AACjD,8DAA8D;AAC9D,2EAA2E;AAC3E,4EAA4E;AAC5E,4EAA4E;AAC5E,0EAA0E;AAC1E,8BAA8B;AAC9B,OAAO,EAAE,MAAM,EAAE,MAAM,IAAI,CAAA;AAC3B,OAAO,EAAE,KAAK,EAAE,OAAO,EAAE,MAAM,MAAM,CAAA;AACrC,OAAO,EAAE,QAAQ,EAAE,QAAQ,EAAE,MAAM,SAAS,CAAA;AAC5C,OAAO,QAAQ,MAAM,eAAe,CAAA;AACpC,MAAM,EAAE,IAAI,EAAE,GAAG,QAAQ,CAAA;AAEzB,MAAM,SAAS,GAAG,CAAC,IAAY,EAAE,EAAE;IACjC,IAAI,CAAC;QACH,OAAO,QAAQ,CAAC,IAAI,CAAC,CAAC,WAAW,EAAE,CAAA;IACrC,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,OAAO,KAAK,CAAA;IACd,CAAC;AACH,CAAC,CAAA;AAED,MAAM,KAAK,GAAG,CAAC,IAAY,EAAE,EAAE,CAC7B,IAAI,CAAC,IAAI,CAAC,CAAC,IAAI,CACb,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,WAAW,EAAE,EACtB,GAAG,EAAE,CAAC,KAAK,CACZ,CAAA;AAEH,MAAM,eAAe,GAAG,KAAK,EAAE,IAAY,EAAE,EAAE;IAC7C,MAAM,EAAE,IAAI,EAAE,GAAG,KAAK,CAAC,IAAI,CAAC,CAAA;IAC5B,MAAM,GAAG,GAAG,MAAM,EAAE,CAAA;IACpB,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,GAAG,KAAK,CAAC,GAAG,CAAC,CAAA;IACpC,IAAI,IAAI,CAAC,WAAW,EAAE,KAAK,OAAO,CAAC,WAAW,EAAE,EAAE,CAAC;QACjD,OAAO,GAAG,CAAA;IACZ,CAAC;IAED,MAAM,QAAQ,GAAG,OAAO,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;IACvC,IAAI,MAAM,KAAK,CAAC,QAAQ,CAAC,EAAE,CAAC;QAC1B,OAAO,QAAQ,CAAA;IACjB,CAAC;IAED,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,mBAAmB,GAAG,CAAC,IAAY,EAAE,EAAE;IAC3C,MAAM,EAAE,IAAI,EAAE,GAAG,KAAK,CAAC,IAAI,CAAC,CAAA;IAC5B,MAAM,GAAG,GAAG,MAAM,EAAE,CAAA;IACpB,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,GAAG,KAAK,CAAC,GAAG,CAAC,CAAA;IACpC,IAAI,IAAI,CAAC,WAAW,EAAE,KAAK,OAAO,CAAC,WAAW,EAAE,EAAE,CAAC;QACjD,OAAO,GAAG,CAAA;IACZ,CAAC;IAED,MAAM,QAAQ,GAAG,OAAO,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;IACvC,IAAI,SAAS,CAAC,QAAQ,CAAC,EAAE,CAAC;QACxB,OAAO,QAAQ,CAAA;IACjB,CAAC;IAED,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,eAAe,GAAG,KAAK,IAAI,EAAE,CAAC,MAAM,EAAE,CAAA;AAC5C,MAAM,mBAAmB,GAAG,GAAG,EAAE,CAAC,MAAM,EAAE,CAAA;AAE1C,MAAM,CAAC,MAAM,UAAU,GACrB,QAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,eAAe,CAAA;AAC1D,MAAM,CAAC,MAAM,cAAc,GACzB,QAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,mBAAmB,CAAC,CAAC,CAAC,mBAAmB,CAAA","sourcesContent":["// The default temporary folder location for use in the windows algorithm.\n// It's TEMPting to use dirname(path), since that's guaranteed to be on the\n// same device. 
However, this means that:\n// rimraf(path).then(() => rimraf(dirname(path)))\n// will often fail with EBUSY, because the parent dir contains\n// marked-for-deletion directory entries (which do not show up in readdir).\n// The approach here is to use os.tmpdir() if it's on the same drive letter,\n// or resolve(path, '\\\\temp') if it exists, or the root of the drive if not.\n// On Posix (not that you'd be likely to use the windows algorithm there),\n// it uses os.tmpdir() always.\nimport { tmpdir } from 'os'\nimport { parse, resolve } from 'path'\nimport { promises, statSync } from './fs.js'\nimport platform from './platform.js'\nconst { stat } = promises\n\nconst isDirSync = (path: string) => {\n try {\n return statSync(path).isDirectory()\n } catch (er) {\n return false\n }\n}\n\nconst isDir = (path: string) =>\n stat(path).then(\n st => st.isDirectory(),\n () => false,\n )\n\nconst win32DefaultTmp = async (path: string) => {\n const { root } = parse(path)\n const tmp = tmpdir()\n const { root: tmpRoot } = parse(tmp)\n if (root.toLowerCase() === tmpRoot.toLowerCase()) {\n return tmp\n }\n\n const driveTmp = resolve(root, '/temp')\n if (await isDir(driveTmp)) {\n return driveTmp\n }\n\n return root\n}\n\nconst win32DefaultTmpSync = (path: string) => {\n const { root } = parse(path)\n const tmp = tmpdir()\n const { root: tmpRoot } = parse(tmp)\n if (root.toLowerCase() === tmpRoot.toLowerCase()) {\n return tmp\n }\n\n const driveTmp = resolve(root, '/temp')\n if (isDirSync(driveTmp)) {\n return driveTmp\n }\n\n return root\n}\n\nconst posixDefaultTmp = async () => tmpdir()\nconst posixDefaultTmpSync = () => tmpdir()\n\nexport const defaultTmp =\n platform === 'win32' ? win32DefaultTmp : posixDefaultTmp\nexport const defaultTmpSync =\n platform === 'win32' ? win32DefaultTmpSync : posixDefaultTmpSync\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/fix-eperm.d.ts b/deps/npm/node_modules/rimraf/dist/esm/fix-eperm.d.ts
deleted file mode 100644
index 20e76a82c4942e..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/fix-eperm.d.ts
+++ /dev/null
@@ -1,3 +0,0 @@
-export declare const fixEPERM: (fn: (path: string) => Promise<any>) => (path: string) => Promise<any>;
-export declare const fixEPERMSync: (fn: (path: string) => any) => (path: string) => any;
-//# sourceMappingURL=fix-eperm.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/fix-eperm.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/fix-eperm.d.ts.map
deleted file mode 100644
index ac17d6f4e060bb..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/fix-eperm.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"fix-eperm.d.ts","sourceRoot":"","sources":["../../src/fix-eperm.ts"],"names":[],"mappings":"AAGA,eAAO,MAAM,QAAQ,OACd,CAAC,IAAI,EAAE,MAAM,KAAK,OAAO,CAAC,GAAG,CAAC,YAAkB,MAAM,iBAsB1D,CAAA;AAEH,eAAO,MAAM,YAAY,OAAQ,CAAC,IAAI,EAAE,MAAM,KAAK,GAAG,YAAY,MAAM,QAsBvE,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/fix-eperm.js b/deps/npm/node_modules/rimraf/dist/esm/fix-eperm.js
deleted file mode 100644
index 633c0e119df1f7..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/fix-eperm.js
+++ /dev/null
@@ -1,53 +0,0 @@
-import { chmodSync, promises } from './fs.js';
-const { chmod } = promises;
-export const fixEPERM = (fn) => async (path) => {
- try {
- return await fn(path);
- }
- catch (er) {
- const fer = er;
- if (fer?.code === 'ENOENT') {
- return;
- }
- if (fer?.code === 'EPERM') {
- try {
- await chmod(path, 0o666);
- }
- catch (er2) {
- const fer2 = er2;
- if (fer2?.code === 'ENOENT') {
- return;
- }
- throw er;
- }
- return await fn(path);
- }
- throw er;
- }
-};
-export const fixEPERMSync = (fn) => (path) => {
- try {
- return fn(path);
- }
- catch (er) {
- const fer = er;
- if (fer?.code === 'ENOENT') {
- return;
- }
- if (fer?.code === 'EPERM') {
- try {
- chmodSync(path, 0o666);
- }
- catch (er2) {
- const fer2 = er2;
- if (fer2?.code === 'ENOENT') {
- return;
- }
- throw er;
- }
- return fn(path);
- }
- throw er;
- }
-};
-//# sourceMappingURL=fix-eperm.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/fix-eperm.js.map b/deps/npm/node_modules/rimraf/dist/esm/fix-eperm.js.map
deleted file mode 100644
index a6aa032b4891e8..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/fix-eperm.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"fix-eperm.js","sourceRoot":"","sources":["../../src/fix-eperm.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAE,QAAQ,EAAE,MAAM,SAAS,CAAA;AAC7C,MAAM,EAAE,KAAK,EAAE,GAAG,QAAQ,CAAA;AAE1B,MAAM,CAAC,MAAM,QAAQ,GACnB,CAAC,EAAkC,EAAE,EAAE,CAAC,KAAK,EAAE,IAAY,EAAE,EAAE;IAC7D,IAAI,CAAC;QACH,OAAO,MAAM,EAAE,CAAC,IAAI,CAAC,CAAA;IACvB,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,IAAI,GAAG,EAAE,IAAI,KAAK,QAAQ,EAAE,CAAC;YAC3B,OAAM;QACR,CAAC;QACD,IAAI,GAAG,EAAE,IAAI,KAAK,OAAO,EAAE,CAAC;YAC1B,IAAI,CAAC;gBACH,MAAM,KAAK,CAAC,IAAI,EAAE,KAAK,CAAC,CAAA;YAC1B,CAAC;YAAC,OAAO,GAAG,EAAE,CAAC;gBACb,MAAM,IAAI,GAAG,GAA4B,CAAA;gBACzC,IAAI,IAAI,EAAE,IAAI,KAAK,QAAQ,EAAE,CAAC;oBAC5B,OAAM;gBACR,CAAC;gBACD,MAAM,EAAE,CAAA;YACV,CAAC;YACD,OAAO,MAAM,EAAE,CAAC,IAAI,CAAC,CAAA;QACvB,CAAC;QACD,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAEH,MAAM,CAAC,MAAM,YAAY,GAAG,CAAC,EAAyB,EAAE,EAAE,CAAC,CAAC,IAAY,EAAE,EAAE;IAC1E,IAAI,CAAC;QACH,OAAO,EAAE,CAAC,IAAI,CAAC,CAAA;IACjB,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,IAAI,GAAG,EAAE,IAAI,KAAK,QAAQ,EAAE,CAAC;YAC3B,OAAM;QACR,CAAC;QACD,IAAI,GAAG,EAAE,IAAI,KAAK,OAAO,EAAE,CAAC;YAC1B,IAAI,CAAC;gBACH,SAAS,CAAC,IAAI,EAAE,KAAK,CAAC,CAAA;YACxB,CAAC;YAAC,OAAO,GAAG,EAAE,CAAC;gBACb,MAAM,IAAI,GAAG,GAA4B,CAAA;gBACzC,IAAI,IAAI,EAAE,IAAI,KAAK,QAAQ,EAAE,CAAC;oBAC5B,OAAM;gBACR,CAAC;gBACD,MAAM,EAAE,CAAA;YACV,CAAC;YACD,OAAO,EAAE,CAAC,IAAI,CAAC,CAAA;QACjB,CAAC;QACD,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA","sourcesContent":["import { chmodSync, promises } from './fs.js'\nconst { chmod } = promises\n\nexport const fixEPERM =\n (fn: (path: string) => Promise) => async (path: string) => {\n try {\n return await fn(path)\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n if (fer?.code === 'ENOENT') {\n return\n }\n if (fer?.code === 'EPERM') {\n try {\n await chmod(path, 0o666)\n } catch (er2) {\n const fer2 = er2 as NodeJS.ErrnoException\n if (fer2?.code === 'ENOENT') {\n return\n }\n throw er\n }\n return await fn(path)\n }\n throw er\n }\n }\n\nexport const fixEPERMSync = (fn: (path: string) => any) => (path: string) => {\n try {\n return fn(path)\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n if (fer?.code === 'ENOENT') {\n return\n }\n if (fer?.code === 'EPERM') {\n try {\n chmodSync(path, 0o666)\n } catch (er2) {\n const fer2 = er2 as NodeJS.ErrnoException\n if (fer2?.code === 'ENOENT') {\n return\n }\n throw er\n }\n return fn(path)\n }\n throw er\n }\n}\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/fs.d.ts b/deps/npm/node_modules/rimraf/dist/esm/fs.d.ts
deleted file mode 100644
index 9e4e95b4e7a411..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/fs.d.ts
+++ /dev/null
@@ -1,17 +0,0 @@
-import fs, { Dirent } from 'fs';
-export { chmodSync, mkdirSync, renameSync, rmdirSync, rmSync, statSync, lstatSync, unlinkSync, } from 'fs';
-export declare const readdirSync: (path: fs.PathLike) => Dirent[];
-export declare const promises: {
- chmod: (path: fs.PathLike, mode: fs.Mode) => Promise<void>;
- mkdir: (path: fs.PathLike, options?: fs.Mode | (fs.MakeDirectoryOptions & {
- recursive?: boolean | null;
- }) | undefined | null) => Promise<string | undefined>;
- readdir: (path: fs.PathLike) => Promise<Dirent[]>;
- rename: (oldPath: fs.PathLike, newPath: fs.PathLike) => Promise<void>;
- rm: (path: fs.PathLike, options: fs.RmOptions) => Promise<void>;
- rmdir: (path: fs.PathLike) => Promise<void>;
- stat: (path: fs.PathLike) => Promise<fs.Stats>;
- lstat: (path: fs.PathLike) => Promise<fs.Stats>;
- unlink: (path: fs.PathLike) => Promise<void>;
-};
-//# sourceMappingURL=fs.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/fs.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/fs.d.ts.map
deleted file mode 100644
index 8c8b1034cbcd27..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/fs.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"fs.d.ts","sourceRoot":"","sources":["../../src/fs.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,EAAE,EAAE,MAAM,EAAE,MAAM,IAAI,CAAA;AAG/B,OAAO,EACL,SAAS,EACT,SAAS,EACT,UAAU,EACV,SAAS,EACT,MAAM,EACN,QAAQ,EACR,SAAS,EACT,UAAU,GACX,MAAM,IAAI,CAAA;AAGX,eAAO,MAAM,WAAW,SAAU,EAAE,CAAC,QAAQ,KAAG,MAAM,EACf,CAAA;AA+DvC,eAAO,MAAM,QAAQ;kBAxDA,EAAE,CAAC,QAAQ,QAAQ,EAAE,CAAC,IAAI,KAAG,OAAO,CAAC,IAAI,CAAC;kBAMvD,EAAE,CAAC,QAAQ,YAEb,EAAE,CAAC,IAAI,GACP,CAAC,EAAE,CAAC,oBAAoB,GAAG;QAAE,SAAS,CAAC,EAAE,OAAO,GAAG,IAAI,CAAA;KAAE,CAAC,GAC1D,SAAS,GACT,IAAI,KACP,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;oBAKP,EAAE,CAAC,QAAQ,KAAG,OAAO,CAAC,MAAM,EAAE,CAAC;sBAO7B,EAAE,CAAC,QAAQ,WAAW,EAAE,CAAC,QAAQ,KAAG,OAAO,CAAC,IAAI,CAAC;eAOxD,EAAE,CAAC,QAAQ,WAAW,EAAE,CAAC,SAAS,KAAG,OAAO,CAAC,IAAI,CAAC;kBAK/C,EAAE,CAAC,QAAQ,KAAG,OAAO,CAAC,IAAI,CAAC;iBAK5B,EAAE,CAAC,QAAQ,KAAG,OAAO,CAAC,EAAE,CAAC,KAAK,CAAC;kBAK9B,EAAE,CAAC,QAAQ,KAAG,OAAO,CAAC,EAAE,CAAC,KAAK,CAAC;mBAK9B,EAAE,CAAC,QAAQ,KAAG,OAAO,CAAC,IAAI,CAAC;CAehD,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/fs.js b/deps/npm/node_modules/rimraf/dist/esm/fs.js
deleted file mode 100644
index f9422ce992a546..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/fs.js
+++ /dev/null
@@ -1,31 +0,0 @@
-// promisify ourselves, because older nodes don't have fs.promises
-import fs from 'fs';
-// sync ones just take the sync version from node
-export { chmodSync, mkdirSync, renameSync, rmdirSync, rmSync, statSync, lstatSync, unlinkSync, } from 'fs';
-import { readdirSync as rdSync } from 'fs';
-export const readdirSync = (path) => rdSync(path, { withFileTypes: true });
-// unrolled for better inlining, this seems to get better performance
-// than something like:
-// const makeCb = (res, rej) => (er, ...d) => er ? rej(er) : res(...d)
-// which would be a bit cleaner.
-const chmod = (path, mode) => new Promise((res, rej) => fs.chmod(path, mode, (er, ...d) => (er ? rej(er) : res(...d))));
-const mkdir = (path, options) => new Promise((res, rej) => fs.mkdir(path, options, (er, made) => (er ? rej(er) : res(made))));
-const readdir = (path) => new Promise((res, rej) => fs.readdir(path, { withFileTypes: true }, (er, data) => er ? rej(er) : res(data)));
-const rename = (oldPath, newPath) => new Promise((res, rej) => fs.rename(oldPath, newPath, (er, ...d) => er ? rej(er) : res(...d)));
-const rm = (path, options) => new Promise((res, rej) => fs.rm(path, options, (er, ...d) => (er ? rej(er) : res(...d))));
-const rmdir = (path) => new Promise((res, rej) => fs.rmdir(path, (er, ...d) => (er ? rej(er) : res(...d))));
-const stat = (path) => new Promise((res, rej) => fs.stat(path, (er, data) => (er ? rej(er) : res(data))));
-const lstat = (path) => new Promise((res, rej) => fs.lstat(path, (er, data) => (er ? rej(er) : res(data))));
-const unlink = (path) => new Promise((res, rej) => fs.unlink(path, (er, ...d) => (er ? rej(er) : res(...d))));
-export const promises = {
- chmod,
- mkdir,
- readdir,
- rename,
- rm,
- rmdir,
- stat,
- lstat,
- unlink,
-};
-//# sourceMappingURL=fs.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/fs.js.map b/deps/npm/node_modules/rimraf/dist/esm/fs.js.map
deleted file mode 100644
index c4c5d0f2cc42b4..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/fs.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"fs.js","sourceRoot":"","sources":["../../src/fs.ts"],"names":[],"mappings":"AAAA,kEAAkE;AAElE,OAAO,EAAc,MAAM,IAAI,CAAA;AAE/B,iDAAiD;AACjD,OAAO,EACL,SAAS,EACT,SAAS,EACT,UAAU,EACV,SAAS,EACT,MAAM,EACN,QAAQ,EACR,SAAS,EACT,UAAU,GACX,MAAM,IAAI,CAAA;AAEX,OAAO,EAAE,WAAW,IAAI,MAAM,EAAE,MAAM,IAAI,CAAA;AAC1C,MAAM,CAAC,MAAM,WAAW,GAAG,CAAC,IAAiB,EAAY,EAAE,CACzD,MAAM,CAAC,IAAI,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAA;AAEvC,qEAAqE;AACrE,uBAAuB;AACvB,sEAAsE;AACtE,gCAAgC;AAEhC,MAAM,KAAK,GAAG,CAAC,IAAiB,EAAE,IAAa,EAAiB,EAAE,CAChE,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,EAAE,CAAC,KAAK,CAAC,IAAI,EAAE,IAAI,EAAE,CAAC,EAAE,EAAE,GAAG,CAAQ,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CACtE,CAAA;AAEH,MAAM,KAAK,GAAG,CACZ,IAAiB,EACjB,OAIQ,EACqB,EAAE,CAC/B,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,EAAE,CAAC,KAAK,CAAC,IAAI,EAAE,OAAO,EAAE,CAAC,EAAE,EAAE,IAAI,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,CAClE,CAAA;AAEH,MAAM,OAAO,GAAG,CAAC,IAAiB,EAAqB,EAAE,CACvD,IAAI,OAAO,CAAW,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACjC,EAAE,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,EAAE,IAAI,EAAE,EAAE,CACrD,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CACzB,CACF,CAAA;AAEH,MAAM,MAAM,GAAG,CAAC,OAAoB,EAAE,OAAoB,EAAiB,EAAE,CAC3E,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,EAAE,CAAC,MAAM,CAAC,OAAO,EAAE,OAAO,EAAE,CAAC,EAAE,EAAE,GAAG,CAAQ,EAAE,EAAE,CAC9C,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CACzB,CACF,CAAA;AAEH,MAAM,EAAE,GAAG,CAAC,IAAiB,EAAE,OAAqB,EAAiB,EAAE,CACrE,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,EAAE,CAAC,EAAE,CAAC,IAAI,EAAE,OAAO,EAAE,CAAC,EAAE,EAAE,GAAG,CAAQ,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CACtE,CAAA;AAEH,MAAM,KAAK,GAAG,CAAC,IAAiB,EAAiB,EAAE,CACjD,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,EAAE,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,EAAE,EAAE,GAAG,CAAQ,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAChE,CAAA;AAEH,MAAM,IAAI,GAAG,CAAC,IAAiB,EAAqB,EAAE,CACpD,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,EAAE,EAAE,IAAI,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,CACxD,CAAA;AAEH,MAAM,KAAK,GAAG,CAAC,IAAiB,EAAqB,EAAE,CACrD,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,EAAE,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,EAAE,EAAE,IAAI,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,CACzD,CAAA;AAEH,MAAM,MAAM,GAAG,CAAC,IAAiB,EAAiB,EAAE,CAClD,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,CAAC,EAAE,EAAE,GAAG,CAAQ,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CACjE,CAAA;AAEH,MAAM,CAAC,MAAM,QAAQ,GAAG;IACtB,KAAK;IACL,KAAK;IACL,OAAO;IACP,MAAM;IACN,EAAE;IACF,KAAK;IACL,IAAI;IACJ,KAAK;IACL,MAAM;CACP,CAAA","sourcesContent":["// promisify ourselves, because older nodes don't have fs.promises\n\nimport fs, { Dirent } from 'fs'\n\n// sync ones just take the sync version from node\nexport {\n chmodSync,\n mkdirSync,\n renameSync,\n rmdirSync,\n rmSync,\n statSync,\n lstatSync,\n unlinkSync,\n} from 'fs'\n\nimport { readdirSync as rdSync } from 'fs'\nexport const readdirSync = (path: fs.PathLike): Dirent[] =>\n 
rdSync(path, { withFileTypes: true })\n\n// unrolled for better inlining, this seems to get better performance\n// than something like:\n// const makeCb = (res, rej) => (er, ...d) => er ? rej(er) : res(...d)\n// which would be a bit cleaner.\n\nconst chmod = (path: fs.PathLike, mode: fs.Mode): Promise =>\n new Promise((res, rej) =>\n fs.chmod(path, mode, (er, ...d: any[]) => (er ? rej(er) : res(...d))),\n )\n\nconst mkdir = (\n path: fs.PathLike,\n options?:\n | fs.Mode\n | (fs.MakeDirectoryOptions & { recursive?: boolean | null })\n | undefined\n | null,\n): Promise =>\n new Promise((res, rej) =>\n fs.mkdir(path, options, (er, made) => (er ? rej(er) : res(made))),\n )\n\nconst readdir = (path: fs.PathLike): Promise =>\n new Promise((res, rej) =>\n fs.readdir(path, { withFileTypes: true }, (er, data) =>\n er ? rej(er) : res(data),\n ),\n )\n\nconst rename = (oldPath: fs.PathLike, newPath: fs.PathLike): Promise =>\n new Promise((res, rej) =>\n fs.rename(oldPath, newPath, (er, ...d: any[]) =>\n er ? rej(er) : res(...d),\n ),\n )\n\nconst rm = (path: fs.PathLike, options: fs.RmOptions): Promise =>\n new Promise((res, rej) =>\n fs.rm(path, options, (er, ...d: any[]) => (er ? rej(er) : res(...d))),\n )\n\nconst rmdir = (path: fs.PathLike): Promise =>\n new Promise((res, rej) =>\n fs.rmdir(path, (er, ...d: any[]) => (er ? rej(er) : res(...d))),\n )\n\nconst stat = (path: fs.PathLike): Promise =>\n new Promise((res, rej) =>\n fs.stat(path, (er, data) => (er ? rej(er) : res(data))),\n )\n\nconst lstat = (path: fs.PathLike): Promise =>\n new Promise((res, rej) =>\n fs.lstat(path, (er, data) => (er ? rej(er) : res(data))),\n )\n\nconst unlink = (path: fs.PathLike): Promise =>\n new Promise((res, rej) =>\n fs.unlink(path, (er, ...d: any[]) => (er ? rej(er) : res(...d))),\n )\n\nexport const promises = {\n chmod,\n mkdir,\n readdir,\n rename,\n rm,\n rmdir,\n stat,\n lstat,\n unlink,\n}\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/ignore-enoent.d.ts b/deps/npm/node_modules/rimraf/dist/esm/ignore-enoent.d.ts
deleted file mode 100644
index f158cc27025b16..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/ignore-enoent.d.ts
+++ /dev/null
@@ -1,3 +0,0 @@
-export declare const ignoreENOENT: (p: Promise<any>) => Promise<any>;
-export declare const ignoreENOENTSync: (fn: () => any) => any;
-//# sourceMappingURL=ignore-enoent.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/ignore-enoent.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/ignore-enoent.d.ts.map
deleted file mode 100644
index 2cfb3bbac5fab7..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/ignore-enoent.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"ignore-enoent.d.ts","sourceRoot":"","sources":["../../src/ignore-enoent.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,YAAY,MAAa,OAAO,CAAC,GAAG,CAAC,iBAK9C,CAAA;AAEJ,eAAO,MAAM,gBAAgB,OAAQ,MAAM,GAAG,QAQ7C,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/ignore-enoent.js b/deps/npm/node_modules/rimraf/dist/esm/ignore-enoent.js
deleted file mode 100644
index 753f4811cd384f..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/ignore-enoent.js
+++ /dev/null
@@ -1,16 +0,0 @@
-export const ignoreENOENT = async (p) => p.catch(er => {
- if (er.code !== 'ENOENT') {
- throw er;
- }
-});
-export const ignoreENOENTSync = (fn) => {
- try {
- return fn();
- }
- catch (er) {
- if (er?.code !== 'ENOENT') {
- throw er;
- }
- }
-};
-//# sourceMappingURL=ignore-enoent.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/ignore-enoent.js.map b/deps/npm/node_modules/rimraf/dist/esm/ignore-enoent.js.map
deleted file mode 100644
index acffb146233e13..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/ignore-enoent.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"ignore-enoent.js","sourceRoot":"","sources":["../../src/ignore-enoent.ts"],"names":[],"mappings":"AAAA,MAAM,CAAC,MAAM,YAAY,GAAG,KAAK,EAAE,CAAe,EAAE,EAAE,CACpD,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE;IACX,IAAI,EAAE,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;QACzB,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAC,CAAA;AAEJ,MAAM,CAAC,MAAM,gBAAgB,GAAG,CAAC,EAAa,EAAE,EAAE;IAChD,IAAI,CAAC;QACH,OAAO,EAAE,EAAE,CAAA;IACb,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ,EAAE,CAAC;YACrD,MAAM,EAAE,CAAA;QACV,CAAC;IACH,CAAC;AACH,CAAC,CAAA","sourcesContent":["export const ignoreENOENT = async (p: Promise) =>\n p.catch(er => {\n if (er.code !== 'ENOENT') {\n throw er\n }\n })\n\nexport const ignoreENOENTSync = (fn: () => any) => {\n try {\n return fn()\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code !== 'ENOENT') {\n throw er\n }\n }\n}\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/index.d.ts b/deps/npm/node_modules/rimraf/dist/esm/index.d.ts
deleted file mode 100644
index 9ec4a124ab613d..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/index.d.ts
+++ /dev/null
@@ -1,50 +0,0 @@
-import { RimrafAsyncOptions, RimrafSyncOptions } from './opt-arg.js';
-export { assertRimrafOptions, isRimrafOptions, type RimrafAsyncOptions, type RimrafOptions, type RimrafSyncOptions, } from './opt-arg.js';
-export declare const nativeSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
-export declare const native: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & {
- sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
-};
-export declare const manualSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
-export declare const manual: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & {
- sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
-};
-export declare const windowsSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
-export declare const windows: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & {
- sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
-};
-export declare const posixSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
-export declare const posix: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & {
- sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
-};
-export declare const moveRemoveSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
-export declare const moveRemove: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & {
- sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
-};
-export declare const rimrafSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
-export declare const sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
-export declare const rimraf: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & {
- rimraf: (path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>;
- sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
- rimrafSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
- manual: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & {
- sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
- };
- manualSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
- native: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & {
- sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
- };
- nativeSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
- posix: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & {
- sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
- };
- posixSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
- windows: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & {
- sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
- };
- windowsSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
- moveRemove: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & {
- sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
- };
- moveRemoveSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
-};
-//# sourceMappingURL=index.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/index.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/index.d.ts.map
deleted file mode 100644
index 0dc659ca730252..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/index.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AACA,OAAO,EAGL,kBAAkB,EAClB,iBAAiB,EAClB,MAAM,cAAc,CAAA;AASrB,OAAO,EACL,mBAAmB,EACnB,eAAe,EACf,KAAK,kBAAkB,EACvB,KAAK,aAAa,EAClB,KAAK,iBAAiB,GACvB,MAAM,cAAc,CAAA;AAqCrB,eAAO,MAAM,UAAU,SAdd,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAcF,CAAA;AACpD,eAAO,MAAM,MAAM,UAjCT,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;iBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;CAegB,CAAA;AAE7E,eAAO,MAAM,UAAU,SAjBd,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAiBF,CAAA;AACpD,eAAO,MAAM,MAAM,UApCT,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;iBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;CAkBgB,CAAA;AAE7E,eAAO,MAAM,WAAW,SApBf,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAoBA,CAAA;AACtD,eAAO,MAAM,OAAO,UAvCV,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;iBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;CAqBmB,CAAA;AAEhF,eAAO,MAAM,SAAS,SAvBb,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAuBJ,CAAA;AAClD,eAAO,MAAM,KAAK,UA1CR,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;iBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;CAwBa,CAAA;AAE1E,eAAO,MAAM,cAAc,SA1BlB,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OA0BM,CAAA;AAC5D,eAAO,MAAM,UAAU,UA7Cb,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;iBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;CA6B3D,CAAA;AAEF,eAAO,MAAM,UAAU,SA/Bd,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAmCrD,CAAA;AACD,eAAO,MAAM,IAAI,SApCR,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAoCxB,CAAA;AAK9B,eAAO,MAAM,MAAM,UA3DT,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;mBAFX,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;iBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;uBAApD,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;oBAlBnD,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;qBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;;uBAApD,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;oBAlBnD,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;qBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;;uBAApD,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;mBAlBnD,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;qBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;;sBAApD,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;qBAlBnD,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;qBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;;wBAApD,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;wBAlBnD,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;qBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;;2BAApD,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;CAuD3D,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/index.js b/deps/npm/node_modules/rimraf/dist/esm/index.js
deleted file mode 100644
index d94d6f81a485c9..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/index.js
+++ /dev/null
@@ -1,70 +0,0 @@
-import { glob, globSync } from 'glob';
-import { optArg, optArgSync, } from './opt-arg.js';
-import pathArg from './path-arg.js';
-import { rimrafManual, rimrafManualSync } from './rimraf-manual.js';
-import { rimrafMoveRemove, rimrafMoveRemoveSync } from './rimraf-move-remove.js';
-import { rimrafNative, rimrafNativeSync } from './rimraf-native.js';
-import { rimrafPosix, rimrafPosixSync } from './rimraf-posix.js';
-import { rimrafWindows, rimrafWindowsSync } from './rimraf-windows.js';
-import { useNative, useNativeSync } from './use-native.js';
-export { assertRimrafOptions, isRimrafOptions, } from './opt-arg.js';
-const wrap = (fn) => async (path, opt) => {
- const options = optArg(opt);
- if (options.glob) {
- path = await glob(path, options.glob);
- }
- if (Array.isArray(path)) {
- return !!(await Promise.all(path.map(p => fn(pathArg(p, options), options)))).reduce((a, b) => a && b, true);
- }
- else {
- return !!(await fn(pathArg(path, options), options));
- }
-};
-const wrapSync = (fn) => (path, opt) => {
- const options = optArgSync(opt);
- if (options.glob) {
- path = globSync(path, options.glob);
- }
- if (Array.isArray(path)) {
- return !!path
- .map(p => fn(pathArg(p, options), options))
- .reduce((a, b) => a && b, true);
- }
- else {
- return !!fn(pathArg(path, options), options);
- }
-};
-export const nativeSync = wrapSync(rimrafNativeSync);
-export const native = Object.assign(wrap(rimrafNative), { sync: nativeSync });
-export const manualSync = wrapSync(rimrafManualSync);
-export const manual = Object.assign(wrap(rimrafManual), { sync: manualSync });
-export const windowsSync = wrapSync(rimrafWindowsSync);
-export const windows = Object.assign(wrap(rimrafWindows), { sync: windowsSync });
-export const posixSync = wrapSync(rimrafPosixSync);
-export const posix = Object.assign(wrap(rimrafPosix), { sync: posixSync });
-export const moveRemoveSync = wrapSync(rimrafMoveRemoveSync);
-export const moveRemove = Object.assign(wrap(rimrafMoveRemove), {
- sync: moveRemoveSync,
-});
-export const rimrafSync = wrapSync((path, opt) => useNativeSync(opt) ?
- rimrafNativeSync(path, opt)
- : rimrafManualSync(path, opt));
-export const sync = rimrafSync;
-const rimraf_ = wrap((path, opt) => useNative(opt) ? rimrafNative(path, opt) : rimrafManual(path, opt));
-export const rimraf = Object.assign(rimraf_, {
- rimraf: rimraf_,
- sync: rimrafSync,
- rimrafSync: rimrafSync,
- manual,
- manualSync,
- native,
- nativeSync,
- posix,
- posixSync,
- windows,
- windowsSync,
- moveRemove,
- moveRemoveSync,
-});
-rimraf.rimraf = rimraf;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/index.js.map b/deps/npm/node_modules/rimraf/dist/esm/index.js.map
deleted file mode 100644
index 0c8ca64c74f236..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/index.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,QAAQ,EAAE,MAAM,MAAM,CAAA;AACrC,OAAO,EACL,MAAM,EACN,UAAU,GAGX,MAAM,cAAc,CAAA;AACrB,OAAO,OAAO,MAAM,eAAe,CAAA;AACnC,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAA;AACnE,OAAO,EAAE,gBAAgB,EAAE,oBAAoB,EAAE,MAAM,yBAAyB,CAAA;AAChF,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAA;AACnE,OAAO,EAAE,WAAW,EAAE,eAAe,EAAE,MAAM,mBAAmB,CAAA;AAChE,OAAO,EAAE,aAAa,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAA;AACtE,OAAO,EAAE,SAAS,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAA;AAE1D,OAAO,EACL,mBAAmB,EACnB,eAAe,GAIhB,MAAM,cAAc,CAAA;AAErB,MAAM,IAAI,GACR,CAAC,EAA0D,EAAE,EAAE,CAC/D,KAAK,EACH,IAAuB,EACvB,GAAwB,EACN,EAAE;IACpB,MAAM,OAAO,GAAG,MAAM,CAAC,GAAG,CAAC,CAAA;IAC3B,IAAI,OAAO,CAAC,IAAI,EAAE,CAAC;QACjB,IAAI,GAAG,MAAM,IAAI,CAAC,IAAI,EAAE,OAAO,CAAC,IAAI,CAAC,CAAA;IACvC,CAAC;IACD,IAAI,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE,CAAC;QACxB,OAAO,CAAC,CAAC,CACP,MAAM,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,EAAE,OAAO,CAAC,EAAE,OAAO,CAAC,CAAC,CAAC,CACnE,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,CAAA;IAClC,CAAC;SAAM,CAAC;QACN,OAAO,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,OAAO,CAAC,IAAI,EAAE,OAAO,CAAC,EAAE,OAAO,CAAC,CAAC,CAAA;IACtD,CAAC;AACH,CAAC,CAAA;AAEH,MAAM,QAAQ,GACZ,CAAC,EAAgD,EAAE,EAAE,CACrD,CAAC,IAAuB,EAAE,GAAuB,EAAW,EAAE;IAC5D,MAAM,OAAO,GAAG,UAAU,CAAC,GAAG,CAAC,CAAA;IAC/B,IAAI,OAAO,CAAC,IAAI,EAAE,CAAC;QACjB,IAAI,GAAG,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC,IAAI,CAAC,CAAA;IACrC,CAAC;IACD,IAAI,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE,CAAC;QACxB,OAAO,CAAC,CAAC,IAAI;aACV,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,EAAE,OAAO,CAAC,EAAE,OAAO,CAAC,CAAC;aAC1C,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,CAAA;IACnC,CAAC;SAAM,CAAC;QACN,OAAO,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,IAAI,EAAE,OAAO,CAAC,EAAE,OAAO,CAAC,CAAA;IAC9C,CAAC;AACH,CAAC,CAAA;AAEH,MAAM,CAAC,MAAM,UAAU,GAAG,QAAQ,CAAC,gBAAgB,CAAC,CAAA;AACpD,MAAM,CAAC,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAE,EAAE,IAAI,EAAE,UAAU,EAAE,CAAC,CAAA;AAE7E,MAAM,CAAC,MAAM,UAAU,GAAG,QAAQ,CAAC,gBAAgB,CAAC,CAAA;AACpD,MAAM,CAAC,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAE,EAAE,IAAI,EAAE,UAAU,EAAE,CAAC,CAAA;AAE7E,MAAM,CAAC,MAAM,WAAW,GAAG,QAAQ,CAAC,iBAAiB,CAAC,CAAA;AACtD,MAAM,CAAC,MAAM,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,EAAE,EAAE,IAAI,EAAE,WAAW,EAAE,CAAC,CAAA;AAEhF,MAAM,CAAC,MAAM,SAAS,GAAG,QAAQ,CAAC,eAAe,CAAC,CAAA;AAClD,MAAM,CAAC,MAAM,KAAK,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,EAAE,EAAE,IAAI,EAAE,SAAS,EAAE,CAAC,CAAA;AAE1E,MAAM,CAAC,MAAM,cAAc,GAAG,QAAQ,CAAC,oBAAoB,CAAC,CAAA;AAC5D,MAAM,CAAC,MAAM,UAAU,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,gBAAgB,CAAC,EAAE;IAC9D,IAAI,EAAE,cAAc;CACrB,CAAC,CAAA;AAEF,MAAM,CAAC,MAAM,UAAU,GAAG,QAAQ,CAAC,CAAC,IAAI,EAAE,GAAG,EAAE,EAAE,CAC/C,aAAa,CAAC,GAAG,CAAC,CAAC,CAAC;IAClB,gBAAgB,CAAC,IAAI,EAAE,GAAG,CAAC;IAC7B,CAAC,CAAC,gBAAgB,CAAC,IAAI,EAAE,GAAG,CAAC,CAC9B,CAAA;AACD,MAAM,CAAC,MAAM,IAAI,GAAG,UAAU,CAAA;AAE9B,MAAM,OAAO,GAAG,IAAI,CAAC,CAAC,IAAI,EAAE,GAAG,EAAE,EAAE,CACjC,SAAS,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,IAAI,EAAE,GAAG,CAAC,CACnE,CAAA;AACD,MAAM,CAAC,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,OAAO,EAAE;IAC3C,MAAM,EAAE,OAAO;IACf,IAAI,EAAE,UAAU;IAChB,UAAU,EAAE,UAAU;IACtB,MAAM;IACN,UAAU;IACV,MAAM;IACN,UAAU;IACV,KAAK;IACL,SAAS;IACT,OAAO;IACP,WAAW;IACX,UAAU;IACV,cAAc;CACf,CAAC,CAAA;AACF,MAAM,CAAC,MAAM,GAAG,MAAM,CAAA","sourcesContent":["import { glob, globSync } from 'glob'\nimport {\n optArg,\n 
optArgSync,\n RimrafAsyncOptions,\n RimrafSyncOptions,\n} from './opt-arg.js'\nimport pathArg from './path-arg.js'\nimport { rimrafManual, rimrafManualSync } from './rimraf-manual.js'\nimport { rimrafMoveRemove, rimrafMoveRemoveSync } from './rimraf-move-remove.js'\nimport { rimrafNative, rimrafNativeSync } from './rimraf-native.js'\nimport { rimrafPosix, rimrafPosixSync } from './rimraf-posix.js'\nimport { rimrafWindows, rimrafWindowsSync } from './rimraf-windows.js'\nimport { useNative, useNativeSync } from './use-native.js'\n\nexport {\n assertRimrafOptions,\n isRimrafOptions,\n type RimrafAsyncOptions,\n type RimrafOptions,\n type RimrafSyncOptions,\n} from './opt-arg.js'\n\nconst wrap =\n (fn: (p: string, o: RimrafAsyncOptions) => Promise) =>\n async (\n path: string | string[],\n opt?: RimrafAsyncOptions,\n ): Promise => {\n const options = optArg(opt)\n if (options.glob) {\n path = await glob(path, options.glob)\n }\n if (Array.isArray(path)) {\n return !!(\n await Promise.all(path.map(p => fn(pathArg(p, options), options)))\n ).reduce((a, b) => a && b, true)\n } else {\n return !!(await fn(pathArg(path, options), options))\n }\n }\n\nconst wrapSync =\n (fn: (p: string, o: RimrafSyncOptions) => boolean) =>\n (path: string | string[], opt?: RimrafSyncOptions): boolean => {\n const options = optArgSync(opt)\n if (options.glob) {\n path = globSync(path, options.glob)\n }\n if (Array.isArray(path)) {\n return !!path\n .map(p => fn(pathArg(p, options), options))\n .reduce((a, b) => a && b, true)\n } else {\n return !!fn(pathArg(path, options), options)\n }\n }\n\nexport const nativeSync = wrapSync(rimrafNativeSync)\nexport const native = Object.assign(wrap(rimrafNative), { sync: nativeSync })\n\nexport const manualSync = wrapSync(rimrafManualSync)\nexport const manual = Object.assign(wrap(rimrafManual), { sync: manualSync })\n\nexport const windowsSync = wrapSync(rimrafWindowsSync)\nexport const windows = Object.assign(wrap(rimrafWindows), { sync: windowsSync })\n\nexport const posixSync = wrapSync(rimrafPosixSync)\nexport const posix = Object.assign(wrap(rimrafPosix), { sync: posixSync })\n\nexport const moveRemoveSync = wrapSync(rimrafMoveRemoveSync)\nexport const moveRemove = Object.assign(wrap(rimrafMoveRemove), {\n sync: moveRemoveSync,\n})\n\nexport const rimrafSync = wrapSync((path, opt) =>\n useNativeSync(opt) ?\n rimrafNativeSync(path, opt)\n : rimrafManualSync(path, opt),\n)\nexport const sync = rimrafSync\n\nconst rimraf_ = wrap((path, opt) =>\n useNative(opt) ? rimrafNative(path, opt) : rimrafManual(path, opt),\n)\nexport const rimraf = Object.assign(rimraf_, {\n rimraf: rimraf_,\n sync: rimrafSync,\n rimrafSync: rimrafSync,\n manual,\n manualSync,\n native,\n nativeSync,\n posix,\n posixSync,\n windows,\n windowsSync,\n moveRemove,\n moveRemoveSync,\n})\nrimraf.rimraf = rimraf\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/opt-arg.d.ts b/deps/npm/node_modules/rimraf/dist/esm/opt-arg.d.ts
deleted file mode 100644
index c869d4ae85251b..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/opt-arg.d.ts
+++ /dev/null
@@ -1,34 +0,0 @@
-import { Dirent, Stats } from 'fs';
-import { GlobOptions } from 'glob';
-export declare const isRimrafOptions: (o: any) => o is RimrafOptions;
-export declare const assertRimrafOptions: (o: any) => void;
-export interface RimrafAsyncOptions {
- preserveRoot?: boolean;
- tmp?: string;
- maxRetries?: number;
- retryDelay?: number;
- backoff?: number;
- maxBackoff?: number;
- signal?: AbortSignal;
- glob?: boolean | GlobOptions;
- filter?: ((path: string, ent: Dirent | Stats) => boolean) | ((path: string, ent: Dirent | Stats) => Promise<boolean>);
-}
-export interface RimrafSyncOptions extends RimrafAsyncOptions {
- filter?: (path: string, ent: Dirent | Stats) => boolean;
-}
-export type RimrafOptions = RimrafSyncOptions | RimrafAsyncOptions;
-export declare const optArg: (opt?: RimrafAsyncOptions) => (RimrafAsyncOptions & {
- glob: GlobOptions & {
- withFileTypes: false;
- };
-}) | (RimrafAsyncOptions & {
- glob: undefined;
-});
-export declare const optArgSync: (opt?: RimrafSyncOptions) => (RimrafSyncOptions & {
- glob: GlobOptions & {
- withFileTypes: false;
- };
-}) | (RimrafSyncOptions & {
- glob: undefined;
-});
-//# sourceMappingURL=opt-arg.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/opt-arg.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/opt-arg.d.ts.map
deleted file mode 100644
index 89e83b205ac628..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/opt-arg.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"opt-arg.d.ts","sourceRoot":"","sources":["../../src/opt-arg.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,IAAI,CAAA;AAClC,OAAO,EAAE,WAAW,EAAE,MAAM,MAAM,CAAA;AAKlC,eAAO,MAAM,eAAe,MAAO,GAAG,KAAG,CAAC,IAAI,aAUX,CAAA;AAEnC,eAAO,MAAM,mBAAmB,EAAE,CAAC,CAAC,EAAE,GAAG,KAAK,IAM7C,CAAA;AAED,MAAM,WAAW,kBAAkB;IACjC,YAAY,CAAC,EAAE,OAAO,CAAA;IACtB,GAAG,CAAC,EAAE,MAAM,CAAA;IACZ,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,OAAO,CAAC,EAAE,MAAM,CAAA;IAChB,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,MAAM,CAAC,EAAE,WAAW,CAAA;IACpB,IAAI,CAAC,EAAE,OAAO,GAAG,WAAW,CAAA;IAC5B,MAAM,CAAC,EACH,CAAC,CAAC,IAAI,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,GAAG,KAAK,KAAK,OAAO,CAAC,GAChD,CAAC,CAAC,IAAI,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,GAAG,KAAK,KAAK,OAAO,CAAC,OAAO,CAAC,CAAC,CAAA;CAC9D;AAED,MAAM,WAAW,iBAAkB,SAAQ,kBAAkB;IAC3D,MAAM,CAAC,EAAE,CAAC,IAAI,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,GAAG,KAAK,KAAK,OAAO,CAAA;CACxD;AAED,MAAM,MAAM,aAAa,GAAG,iBAAiB,GAAG,kBAAkB,CAAA;AAqClE,eAAO,MAAM,MAAM,SAAS,kBAAkB;UA/BlC,WAAW,GAAG;QAAE,aAAa,EAAE,KAAK,CAAA;KAAE;;UAEjC,SAAS;EA6B0C,CAAA;AACpE,eAAO,MAAM,UAAU,SAAS,iBAAiB;UAhCrC,WAAW,GAAG;QAAE,aAAa,EAAE,KAAK,CAAA;KAAE;;UAEjC,SAAS;EA8B6C,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/opt-arg.js b/deps/npm/node_modules/rimraf/dist/esm/opt-arg.js
deleted file mode 100644
index eacfe6c4325e22..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/opt-arg.js
+++ /dev/null
@@ -1,46 +0,0 @@
-const typeOrUndef = (val, t) => typeof val === 'undefined' || typeof val === t;
-export const isRimrafOptions = (o) => !!o &&
- typeof o === 'object' &&
- typeOrUndef(o.preserveRoot, 'boolean') &&
- typeOrUndef(o.tmp, 'string') &&
- typeOrUndef(o.maxRetries, 'number') &&
- typeOrUndef(o.retryDelay, 'number') &&
- typeOrUndef(o.backoff, 'number') &&
- typeOrUndef(o.maxBackoff, 'number') &&
- (typeOrUndef(o.glob, 'boolean') || (o.glob && typeof o.glob === 'object')) &&
- typeOrUndef(o.filter, 'function');
-export const assertRimrafOptions = (o) => {
- if (!isRimrafOptions(o)) {
- throw new Error('invalid rimraf options');
- }
-};
-const optArgT = (opt) => {
- assertRimrafOptions(opt);
- const { glob, ...options } = opt;
- if (!glob) {
- return options;
- }
- const globOpt = glob === true ?
- opt.signal ?
- { signal: opt.signal }
- : {}
- : opt.signal ?
- {
- signal: opt.signal,
- ...glob,
- }
- : glob;
- return {
- ...options,
- glob: {
- ...globOpt,
- // always get absolute paths from glob, to ensure
- // that we are referencing the correct thing.
- absolute: true,
- withFileTypes: false,
- },
- };
-};
-export const optArg = (opt = {}) => optArgT(opt);
-export const optArgSync = (opt = {}) => optArgT(opt);
-//# sourceMappingURL=opt-arg.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/opt-arg.js.map b/deps/npm/node_modules/rimraf/dist/esm/opt-arg.js.map
deleted file mode 100644
index 82ff94f6e05b8e..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/opt-arg.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"opt-arg.js","sourceRoot":"","sources":["../../src/opt-arg.ts"],"names":[],"mappings":"AAGA,MAAM,WAAW,GAAG,CAAC,GAAQ,EAAE,CAAS,EAAE,EAAE,CAC1C,OAAO,GAAG,KAAK,WAAW,IAAI,OAAO,GAAG,KAAK,CAAC,CAAA;AAEhD,MAAM,CAAC,MAAM,eAAe,GAAG,CAAC,CAAM,EAAsB,EAAE,CAC5D,CAAC,CAAC,CAAC;IACH,OAAO,CAAC,KAAK,QAAQ;IACrB,WAAW,CAAC,CAAC,CAAC,YAAY,EAAE,SAAS,CAAC;IACtC,WAAW,CAAC,CAAC,CAAC,GAAG,EAAE,QAAQ,CAAC;IAC5B,WAAW,CAAC,CAAC,CAAC,UAAU,EAAE,QAAQ,CAAC;IACnC,WAAW,CAAC,CAAC,CAAC,UAAU,EAAE,QAAQ,CAAC;IACnC,WAAW,CAAC,CAAC,CAAC,OAAO,EAAE,QAAQ,CAAC;IAChC,WAAW,CAAC,CAAC,CAAC,UAAU,EAAE,QAAQ,CAAC;IACnC,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,EAAE,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,IAAI,OAAO,CAAC,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAC;IAC1E,WAAW,CAAC,CAAC,CAAC,MAAM,EAAE,UAAU,CAAC,CAAA;AAEnC,MAAM,CAAC,MAAM,mBAAmB,GAAqB,CACnD,CAAM,EACsB,EAAE;IAC9B,IAAI,CAAC,eAAe,CAAC,CAAC,CAAC,EAAE,CAAC;QACxB,MAAM,IAAI,KAAK,CAAC,wBAAwB,CAAC,CAAA;IAC3C,CAAC;AACH,CAAC,CAAA;AAsBD,MAAM,OAAO,GAAG,CACd,GAAM,EAKsB,EAAE;IAC9B,mBAAmB,CAAC,GAAG,CAAC,CAAA;IACxB,MAAM,EAAE,IAAI,EAAE,GAAG,OAAO,EAAE,GAAG,GAAG,CAAA;IAChC,IAAI,CAAC,IAAI,EAAE,CAAC;QACV,OAAO,OAAkC,CAAA;IAC3C,CAAC;IACD,MAAM,OAAO,GACX,IAAI,KAAK,IAAI,CAAC,CAAC;QACb,GAAG,CAAC,MAAM,CAAC,CAAC;YACV,EAAE,MAAM,EAAE,GAAG,CAAC,MAAM,EAAE;YACxB,CAAC,CAAC,EAAE;QACN,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;YACZ;gBACE,MAAM,EAAE,GAAG,CAAC,MAAM;gBAClB,GAAG,IAAI;aACR;YACH,CAAC,CAAC,IAAI,CAAA;IACR,OAAO;QACL,GAAG,OAAO;QACV,IAAI,EAAE;YACJ,GAAG,OAAO;YACV,iDAAiD;YACjD,6CAA6C;YAC7C,QAAQ,EAAE,IAAI;YACd,aAAa,EAAE,KAAK;SACrB;KACsD,CAAA;AAC3D,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,MAAM,GAAG,CAAC,MAA0B,EAAE,EAAE,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAA;AACpE,MAAM,CAAC,MAAM,UAAU,GAAG,CAAC,MAAyB,EAAE,EAAE,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAA","sourcesContent":["import { Dirent, Stats } from 'fs'\nimport { GlobOptions } from 'glob'\n\nconst typeOrUndef = (val: any, t: string) =>\n typeof val === 'undefined' || typeof val === t\n\nexport const isRimrafOptions = (o: any): o is RimrafOptions =>\n !!o &&\n typeof o === 'object' &&\n typeOrUndef(o.preserveRoot, 'boolean') &&\n typeOrUndef(o.tmp, 'string') &&\n typeOrUndef(o.maxRetries, 'number') &&\n typeOrUndef(o.retryDelay, 'number') &&\n typeOrUndef(o.backoff, 'number') &&\n typeOrUndef(o.maxBackoff, 'number') &&\n (typeOrUndef(o.glob, 'boolean') || (o.glob && typeof o.glob === 'object')) &&\n typeOrUndef(o.filter, 'function')\n\nexport const assertRimrafOptions: (o: any) => void = (\n o: any,\n): asserts o is RimrafOptions => {\n if (!isRimrafOptions(o)) {\n throw new Error('invalid rimraf options')\n }\n}\n\nexport interface RimrafAsyncOptions {\n preserveRoot?: boolean\n tmp?: string\n maxRetries?: number\n retryDelay?: number\n backoff?: number\n maxBackoff?: number\n signal?: AbortSignal\n glob?: boolean | GlobOptions\n filter?:\n | ((path: string, ent: Dirent | Stats) => boolean)\n | ((path: string, ent: Dirent | Stats) => Promise)\n}\n\nexport interface RimrafSyncOptions extends RimrafAsyncOptions {\n filter?: (path: string, ent: Dirent | Stats) => boolean\n}\n\nexport type RimrafOptions = RimrafSyncOptions | RimrafAsyncOptions\n\nconst optArgT = (\n opt: T,\n):\n | (T & {\n glob: GlobOptions & { withFileTypes: false }\n })\n | (T & { glob: undefined }) => {\n assertRimrafOptions(opt)\n const { glob, ...options } = opt\n if (!glob) {\n return options as T & { glob: undefined }\n }\n const globOpt =\n glob === true ?\n opt.signal ?\n { signal: opt.signal }\n : {}\n : opt.signal ?\n {\n signal: opt.signal,\n ...glob,\n }\n : glob\n return {\n ...options,\n glob: {\n ...globOpt,\n // always get 
absolute paths from glob, to ensure\n // that we are referencing the correct thing.\n absolute: true,\n withFileTypes: false,\n },\n } as T & { glob: GlobOptions & { withFileTypes: false } }\n}\n\nexport const optArg = (opt: RimrafAsyncOptions = {}) => optArgT(opt)\nexport const optArgSync = (opt: RimrafSyncOptions = {}) => optArgT(opt)\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/package.json b/deps/npm/node_modules/rimraf/dist/esm/package.json
deleted file mode 100644
index 3dbc1ca591c055..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
- "type": "module"
-}
diff --git a/deps/npm/node_modules/rimraf/dist/esm/path-arg.d.ts b/deps/npm/node_modules/rimraf/dist/esm/path-arg.d.ts
deleted file mode 100644
index c0b7e7cb4b15e3..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/path-arg.d.ts
+++ /dev/null
@@ -1,4 +0,0 @@
-import { RimrafAsyncOptions } from './index.js';
-declare const pathArg: (path: string, opt?: RimrafAsyncOptions) => string;
-export default pathArg;
-//# sourceMappingURL=path-arg.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/path-arg.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/path-arg.d.ts.map
deleted file mode 100644
index 4fe93c3a8aec47..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/path-arg.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"path-arg.d.ts","sourceRoot":"","sources":["../../src/path-arg.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,kBAAkB,EAAE,MAAM,YAAY,CAAA;AAG/C,QAAA,MAAM,OAAO,SAAU,MAAM,QAAO,kBAAkB,WAgDrD,CAAA;AAED,eAAe,OAAO,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/path-arg.js b/deps/npm/node_modules/rimraf/dist/esm/path-arg.js
deleted file mode 100644
index f32cb106756dbc..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/path-arg.js
+++ /dev/null
@@ -1,47 +0,0 @@
-import { parse, resolve } from 'path';
-import { inspect } from 'util';
-import platform from './platform.js';
-const pathArg = (path, opt = {}) => {
- const type = typeof path;
- if (type !== 'string') {
- const ctor = path && type === 'object' && path.constructor;
- const received = ctor && ctor.name ? `an instance of ${ctor.name}`
- : type === 'object' ? inspect(path)
- : `type ${type} ${path}`;
- const msg = 'The "path" argument must be of type string. ' + `Received ${received}`;
- throw Object.assign(new TypeError(msg), {
- path,
- code: 'ERR_INVALID_ARG_TYPE',
- });
- }
- if (/\0/.test(path)) {
- // simulate same failure that node raises
- const msg = 'path must be a string without null bytes';
- throw Object.assign(new TypeError(msg), {
- path,
- code: 'ERR_INVALID_ARG_VALUE',
- });
- }
- path = resolve(path);
- const { root } = parse(path);
- if (path === root && opt.preserveRoot !== false) {
- const msg = 'refusing to remove root directory without preserveRoot:false';
- throw Object.assign(new Error(msg), {
- path,
- code: 'ERR_PRESERVE_ROOT',
- });
- }
- if (platform === 'win32') {
- const badWinChars = /[*|"<>?:]/;
- const { root } = parse(path);
- if (badWinChars.test(path.substring(root.length))) {
- throw Object.assign(new Error('Illegal characters in path.'), {
- path,
- code: 'EINVAL',
- });
- }
- }
- return path;
-};
-export default pathArg;
-//# sourceMappingURL=path-arg.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/path-arg.js.map b/deps/npm/node_modules/rimraf/dist/esm/path-arg.js.map
deleted file mode 100644
index 2f73f5f38929be..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/path-arg.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"path-arg.js","sourceRoot":"","sources":["../../src/path-arg.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,EAAE,OAAO,EAAE,MAAM,MAAM,CAAA;AACrC,OAAO,EAAE,OAAO,EAAE,MAAM,MAAM,CAAA;AAE9B,OAAO,QAAQ,MAAM,eAAe,CAAA;AAEpC,MAAM,OAAO,GAAG,CAAC,IAAY,EAAE,MAA0B,EAAE,EAAE,EAAE;IAC7D,MAAM,IAAI,GAAG,OAAO,IAAI,CAAA;IACxB,IAAI,IAAI,KAAK,QAAQ,EAAE,CAAC;QACtB,MAAM,IAAI,GAAG,IAAI,IAAI,IAAI,KAAK,QAAQ,IAAI,IAAI,CAAC,WAAW,CAAA;QAC1D,MAAM,QAAQ,GACZ,IAAI,IAAI,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,kBAAkB,IAAI,CAAC,IAAI,EAAE;YACjD,CAAC,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC;gBACnC,CAAC,CAAC,QAAQ,IAAI,IAAI,IAAI,EAAE,CAAA;QAC1B,MAAM,GAAG,GACP,8CAA8C,GAAG,YAAY,QAAQ,EAAE,CAAA;QACzE,MAAM,MAAM,CAAC,MAAM,CAAC,IAAI,SAAS,CAAC,GAAG,CAAC,EAAE;YACtC,IAAI;YACJ,IAAI,EAAE,sBAAsB;SAC7B,CAAC,CAAA;IACJ,CAAC;IAED,IAAI,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC;QACpB,yCAAyC;QACzC,MAAM,GAAG,GAAG,0CAA0C,CAAA;QACtD,MAAM,MAAM,CAAC,MAAM,CAAC,IAAI,SAAS,CAAC,GAAG,CAAC,EAAE;YACtC,IAAI;YACJ,IAAI,EAAE,uBAAuB;SAC9B,CAAC,CAAA;IACJ,CAAC;IAED,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IACpB,MAAM,EAAE,IAAI,EAAE,GAAG,KAAK,CAAC,IAAI,CAAC,CAAA;IAE5B,IAAI,IAAI,KAAK,IAAI,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,EAAE,CAAC;QAChD,MAAM,GAAG,GAAG,8DAA8D,CAAA;QAC1E,MAAM,MAAM,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,GAAG,CAAC,EAAE;YAClC,IAAI;YACJ,IAAI,EAAE,mBAAmB;SAC1B,CAAC,CAAA;IACJ,CAAC;IAED,IAAI,QAAQ,KAAK,OAAO,EAAE,CAAC;QACzB,MAAM,WAAW,GAAG,WAAW,CAAA;QAC/B,MAAM,EAAE,IAAI,EAAE,GAAG,KAAK,CAAC,IAAI,CAAC,CAAA;QAC5B,IAAI,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC;YAClD,MAAM,MAAM,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,6BAA6B,CAAC,EAAE;gBAC5D,IAAI;gBACJ,IAAI,EAAE,QAAQ;aACf,CAAC,CAAA;QACJ,CAAC;IACH,CAAC;IAED,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,eAAe,OAAO,CAAA","sourcesContent":["import { parse, resolve } from 'path'\nimport { inspect } from 'util'\nimport { RimrafAsyncOptions } from './index.js'\nimport platform from './platform.js'\n\nconst pathArg = (path: string, opt: RimrafAsyncOptions = {}) => {\n const type = typeof path\n if (type !== 'string') {\n const ctor = path && type === 'object' && path.constructor\n const received =\n ctor && ctor.name ? `an instance of ${ctor.name}`\n : type === 'object' ? inspect(path)\n : `type ${type} ${path}`\n const msg =\n 'The \"path\" argument must be of type string. ' + `Received ${received}`\n throw Object.assign(new TypeError(msg), {\n path,\n code: 'ERR_INVALID_ARG_TYPE',\n })\n }\n\n if (/\\0/.test(path)) {\n // simulate same failure that node raises\n const msg = 'path must be a string without null bytes'\n throw Object.assign(new TypeError(msg), {\n path,\n code: 'ERR_INVALID_ARG_VALUE',\n })\n }\n\n path = resolve(path)\n const { root } = parse(path)\n\n if (path === root && opt.preserveRoot !== false) {\n const msg = 'refusing to remove root directory without preserveRoot:false'\n throw Object.assign(new Error(msg), {\n path,\n code: 'ERR_PRESERVE_ROOT',\n })\n }\n\n if (platform === 'win32') {\n const badWinChars = /[*|\"<>?:]/\n const { root } = parse(path)\n if (badWinChars.test(path.substring(root.length))) {\n throw Object.assign(new Error('Illegal characters in path.'), {\n path,\n code: 'EINVAL',\n })\n }\n }\n\n return path\n}\n\nexport default pathArg\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/platform.d.ts b/deps/npm/node_modules/rimraf/dist/esm/platform.d.ts
deleted file mode 100644
index e127a8e529ffd2..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/platform.d.ts
+++ /dev/null
@@ -1,3 +0,0 @@
-declare const _default: string;
-export default _default;
-//# sourceMappingURL=platform.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/platform.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/platform.d.ts.map
deleted file mode 100644
index ef2e6734f8cfbb..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/platform.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"platform.d.ts","sourceRoot":"","sources":["../../src/platform.ts"],"names":[],"mappings":";AAAA,wBAA0E"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/platform.js b/deps/npm/node_modules/rimraf/dist/esm/platform.js
deleted file mode 100644
index a2641721b78190..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/platform.js
+++ /dev/null
@@ -1,2 +0,0 @@
-export default process.env.__TESTING_RIMRAF_PLATFORM__ || process.platform;
-//# sourceMappingURL=platform.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/platform.js.map b/deps/npm/node_modules/rimraf/dist/esm/platform.js.map
deleted file mode 100644
index c5fdaf9c0e428c..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/platform.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"platform.js","sourceRoot":"","sources":["../../src/platform.ts"],"names":[],"mappings":"AAAA,eAAe,OAAO,CAAC,GAAG,CAAC,2BAA2B,IAAI,OAAO,CAAC,QAAQ,CAAA","sourcesContent":["export default process.env.__TESTING_RIMRAF_PLATFORM__ || process.platform\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/readdir-or-error.d.ts b/deps/npm/node_modules/rimraf/dist/esm/readdir-or-error.d.ts
deleted file mode 100644
index cce73097f1681f..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/readdir-or-error.d.ts
+++ /dev/null
@@ -1,3 +0,0 @@
-export declare const readdirOrError: (path: string) => Promise<import("fs").Dirent[] | NodeJS.ErrnoException>;
-export declare const readdirOrErrorSync: (path: string) => import("fs").Dirent[] | NodeJS.ErrnoException;
-//# sourceMappingURL=readdir-or-error.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/readdir-or-error.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/readdir-or-error.d.ts.map
deleted file mode 100644
index 8a19f6bdfd0706..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/readdir-or-error.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"readdir-or-error.d.ts","sourceRoot":"","sources":["../../src/readdir-or-error.ts"],"names":[],"mappings":"AAIA,eAAO,MAAM,cAAc,SAAU,MAAM,2DACa,CAAA;AACxD,eAAO,MAAM,kBAAkB,SAAU,MAAM,kDAM9C,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/readdir-or-error.js b/deps/npm/node_modules/rimraf/dist/esm/readdir-or-error.js
deleted file mode 100644
index 71235135c63009..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/readdir-or-error.js
+++ /dev/null
@@ -1,14 +0,0 @@
-// returns an array of entries if readdir() works,
-// or the error that readdir() raised if not.
-import { promises, readdirSync } from './fs.js';
-const { readdir } = promises;
-export const readdirOrError = (path) => readdir(path).catch(er => er);
-export const readdirOrErrorSync = (path) => {
- try {
- return readdirSync(path);
- }
- catch (er) {
- return er;
- }
-};
-//# sourceMappingURL=readdir-or-error.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/readdir-or-error.js.map b/deps/npm/node_modules/rimraf/dist/esm/readdir-or-error.js.map
deleted file mode 100644
index 1d0c00efde2e76..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/readdir-or-error.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"readdir-or-error.js","sourceRoot":"","sources":["../../src/readdir-or-error.ts"],"names":[],"mappings":"AAAA,kDAAkD;AAClD,6CAA6C;AAC7C,OAAO,EAAE,QAAQ,EAAE,WAAW,EAAE,MAAM,SAAS,CAAA;AAC/C,MAAM,EAAE,OAAO,EAAE,GAAG,QAAQ,CAAA;AAC5B,MAAM,CAAC,MAAM,cAAc,GAAG,CAAC,IAAY,EAAE,EAAE,CAC7C,OAAO,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE,CAAC,EAA2B,CAAC,CAAA;AACxD,MAAM,CAAC,MAAM,kBAAkB,GAAG,CAAC,IAAY,EAAE,EAAE;IACjD,IAAI,CAAC;QACH,OAAO,WAAW,CAAC,IAAI,CAAC,CAAA;IAC1B,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,OAAO,EAA2B,CAAA;IACpC,CAAC;AACH,CAAC,CAAA","sourcesContent":["// returns an array of entries if readdir() works,\n// or the error that readdir() raised if not.\nimport { promises, readdirSync } from './fs.js'\nconst { readdir } = promises\nexport const readdirOrError = (path: string) =>\n readdir(path).catch(er => er as NodeJS.ErrnoException)\nexport const readdirOrErrorSync = (path: string) => {\n try {\n return readdirSync(path)\n } catch (er) {\n return er as NodeJS.ErrnoException\n }\n}\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/retry-busy.d.ts b/deps/npm/node_modules/rimraf/dist/esm/retry-busy.d.ts
deleted file mode 100644
index c0af0dd62f0df9..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/retry-busy.d.ts
+++ /dev/null
@@ -1,8 +0,0 @@
-import { RimrafAsyncOptions, RimrafOptions } from './index.js';
-export declare const MAXBACKOFF = 200;
-export declare const RATE = 1.2;
-export declare const MAXRETRIES = 10;
-export declare const codes: Set<string>;
-export declare const retryBusy: (fn: (path: string) => Promise<any>) => (path: string, opt: RimrafAsyncOptions, backoff?: number, total?: number) => Promise<any>;
-export declare const retryBusySync: (fn: (path: string) => any) => (path: string, opt: RimrafOptions) => any;
-//# sourceMappingURL=retry-busy.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/retry-busy.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/retry-busy.d.ts.map
deleted file mode 100644
index 21960c58914b4b..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/retry-busy.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"retry-busy.d.ts","sourceRoot":"","sources":["../../src/retry-busy.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,kBAAkB,EAAE,aAAa,EAAE,MAAM,YAAY,CAAA;AAE9D,eAAO,MAAM,UAAU,MAAM,CAAA;AAC7B,eAAO,MAAM,IAAI,MAAM,CAAA;AACvB,eAAO,MAAM,UAAU,KAAK,CAAA;AAC5B,eAAO,MAAM,KAAK,aAAyC,CAAA;AAE3D,eAAO,MAAM,SAAS,OAAQ,CAAC,IAAI,EAAE,MAAM,KAAK,OAAO,CAAC,GAAG,CAAC,YAElD,MAAM,OACP,kBAAkB,mDAkC1B,CAAA;AAGD,eAAO,MAAM,aAAa,OAAQ,CAAC,IAAI,EAAE,MAAM,KAAK,GAAG,YAC/B,MAAM,OAAO,aAAa,QAsBjD,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/retry-busy.js b/deps/npm/node_modules/rimraf/dist/esm/retry-busy.js
deleted file mode 100644
index 17e336a4d583f5..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/retry-busy.js
+++ /dev/null
@@ -1,63 +0,0 @@
-// note: max backoff is the maximum that any *single* backoff will do
-export const MAXBACKOFF = 200;
-export const RATE = 1.2;
-export const MAXRETRIES = 10;
-export const codes = new Set(['EMFILE', 'ENFILE', 'EBUSY']);
-export const retryBusy = (fn) => {
- const method = async (path, opt, backoff = 1, total = 0) => {
- const mbo = opt.maxBackoff || MAXBACKOFF;
- const rate = opt.backoff || RATE;
- const max = opt.maxRetries || MAXRETRIES;
- let retries = 0;
- while (true) {
- try {
- return await fn(path);
- }
- catch (er) {
- const fer = er;
- if (fer?.path === path && fer?.code && codes.has(fer.code)) {
- backoff = Math.ceil(backoff * rate);
- total = backoff + total;
- if (total < mbo) {
- return new Promise((res, rej) => {
- setTimeout(() => {
- method(path, opt, backoff, total).then(res, rej);
- }, backoff);
- });
- }
- if (retries < max) {
- retries++;
- continue;
- }
- }
- throw er;
- }
- }
- };
- return method;
-};
-// just retries, no async so no backoff
-export const retryBusySync = (fn) => {
- const method = (path, opt) => {
- const max = opt.maxRetries || MAXRETRIES;
- let retries = 0;
- while (true) {
- try {
- return fn(path);
- }
- catch (er) {
- const fer = er;
- if (fer?.path === path &&
- fer?.code &&
- codes.has(fer.code) &&
- retries < max) {
- retries++;
- continue;
- }
- throw er;
- }
- }
- };
- return method;
-};
-//# sourceMappingURL=retry-busy.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/retry-busy.js.map b/deps/npm/node_modules/rimraf/dist/esm/retry-busy.js.map
deleted file mode 100644
index f4239eb4142234..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/retry-busy.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"retry-busy.js","sourceRoot":"","sources":["../../src/retry-busy.ts"],"names":[],"mappings":"AAAA,qEAAqE;AAIrE,MAAM,CAAC,MAAM,UAAU,GAAG,GAAG,CAAA;AAC7B,MAAM,CAAC,MAAM,IAAI,GAAG,GAAG,CAAA;AACvB,MAAM,CAAC,MAAM,UAAU,GAAG,EAAE,CAAA;AAC5B,MAAM,CAAC,MAAM,KAAK,GAAG,IAAI,GAAG,CAAC,CAAC,QAAQ,EAAE,QAAQ,EAAE,OAAO,CAAC,CAAC,CAAA;AAE3D,MAAM,CAAC,MAAM,SAAS,GAAG,CAAC,EAAkC,EAAE,EAAE;IAC9D,MAAM,MAAM,GAAG,KAAK,EAClB,IAAY,EACZ,GAAuB,EACvB,OAAO,GAAG,CAAC,EACX,KAAK,GAAG,CAAC,EACT,EAAE;QACF,MAAM,GAAG,GAAG,GAAG,CAAC,UAAU,IAAI,UAAU,CAAA;QACxC,MAAM,IAAI,GAAG,GAAG,CAAC,OAAO,IAAI,IAAI,CAAA;QAChC,MAAM,GAAG,GAAG,GAAG,CAAC,UAAU,IAAI,UAAU,CAAA;QACxC,IAAI,OAAO,GAAG,CAAC,CAAA;QACf,OAAO,IAAI,EAAE,CAAC;YACZ,IAAI,CAAC;gBACH,OAAO,MAAM,EAAE,CAAC,IAAI,CAAC,CAAA;YACvB,CAAC;YAAC,OAAO,EAAE,EAAE,CAAC;gBACZ,MAAM,GAAG,GAAG,EAA2B,CAAA;gBACvC,IAAI,GAAG,EAAE,IAAI,KAAK,IAAI,IAAI,GAAG,EAAE,IAAI,IAAI,KAAK,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC;oBAC3D,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,CAAA;oBACnC,KAAK,GAAG,OAAO,GAAG,KAAK,CAAA;oBACvB,IAAI,KAAK,GAAG,GAAG,EAAE,CAAC;wBAChB,OAAO,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE;4BAC9B,UAAU,CAAC,GAAG,EAAE;gCACd,MAAM,CAAC,IAAI,EAAE,GAAG,EAAE,OAAO,EAAE,KAAK,CAAC,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,CAAC,CAAA;4BAClD,CAAC,EAAE,OAAO,CAAC,CAAA;wBACb,CAAC,CAAC,CAAA;oBACJ,CAAC;oBACD,IAAI,OAAO,GAAG,GAAG,EAAE,CAAC;wBAClB,OAAO,EAAE,CAAA;wBACT,SAAQ;oBACV,CAAC;gBACH,CAAC;gBACD,MAAM,EAAE,CAAA;YACV,CAAC;QACH,CAAC;IACH,CAAC,CAAA;IAED,OAAO,MAAM,CAAA;AACf,CAAC,CAAA;AAED,uCAAuC;AACvC,MAAM,CAAC,MAAM,aAAa,GAAG,CAAC,EAAyB,EAAE,EAAE;IACzD,MAAM,MAAM,GAAG,CAAC,IAAY,EAAE,GAAkB,EAAE,EAAE;QAClD,MAAM,GAAG,GAAG,GAAG,CAAC,UAAU,IAAI,UAAU,CAAA;QACxC,IAAI,OAAO,GAAG,CAAC,CAAA;QACf,OAAO,IAAI,EAAE,CAAC;YACZ,IAAI,CAAC;gBACH,OAAO,EAAE,CAAC,IAAI,CAAC,CAAA;YACjB,CAAC;YAAC,OAAO,EAAE,EAAE,CAAC;gBACZ,MAAM,GAAG,GAAG,EAA2B,CAAA;gBACvC,IACE,GAAG,EAAE,IAAI,KAAK,IAAI;oBAClB,GAAG,EAAE,IAAI;oBACT,KAAK,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI,CAAC;oBACnB,OAAO,GAAG,GAAG,EACb,CAAC;oBACD,OAAO,EAAE,CAAA;oBACT,SAAQ;gBACV,CAAC;gBACD,MAAM,EAAE,CAAA;YACV,CAAC;QACH,CAAC;IACH,CAAC,CAAA;IACD,OAAO,MAAM,CAAA;AACf,CAAC,CAAA","sourcesContent":["// note: max backoff is the maximum that any *single* backoff will do\n\nimport { RimrafAsyncOptions, RimrafOptions } from './index.js'\n\nexport const MAXBACKOFF = 200\nexport const RATE = 1.2\nexport const MAXRETRIES = 10\nexport const codes = new Set(['EMFILE', 'ENFILE', 'EBUSY'])\n\nexport const retryBusy = (fn: (path: string) => Promise) => {\n const method = async (\n path: string,\n opt: RimrafAsyncOptions,\n backoff = 1,\n total = 0,\n ) => {\n const mbo = opt.maxBackoff || MAXBACKOFF\n const rate = opt.backoff || RATE\n const max = opt.maxRetries || MAXRETRIES\n let retries = 0\n while (true) {\n try {\n return await fn(path)\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n if (fer?.path === path && fer?.code && codes.has(fer.code)) {\n backoff = Math.ceil(backoff * rate)\n total = backoff + total\n if (total < mbo) {\n return new Promise((res, rej) => {\n setTimeout(() => {\n method(path, opt, backoff, total).then(res, rej)\n }, backoff)\n })\n }\n if (retries < max) {\n retries++\n continue\n }\n }\n throw er\n }\n }\n }\n\n return method\n}\n\n// just retries, no async so no backoff\nexport const retryBusySync = (fn: (path: string) => any) => {\n const method = (path: string, opt: RimrafOptions) => {\n const max = opt.maxRetries || MAXRETRIES\n let retries = 0\n while (true) {\n try {\n return fn(path)\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n if (\n 
fer?.path === path &&\n fer?.code &&\n codes.has(fer.code) &&\n retries < max\n ) {\n retries++\n continue\n }\n throw er\n }\n }\n }\n return method\n}\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-manual.d.ts b/deps/npm/node_modules/rimraf/dist/esm/rimraf-manual.d.ts
deleted file mode 100644
index 35c5c86844c7fa..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/rimraf-manual.d.ts
+++ /dev/null
@@ -1,3 +0,0 @@
-export declare const rimrafManual: (path: string, opt: import("./opt-arg.js").RimrafAsyncOptions) => Promise<boolean>;
-export declare const rimrafManualSync: (path: string, opt: import("./opt-arg.js").RimrafSyncOptions) => boolean;
-//# sourceMappingURL=rimraf-manual.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-manual.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/rimraf-manual.d.ts.map
deleted file mode 100644
index 19bd25149ceb07..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/rimraf-manual.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"rimraf-manual.d.ts","sourceRoot":"","sources":["../../src/rimraf-manual.ts"],"names":[],"mappings":"AAKA,eAAO,MAAM,YAAY,oFAAqD,CAAA;AAC9E,eAAO,MAAM,gBAAgB,0EAC+B,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-manual.js b/deps/npm/node_modules/rimraf/dist/esm/rimraf-manual.js
deleted file mode 100644
index 132708ffaa5870..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/rimraf-manual.js
+++ /dev/null
@@ -1,6 +0,0 @@
-import platform from './platform.js';
-import { rimrafPosix, rimrafPosixSync } from './rimraf-posix.js';
-import { rimrafWindows, rimrafWindowsSync } from './rimraf-windows.js';
-export const rimrafManual = platform === 'win32' ? rimrafWindows : rimrafPosix;
-export const rimrafManualSync = platform === 'win32' ? rimrafWindowsSync : rimrafPosixSync;
-//# sourceMappingURL=rimraf-manual.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-manual.js.map b/deps/npm/node_modules/rimraf/dist/esm/rimraf-manual.js.map
deleted file mode 100644
index 212c815dc356d0..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/rimraf-manual.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"rimraf-manual.js","sourceRoot":"","sources":["../../src/rimraf-manual.ts"],"names":[],"mappings":"AAAA,OAAO,QAAQ,MAAM,eAAe,CAAA;AAEpC,OAAO,EAAE,WAAW,EAAE,eAAe,EAAE,MAAM,mBAAmB,CAAA;AAChE,OAAO,EAAE,aAAa,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAA;AAEtE,MAAM,CAAC,MAAM,YAAY,GAAG,QAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC,WAAW,CAAA;AAC9E,MAAM,CAAC,MAAM,gBAAgB,GAC3B,QAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,iBAAiB,CAAC,CAAC,CAAC,eAAe,CAAA","sourcesContent":["import platform from './platform.js'\n\nimport { rimrafPosix, rimrafPosixSync } from './rimraf-posix.js'\nimport { rimrafWindows, rimrafWindowsSync } from './rimraf-windows.js'\n\nexport const rimrafManual = platform === 'win32' ? rimrafWindows : rimrafPosix\nexport const rimrafManualSync =\n platform === 'win32' ? rimrafWindowsSync : rimrafPosixSync\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-move-remove.d.ts b/deps/npm/node_modules/rimraf/dist/esm/rimraf-move-remove.d.ts
deleted file mode 100644
index 5d41d40825e4c7..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/rimraf-move-remove.d.ts
+++ /dev/null
@@ -1,4 +0,0 @@
-import { RimrafAsyncOptions, RimrafSyncOptions } from './index.js';
-export declare const rimrafMoveRemove: (path: string, opt: RimrafAsyncOptions) => Promise<boolean>;
-export declare const rimrafMoveRemoveSync: (path: string, opt: RimrafSyncOptions) => boolean;
-//# sourceMappingURL=rimraf-move-remove.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-move-remove.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/rimraf-move-remove.d.ts.map
deleted file mode 100644
index 4062eaebbb1302..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/rimraf-move-remove.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"rimraf-move-remove.d.ts","sourceRoot":"","sources":["../../src/rimraf-move-remove.ts"],"names":[],"mappings":"AA6BA,OAAO,EAAE,kBAAkB,EAAE,iBAAiB,EAAE,MAAM,YAAY,CAAA;AA4ClE,eAAO,MAAM,gBAAgB,SACrB,MAAM,OACP,kBAAkB,qBAWxB,CAAA;AA4ED,eAAO,MAAM,oBAAoB,SAAU,MAAM,OAAO,iBAAiB,YAUxE,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-move-remove.js b/deps/npm/node_modules/rimraf/dist/esm/rimraf-move-remove.js
deleted file mode 100644
index 093e40f49f5a2e..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/rimraf-move-remove.js
+++ /dev/null
@@ -1,187 +0,0 @@
-// https://youtu.be/uhRWMGBjlO8?t=537
-//
-// 1. readdir
-// 2. for each entry
-// a. if a non-empty directory, recurse
-// b. if an empty directory, move to random hidden file name in $TEMP
-// c. unlink/rmdir $TEMP
-//
-// This works around the fact that unlink/rmdir is non-atomic and takes
-// a non-deterministic amount of time to complete.
-//
-// However, it is HELLA SLOW, like 2-10x slower than a naive recursive rm.
-import { basename, parse, resolve } from 'path';
-import { defaultTmp, defaultTmpSync } from './default-tmp.js';
-import { ignoreENOENT, ignoreENOENTSync } from './ignore-enoent.js';
-import { chmodSync, lstatSync, promises as fsPromises, renameSync, rmdirSync, unlinkSync, } from './fs.js';
-const { lstat, rename, unlink, rmdir, chmod } = fsPromises;
-import { readdirOrError, readdirOrErrorSync } from './readdir-or-error.js';
-// crypto.randomBytes is much slower, and Math.random() is enough here
-const uniqueFilename = (path) => `.${basename(path)}.${Math.random()}`;
-const unlinkFixEPERM = async (path) => unlink(path).catch((er) => {
- if (er.code === 'EPERM') {
- return chmod(path, 0o666).then(() => unlink(path), er2 => {
- if (er2.code === 'ENOENT') {
- return;
- }
- throw er;
- });
- }
- else if (er.code === 'ENOENT') {
- return;
- }
- throw er;
-});
-const unlinkFixEPERMSync = (path) => {
- try {
- unlinkSync(path);
- }
- catch (er) {
- if (er?.code === 'EPERM') {
- try {
- return chmodSync(path, 0o666);
- }
- catch (er2) {
- if (er2?.code === 'ENOENT') {
- return;
- }
- throw er;
- }
- }
- else if (er?.code === 'ENOENT') {
- return;
- }
- throw er;
- }
-};
-export const rimrafMoveRemove = async (path, opt) => {
- if (opt?.signal?.aborted) {
- throw opt.signal.reason;
- }
- try {
- return await rimrafMoveRemoveDir(path, opt, await lstat(path));
- }
- catch (er) {
- if (er?.code === 'ENOENT')
- return true;
- throw er;
- }
-};
-const rimrafMoveRemoveDir = async (path, opt, ent) => {
- if (opt?.signal?.aborted) {
- throw opt.signal.reason;
- }
- if (!opt.tmp) {
- return rimrafMoveRemoveDir(path, { ...opt, tmp: await defaultTmp(path) }, ent);
- }
- if (path === opt.tmp && parse(path).root !== path) {
- throw new Error('cannot delete temp directory used for deletion');
- }
- const entries = ent.isDirectory() ? await readdirOrError(path) : null;
- if (!Array.isArray(entries)) {
- // this can only happen if lstat/readdir lied, or if the dir was
- // swapped out with a file at just the right moment.
- /* c8 ignore start */
- if (entries) {
- if (entries.code === 'ENOENT') {
- return true;
- }
- if (entries.code !== 'ENOTDIR') {
- throw entries;
- }
- }
- /* c8 ignore stop */
- if (opt.filter && !(await opt.filter(path, ent))) {
- return false;
- }
- await ignoreENOENT(tmpUnlink(path, opt.tmp, unlinkFixEPERM));
- return true;
- }
- const removedAll = (await Promise.all(entries.map(ent => rimrafMoveRemoveDir(resolve(path, ent.name), opt, ent)))).reduce((a, b) => a && b, true);
- if (!removedAll) {
- return false;
- }
- // we don't ever ACTUALLY try to unlink /, because that can never work
- // but when preserveRoot is false, we could be operating on it.
- // No need to check if preserveRoot is not false.
- if (opt.preserveRoot === false && path === parse(path).root) {
- return false;
- }
- if (opt.filter && !(await opt.filter(path, ent))) {
- return false;
- }
- await ignoreENOENT(tmpUnlink(path, opt.tmp, rmdir));
- return true;
-};
-const tmpUnlink = async (path, tmp, rm) => {
- const tmpFile = resolve(tmp, uniqueFilename(path));
- await rename(path, tmpFile);
- return await rm(tmpFile);
-};
-export const rimrafMoveRemoveSync = (path, opt) => {
- if (opt?.signal?.aborted) {
- throw opt.signal.reason;
- }
- try {
- return rimrafMoveRemoveDirSync(path, opt, lstatSync(path));
- }
- catch (er) {
- if (er?.code === 'ENOENT')
- return true;
- throw er;
- }
-};
-const rimrafMoveRemoveDirSync = (path, opt, ent) => {
- if (opt?.signal?.aborted) {
- throw opt.signal.reason;
- }
- if (!opt.tmp) {
- return rimrafMoveRemoveDirSync(path, { ...opt, tmp: defaultTmpSync(path) }, ent);
- }
- const tmp = opt.tmp;
- if (path === opt.tmp && parse(path).root !== path) {
- throw new Error('cannot delete temp directory used for deletion');
- }
- const entries = ent.isDirectory() ? readdirOrErrorSync(path) : null;
- if (!Array.isArray(entries)) {
- // this can only happen if lstat/readdir lied, or if the dir was
- // swapped out with a file at just the right moment.
- /* c8 ignore start */
- if (entries) {
- if (entries.code === 'ENOENT') {
- return true;
- }
- if (entries.code !== 'ENOTDIR') {
- throw entries;
- }
- }
- /* c8 ignore stop */
- if (opt.filter && !opt.filter(path, ent)) {
- return false;
- }
- ignoreENOENTSync(() => tmpUnlinkSync(path, tmp, unlinkFixEPERMSync));
- return true;
- }
- let removedAll = true;
- for (const ent of entries) {
- const p = resolve(path, ent.name);
- removedAll = rimrafMoveRemoveDirSync(p, opt, ent) && removedAll;
- }
- if (!removedAll) {
- return false;
- }
- if (opt.preserveRoot === false && path === parse(path).root) {
- return false;
- }
- if (opt.filter && !opt.filter(path, ent)) {
- return false;
- }
- ignoreENOENTSync(() => tmpUnlinkSync(path, tmp, rmdirSync));
- return true;
-};
-const tmpUnlinkSync = (path, tmp, rmSync) => {
- const tmpFile = resolve(tmp, uniqueFilename(path));
- renameSync(path, tmpFile);
- return rmSync(tmpFile);
-};
-//# sourceMappingURL=rimraf-move-remove.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-move-remove.js.map b/deps/npm/node_modules/rimraf/dist/esm/rimraf-move-remove.js.map
deleted file mode 100644
index b9a1fd57490501..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/rimraf-move-remove.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"rimraf-move-remove.js","sourceRoot":"","sources":["../../src/rimraf-move-remove.ts"],"names":[],"mappings":"AAAA,qCAAqC;AACrC,EAAE;AACF,aAAa;AACb,oBAAoB;AACpB,yCAAyC;AACzC,uEAAuE;AACvE,0BAA0B;AAC1B,EAAE;AACF,uEAAuE;AACvE,kDAAkD;AAClD,EAAE;AACF,0EAA0E;AAE1E,OAAO,EAAE,QAAQ,EAAE,KAAK,EAAE,OAAO,EAAE,MAAM,MAAM,CAAA;AAC/C,OAAO,EAAE,UAAU,EAAE,cAAc,EAAE,MAAM,kBAAkB,CAAA;AAE7D,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAA;AAEnE,OAAO,EACL,SAAS,EACT,SAAS,EACT,QAAQ,IAAI,UAAU,EACtB,UAAU,EACV,SAAS,EACT,UAAU,GACX,MAAM,SAAS,CAAA;AAChB,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE,GAAG,UAAU,CAAA;AAI1D,OAAO,EAAE,cAAc,EAAE,kBAAkB,EAAE,MAAM,uBAAuB,CAAA;AAE1E,sEAAsE;AACtE,MAAM,cAAc,GAAG,CAAC,IAAY,EAAE,EAAE,CAAC,IAAI,QAAQ,CAAC,IAAI,CAAC,IAAI,IAAI,CAAC,MAAM,EAAE,EAAE,CAAA;AAE9E,MAAM,cAAc,GAAG,KAAK,EAAE,IAAY,EAAE,EAAE,CAC5C,MAAM,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,CAAC,EAA6B,EAAE,EAAE;IACnD,IAAI,EAAE,CAAC,IAAI,KAAK,OAAO,EAAE,CAAC;QACxB,OAAO,KAAK,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC,IAAI,CAC5B,GAAG,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,EAClB,GAAG,CAAC,EAAE;YACJ,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAC1B,OAAM;YACR,CAAC;YACD,MAAM,EAAE,CAAA;QACV,CAAC,CACF,CAAA;IACH,CAAC;SAAM,IAAI,EAAE,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;QAChC,OAAM;IACR,CAAC;IACD,MAAM,EAAE,CAAA;AACV,CAAC,CAAC,CAAA;AAEJ,MAAM,kBAAkB,GAAG,CAAC,IAAY,EAAE,EAAE;IAC1C,IAAI,CAAC;QACH,UAAU,CAAC,IAAI,CAAC,CAAA;IAClB,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,OAAO,EAAE,CAAC;YACpD,IAAI,CAAC;gBACH,OAAO,SAAS,CAAC,IAAI,EAAE,KAAK,CAAC,CAAA;YAC/B,CAAC;YAAC,OAAO,GAAG,EAAE,CAAC;gBACb,IAAK,GAA6B,EAAE,IAAI,KAAK,QAAQ,EAAE,CAAC;oBACtD,OAAM;gBACR,CAAC;gBACD,MAAM,EAAE,CAAA;YACV,CAAC;QACH,CAAC;aAAM,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ,EAAE,CAAC;YAC5D,OAAM;QACR,CAAC;QACD,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,gBAAgB,GAAG,KAAK,EACnC,IAAY,EACZ,GAAuB,EACvB,EAAE;IACF,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC;QACH,OAAO,MAAM,mBAAmB,CAAC,IAAI,EAAE,GAAG,EAAE,MAAM,KAAK,CAAC,IAAI,CAAC,CAAC,CAAA;IAChE,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ;YAAE,OAAO,IAAI,CAAA;QACjE,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAED,MAAM,mBAAmB,GAAG,KAAK,EAC/B,IAAY,EACZ,GAAuB,EACvB,GAAmB,EACD,EAAE;IACpB,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE,CAAC;QACb,OAAO,mBAAmB,CACxB,IAAI,EACJ,EAAE,GAAG,GAAG,EAAE,GAAG,EAAE,MAAM,UAAU,CAAC,IAAI,CAAC,EAAE,EACvC,GAAG,CACJ,CAAA;IACH,CAAC;IACD,IAAI,IAAI,KAAK,GAAG,CAAC,GAAG,IAAI,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,KAAK,IAAI,EAAE,CAAC;QAClD,MAAM,IAAI,KAAK,CAAC,gDAAgD,CAAC,CAAA;IACnE,CAAC;IAED,MAAM,OAAO,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,MAAM,cAAc,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAA;IACrE,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;QAC5B,gEAAgE;QAChE,oDAAoD;QACpD,qBAAqB;QACrB,IAAI,OAAO,EAAE,CAAC;YACZ,IAAI,OAAO,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAC9B,OAAO,IAAI,CAAA;YACb,CAAC;YACD,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBAC/B,MAAM,OAAO,CAAA;YACf,CAAC;QACH,CAAC;QACD,oBAAoB;QACpB,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC;YACjD,OAAO,KAAK,CAAA;QACd,CAAC;QACD,MAAM,YAAY,CAAC,SAAS,CAAC,IAAI,EAAE,GAAG,CAAC,GAAG,EAAE,cAAc,CAAC,CAAC,CAAA;QAC5D,OAAO,IAAI,CAAA;IACb,CAAC;IAED,MAAM,UAAU,GAAG,CACjB,MAAM,OAAO,CAAC,GAAG,CACf,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAChB,mBAAmB,CAAC,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,EAAE,GAAG,EAAE,GAAG,CAAC,CACvD,CACF,CACF,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,E
AAE,IAAI,CAAC,CAAA;IAChC,IAAI,CAAC,UAAU,EAAE,CAAC;QAChB,OAAO,KAAK,CAAA;IACd,CAAC;IAED,sEAAsE;IACtE,+DAA+D;IAC/D,iDAAiD;IACjD,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC;QAC5D,OAAO,KAAK,CAAA;IACd,CAAC;IACD,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC;QACjD,OAAO,KAAK,CAAA;IACd,CAAC;IACD,MAAM,YAAY,CAAC,SAAS,CAAC,IAAI,EAAE,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC,CAAA;IACnD,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,SAAS,GAAG,KAAK,EACrB,IAAY,EACZ,GAAW,EACX,EAA+B,EAC/B,EAAE;IACF,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,EAAE,cAAc,CAAC,IAAI,CAAC,CAAC,CAAA;IAClD,MAAM,MAAM,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;IAC3B,OAAO,MAAM,EAAE,CAAC,OAAO,CAAC,CAAA;AAC1B,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,oBAAoB,GAAG,CAAC,IAAY,EAAE,GAAsB,EAAE,EAAE;IAC3E,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC;QACH,OAAO,uBAAuB,CAAC,IAAI,EAAE,GAAG,EAAE,SAAS,CAAC,IAAI,CAAC,CAAC,CAAA;IAC5D,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ;YAAE,OAAO,IAAI,CAAA;QACjE,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAED,MAAM,uBAAuB,GAAG,CAC9B,IAAY,EACZ,GAAsB,EACtB,GAAmB,EACV,EAAE;IACX,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE,CAAC;QACb,OAAO,uBAAuB,CAC5B,IAAI,EACJ,EAAE,GAAG,GAAG,EAAE,GAAG,EAAE,cAAc,CAAC,IAAI,CAAC,EAAE,EACrC,GAAG,CACJ,CAAA;IACH,CAAC;IACD,MAAM,GAAG,GAAW,GAAG,CAAC,GAAG,CAAA;IAE3B,IAAI,IAAI,KAAK,GAAG,CAAC,GAAG,IAAI,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,KAAK,IAAI,EAAE,CAAC;QAClD,MAAM,IAAI,KAAK,CAAC,gDAAgD,CAAC,CAAA;IACnE,CAAC;IAED,MAAM,OAAO,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,kBAAkB,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAA;IACnE,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;QAC5B,gEAAgE;QAChE,oDAAoD;QACpD,qBAAqB;QACrB,IAAI,OAAO,EAAE,CAAC;YACZ,IAAI,OAAO,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAC9B,OAAO,IAAI,CAAA;YACb,CAAC;YACD,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBAC/B,MAAM,OAAO,CAAA;YACf,CAAC;QACH,CAAC;QACD,oBAAoB;QACpB,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE,CAAC;YACzC,OAAO,KAAK,CAAA;QACd,CAAC;QACD,gBAAgB,CAAC,GAAG,EAAE,CAAC,aAAa,CAAC,IAAI,EAAE,GAAG,EAAE,kBAAkB,CAAC,CAAC,CAAA;QACpE,OAAO,IAAI,CAAA;IACb,CAAC;IAED,IAAI,UAAU,GAAG,IAAI,CAAA;IACrB,KAAK,MAAM,GAAG,IAAI,OAAO,EAAE,CAAC;QAC1B,MAAM,CAAC,GAAG,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,CAAA;QACjC,UAAU,GAAG,uBAAuB,CAAC,CAAC,EAAE,GAAG,EAAE,GAAG,CAAC,IAAI,UAAU,CAAA;IACjE,CAAC;IACD,IAAI,CAAC,UAAU,EAAE,CAAC;QAChB,OAAO,KAAK,CAAA;IACd,CAAC;IACD,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC;QAC5D,OAAO,KAAK,CAAA;IACd,CAAC;IACD,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE,CAAC;QACzC,OAAO,KAAK,CAAA;IACd,CAAC;IACD,gBAAgB,CAAC,GAAG,EAAE,CAAC,aAAa,CAAC,IAAI,EAAE,GAAG,EAAE,SAAS,CAAC,CAAC,CAAA;IAC3D,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,aAAa,GAAG,CACpB,IAAY,EACZ,GAAW,EACX,MAA2B,EAC3B,EAAE;IACF,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,EAAE,cAAc,CAAC,IAAI,CAAC,CAAC,CAAA;IAClD,UAAU,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;IACzB,OAAO,MAAM,CAAC,OAAO,CAAC,CAAA;AACxB,CAAC,CAAA","sourcesContent":["// https://youtu.be/uhRWMGBjlO8?t=537\n//\n// 1. readdir\n// 2. for each entry\n// a. if a non-empty directory, recurse\n// b. if an empty directory, move to random hidden file name in $TEMP\n// c. 
unlink/rmdir $TEMP\n//\n// This works around the fact that unlink/rmdir is non-atomic and takes\n// a non-deterministic amount of time to complete.\n//\n// However, it is HELLA SLOW, like 2-10x slower than a naive recursive rm.\n\nimport { basename, parse, resolve } from 'path'\nimport { defaultTmp, defaultTmpSync } from './default-tmp.js'\n\nimport { ignoreENOENT, ignoreENOENTSync } from './ignore-enoent.js'\n\nimport {\n chmodSync,\n lstatSync,\n promises as fsPromises,\n renameSync,\n rmdirSync,\n unlinkSync,\n} from './fs.js'\nconst { lstat, rename, unlink, rmdir, chmod } = fsPromises\n\nimport { Dirent, Stats } from 'fs'\nimport { RimrafAsyncOptions, RimrafSyncOptions } from './index.js'\nimport { readdirOrError, readdirOrErrorSync } from './readdir-or-error.js'\n\n// crypto.randomBytes is much slower, and Math.random() is enough here\nconst uniqueFilename = (path: string) => `.${basename(path)}.${Math.random()}`\n\nconst unlinkFixEPERM = async (path: string) =>\n unlink(path).catch((er: Error & { code?: string }) => {\n if (er.code === 'EPERM') {\n return chmod(path, 0o666).then(\n () => unlink(path),\n er2 => {\n if (er2.code === 'ENOENT') {\n return\n }\n throw er\n },\n )\n } else if (er.code === 'ENOENT') {\n return\n }\n throw er\n })\n\nconst unlinkFixEPERMSync = (path: string) => {\n try {\n unlinkSync(path)\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'EPERM') {\n try {\n return chmodSync(path, 0o666)\n } catch (er2) {\n if ((er2 as NodeJS.ErrnoException)?.code === 'ENOENT') {\n return\n }\n throw er\n }\n } else if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') {\n return\n }\n throw er\n }\n}\n\nexport const rimrafMoveRemove = async (\n path: string,\n opt: RimrafAsyncOptions,\n) => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n try {\n return await rimrafMoveRemoveDir(path, opt, await lstat(path))\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') return true\n throw er\n }\n}\n\nconst rimrafMoveRemoveDir = async (\n path: string,\n opt: RimrafAsyncOptions,\n ent: Dirent | Stats,\n): Promise => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n if (!opt.tmp) {\n return rimrafMoveRemoveDir(\n path,\n { ...opt, tmp: await defaultTmp(path) },\n ent,\n )\n }\n if (path === opt.tmp && parse(path).root !== path) {\n throw new Error('cannot delete temp directory used for deletion')\n }\n\n const entries = ent.isDirectory() ? 
await readdirOrError(path) : null\n if (!Array.isArray(entries)) {\n // this can only happen if lstat/readdir lied, or if the dir was\n // swapped out with a file at just the right moment.\n /* c8 ignore start */\n if (entries) {\n if (entries.code === 'ENOENT') {\n return true\n }\n if (entries.code !== 'ENOTDIR') {\n throw entries\n }\n }\n /* c8 ignore stop */\n if (opt.filter && !(await opt.filter(path, ent))) {\n return false\n }\n await ignoreENOENT(tmpUnlink(path, opt.tmp, unlinkFixEPERM))\n return true\n }\n\n const removedAll = (\n await Promise.all(\n entries.map(ent =>\n rimrafMoveRemoveDir(resolve(path, ent.name), opt, ent),\n ),\n )\n ).reduce((a, b) => a && b, true)\n if (!removedAll) {\n return false\n }\n\n // we don't ever ACTUALLY try to unlink /, because that can never work\n // but when preserveRoot is false, we could be operating on it.\n // No need to check if preserveRoot is not false.\n if (opt.preserveRoot === false && path === parse(path).root) {\n return false\n }\n if (opt.filter && !(await opt.filter(path, ent))) {\n return false\n }\n await ignoreENOENT(tmpUnlink(path, opt.tmp, rmdir))\n return true\n}\n\nconst tmpUnlink = async (\n path: string,\n tmp: string,\n rm: (p: string) => Promise,\n) => {\n const tmpFile = resolve(tmp, uniqueFilename(path))\n await rename(path, tmpFile)\n return await rm(tmpFile)\n}\n\nexport const rimrafMoveRemoveSync = (path: string, opt: RimrafSyncOptions) => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n try {\n return rimrafMoveRemoveDirSync(path, opt, lstatSync(path))\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') return true\n throw er\n }\n}\n\nconst rimrafMoveRemoveDirSync = (\n path: string,\n opt: RimrafSyncOptions,\n ent: Dirent | Stats,\n): boolean => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n if (!opt.tmp) {\n return rimrafMoveRemoveDirSync(\n path,\n { ...opt, tmp: defaultTmpSync(path) },\n ent,\n )\n }\n const tmp: string = opt.tmp\n\n if (path === opt.tmp && parse(path).root !== path) {\n throw new Error('cannot delete temp directory used for deletion')\n }\n\n const entries = ent.isDirectory() ? readdirOrErrorSync(path) : null\n if (!Array.isArray(entries)) {\n // this can only happen if lstat/readdir lied, or if the dir was\n // swapped out with a file at just the right moment.\n /* c8 ignore start */\n if (entries) {\n if (entries.code === 'ENOENT') {\n return true\n }\n if (entries.code !== 'ENOTDIR') {\n throw entries\n }\n }\n /* c8 ignore stop */\n if (opt.filter && !opt.filter(path, ent)) {\n return false\n }\n ignoreENOENTSync(() => tmpUnlinkSync(path, tmp, unlinkFixEPERMSync))\n return true\n }\n\n let removedAll = true\n for (const ent of entries) {\n const p = resolve(path, ent.name)\n removedAll = rimrafMoveRemoveDirSync(p, opt, ent) && removedAll\n }\n if (!removedAll) {\n return false\n }\n if (opt.preserveRoot === false && path === parse(path).root) {\n return false\n }\n if (opt.filter && !opt.filter(path, ent)) {\n return false\n }\n ignoreENOENTSync(() => tmpUnlinkSync(path, tmp, rmdirSync))\n return true\n}\n\nconst tmpUnlinkSync = (\n path: string,\n tmp: string,\n rmSync: (p: string) => void,\n) => {\n const tmpFile = resolve(tmp, uniqueFilename(path))\n renameSync(path, tmpFile)\n return rmSync(tmpFile)\n}\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-native.d.ts b/deps/npm/node_modules/rimraf/dist/esm/rimraf-native.d.ts
deleted file mode 100644
index cc84bf7ffd34d0..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/rimraf-native.d.ts
+++ /dev/null
@@ -1,4 +0,0 @@
-import { RimrafAsyncOptions, RimrafSyncOptions } from './index.js';
-export declare const rimrafNative: (path: string, opt: RimrafAsyncOptions) => Promise<boolean>;
-export declare const rimrafNativeSync: (path: string, opt: RimrafSyncOptions) => boolean;
-//# sourceMappingURL=rimraf-native.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-native.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/rimraf-native.d.ts.map
deleted file mode 100644
index bea6b79965192f..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/rimraf-native.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"rimraf-native.d.ts","sourceRoot":"","sources":["../../src/rimraf-native.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,kBAAkB,EAAE,iBAAiB,EAAE,MAAM,YAAY,CAAA;AAIlE,eAAO,MAAM,YAAY,SACjB,MAAM,OACP,kBAAkB,KACtB,OAAO,CAAC,OAAO,CAOjB,CAAA;AAED,eAAO,MAAM,gBAAgB,SACrB,MAAM,OACP,iBAAiB,KACrB,OAOF,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-native.js b/deps/npm/node_modules/rimraf/dist/esm/rimraf-native.js
deleted file mode 100644
index 719161fc9e85ca..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/rimraf-native.js
+++ /dev/null
@@ -1,19 +0,0 @@
-import { promises, rmSync } from './fs.js';
-const { rm } = promises;
-export const rimrafNative = async (path, opt) => {
- await rm(path, {
- ...opt,
- force: true,
- recursive: true,
- });
- return true;
-};
-export const rimrafNativeSync = (path, opt) => {
- rmSync(path, {
- ...opt,
- force: true,
- recursive: true,
- });
- return true;
-};
-//# sourceMappingURL=rimraf-native.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-native.js.map b/deps/npm/node_modules/rimraf/dist/esm/rimraf-native.js.map
deleted file mode 100644
index fde373ba4ea13f..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/rimraf-native.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"rimraf-native.js","sourceRoot":"","sources":["../../src/rimraf-native.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,QAAQ,EAAE,MAAM,EAAE,MAAM,SAAS,CAAA;AAC1C,MAAM,EAAE,EAAE,EAAE,GAAG,QAAQ,CAAA;AAEvB,MAAM,CAAC,MAAM,YAAY,GAAG,KAAK,EAC/B,IAAY,EACZ,GAAuB,EACL,EAAE;IACpB,MAAM,EAAE,CAAC,IAAI,EAAE;QACb,GAAG,GAAG;QACN,KAAK,EAAE,IAAI;QACX,SAAS,EAAE,IAAI;KAChB,CAAC,CAAA;IACF,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,gBAAgB,GAAG,CAC9B,IAAY,EACZ,GAAsB,EACb,EAAE;IACX,MAAM,CAAC,IAAI,EAAE;QACX,GAAG,GAAG;QACN,KAAK,EAAE,IAAI;QACX,SAAS,EAAE,IAAI;KAChB,CAAC,CAAA;IACF,OAAO,IAAI,CAAA;AACb,CAAC,CAAA","sourcesContent":["import { RimrafAsyncOptions, RimrafSyncOptions } from './index.js'\nimport { promises, rmSync } from './fs.js'\nconst { rm } = promises\n\nexport const rimrafNative = async (\n path: string,\n opt: RimrafAsyncOptions,\n): Promise => {\n await rm(path, {\n ...opt,\n force: true,\n recursive: true,\n })\n return true\n}\n\nexport const rimrafNativeSync = (\n path: string,\n opt: RimrafSyncOptions,\n): boolean => {\n rmSync(path, {\n ...opt,\n force: true,\n recursive: true,\n })\n return true\n}\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-posix.d.ts b/deps/npm/node_modules/rimraf/dist/esm/rimraf-posix.d.ts
deleted file mode 100644
index 8e532efe9aba21..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/rimraf-posix.d.ts
+++ /dev/null
@@ -1,4 +0,0 @@
-import { RimrafAsyncOptions, RimrafSyncOptions } from './index.js';
-export declare const rimrafPosix: (path: string, opt: RimrafAsyncOptions) => Promise<boolean>;
-export declare const rimrafPosixSync: (path: string, opt: RimrafSyncOptions) => boolean;
-//# sourceMappingURL=rimraf-posix.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-posix.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/rimraf-posix.d.ts.map
deleted file mode 100644
index 3f9b8084ed470b..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/rimraf-posix.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"rimraf-posix.d.ts","sourceRoot":"","sources":["../../src/rimraf-posix.ts"],"names":[],"mappings":"AAcA,OAAO,EAAE,kBAAkB,EAAE,iBAAiB,EAAE,MAAM,YAAY,CAAA;AAGlE,eAAO,MAAM,WAAW,SAAgB,MAAM,OAAO,kBAAkB,qBAUtE,CAAA;AAED,eAAO,MAAM,eAAe,SAAU,MAAM,OAAO,iBAAiB,YAUnE,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-posix.js b/deps/npm/node_modules/rimraf/dist/esm/rimraf-posix.js
deleted file mode 100644
index 356a477765a665..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/rimraf-posix.js
+++ /dev/null
@@ -1,118 +0,0 @@
-// the simple recursive removal, where unlink and rmdir are atomic
-// Note that this approach does NOT work on Windows!
-// We stat first and only unlink if the Dirent isn't a directory,
-// because sunos will let root unlink a directory, and some
-// SUPER weird breakage happens as a result.
-import { lstatSync, promises, rmdirSync, unlinkSync } from './fs.js';
-const { lstat, rmdir, unlink } = promises;
-import { parse, resolve } from 'path';
-import { readdirOrError, readdirOrErrorSync } from './readdir-or-error.js';
-import { ignoreENOENT, ignoreENOENTSync } from './ignore-enoent.js';
-export const rimrafPosix = async (path, opt) => {
- if (opt?.signal?.aborted) {
- throw opt.signal.reason;
- }
- try {
- return await rimrafPosixDir(path, opt, await lstat(path));
- }
- catch (er) {
- if (er?.code === 'ENOENT')
- return true;
- throw er;
- }
-};
-export const rimrafPosixSync = (path, opt) => {
- if (opt?.signal?.aborted) {
- throw opt.signal.reason;
- }
- try {
- return rimrafPosixDirSync(path, opt, lstatSync(path));
- }
- catch (er) {
- if (er?.code === 'ENOENT')
- return true;
- throw er;
- }
-};
-const rimrafPosixDir = async (path, opt, ent) => {
- if (opt?.signal?.aborted) {
- throw opt.signal.reason;
- }
- const entries = ent.isDirectory() ? await readdirOrError(path) : null;
- if (!Array.isArray(entries)) {
- // this can only happen if lstat/readdir lied, or if the dir was
- // swapped out with a file at just the right moment.
- /* c8 ignore start */
- if (entries) {
- if (entries.code === 'ENOENT') {
- return true;
- }
- if (entries.code !== 'ENOTDIR') {
- throw entries;
- }
- }
- /* c8 ignore stop */
- if (opt.filter && !(await opt.filter(path, ent))) {
- return false;
- }
- await ignoreENOENT(unlink(path));
- return true;
- }
- const removedAll = (await Promise.all(entries.map(ent => rimrafPosixDir(resolve(path, ent.name), opt, ent)))).reduce((a, b) => a && b, true);
- if (!removedAll) {
- return false;
- }
- // we don't ever ACTUALLY try to unlink /, because that can never work
- // but when preserveRoot is false, we could be operating on it.
- // No need to check if preserveRoot is not false.
- if (opt.preserveRoot === false && path === parse(path).root) {
- return false;
- }
- if (opt.filter && !(await opt.filter(path, ent))) {
- return false;
- }
- await ignoreENOENT(rmdir(path));
- return true;
-};
-const rimrafPosixDirSync = (path, opt, ent) => {
- if (opt?.signal?.aborted) {
- throw opt.signal.reason;
- }
- const entries = ent.isDirectory() ? readdirOrErrorSync(path) : null;
- if (!Array.isArray(entries)) {
- // this can only happen if lstat/readdir lied, or if the dir was
- // swapped out with a file at just the right moment.
- /* c8 ignore start */
- if (entries) {
- if (entries.code === 'ENOENT') {
- return true;
- }
- if (entries.code !== 'ENOTDIR') {
- throw entries;
- }
- }
- /* c8 ignore stop */
- if (opt.filter && !opt.filter(path, ent)) {
- return false;
- }
- ignoreENOENTSync(() => unlinkSync(path));
- return true;
- }
- let removedAll = true;
- for (const ent of entries) {
- const p = resolve(path, ent.name);
- removedAll = rimrafPosixDirSync(p, opt, ent) && removedAll;
- }
- if (opt.preserveRoot === false && path === parse(path).root) {
- return false;
- }
- if (!removedAll) {
- return false;
- }
- if (opt.filter && !opt.filter(path, ent)) {
- return false;
- }
- ignoreENOENTSync(() => rmdirSync(path));
- return true;
-};
-//# sourceMappingURL=rimraf-posix.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-posix.js.map b/deps/npm/node_modules/rimraf/dist/esm/rimraf-posix.js.map
deleted file mode 100644
index e301324d3f2649..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/rimraf-posix.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"rimraf-posix.js","sourceRoot":"","sources":["../../src/rimraf-posix.ts"],"names":[],"mappings":"AAAA,kEAAkE;AAClE,oDAAoD;AACpD,iEAAiE;AACjE,2DAA2D;AAC3D,4CAA4C;AAE5C,OAAO,EAAE,SAAS,EAAE,QAAQ,EAAE,SAAS,EAAE,UAAU,EAAE,MAAM,SAAS,CAAA;AACpE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,GAAG,QAAQ,CAAA;AAEzC,OAAO,EAAE,KAAK,EAAE,OAAO,EAAE,MAAM,MAAM,CAAA;AAErC,OAAO,EAAE,cAAc,EAAE,kBAAkB,EAAE,MAAM,uBAAuB,CAAA;AAI1E,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAA;AAEnE,MAAM,CAAC,MAAM,WAAW,GAAG,KAAK,EAAE,IAAY,EAAE,GAAuB,EAAE,EAAE;IACzE,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC;QACH,OAAO,MAAM,cAAc,CAAC,IAAI,EAAE,GAAG,EAAE,MAAM,KAAK,CAAC,IAAI,CAAC,CAAC,CAAA;IAC3D,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ;YAAE,OAAO,IAAI,CAAA;QACjE,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,eAAe,GAAG,CAAC,IAAY,EAAE,GAAsB,EAAE,EAAE;IACtE,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC;QACH,OAAO,kBAAkB,CAAC,IAAI,EAAE,GAAG,EAAE,SAAS,CAAC,IAAI,CAAC,CAAC,CAAA;IACvD,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ;YAAE,OAAO,IAAI,CAAA;QACjE,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAED,MAAM,cAAc,GAAG,KAAK,EAC1B,IAAY,EACZ,GAAuB,EACvB,GAAmB,EACD,EAAE;IACpB,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,MAAM,OAAO,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,MAAM,cAAc,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAA;IACrE,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;QAC5B,gEAAgE;QAChE,oDAAoD;QACpD,qBAAqB;QACrB,IAAI,OAAO,EAAE,CAAC;YACZ,IAAI,OAAO,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAC9B,OAAO,IAAI,CAAA;YACb,CAAC;YACD,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBAC/B,MAAM,OAAO,CAAA;YACf,CAAC;QACH,CAAC;QACD,oBAAoB;QACpB,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC;YACjD,OAAO,KAAK,CAAA;QACd,CAAC;QACD,MAAM,YAAY,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAA;QAChC,OAAO,IAAI,CAAA;IACb,CAAC;IAED,MAAM,UAAU,GAAG,CACjB,MAAM,OAAO,CAAC,GAAG,CACf,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,cAAc,CAAC,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC,CACtE,CACF,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,CAAA;IAEhC,IAAI,CAAC,UAAU,EAAE,CAAC;QAChB,OAAO,KAAK,CAAA;IACd,CAAC;IAED,sEAAsE;IACtE,+DAA+D;IAC/D,iDAAiD;IACjD,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC;QAC5D,OAAO,KAAK,CAAA;IACd,CAAC;IAED,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC;QACjD,OAAO,KAAK,CAAA;IACd,CAAC;IAED,MAAM,YAAY,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAA;IAC/B,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,kBAAkB,GAAG,CACzB,IAAY,EACZ,GAAsB,EACtB,GAAmB,EACV,EAAE;IACX,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,MAAM,OAAO,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,kBAAkB,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAA;IACnE,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;QAC5B,gEAAgE;QAChE,oDAAoD;QACpD,qBAAqB;QACrB,IAAI,OAAO,EAAE,CAAC;YACZ,IAAI,OAAO,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAC9B,OAAO,IAAI,CAAA;YACb,CAAC;YACD,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBAC/B,MAAM,OAAO,CAAA;YACf,CAAC;QACH,CAAC;QACD,oBAAoB;QACpB,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE,CAAC;YACzC,OAAO,KAAK,CAAA;QACd,CAAC;QACD,gBAAgB,CAAC,GAAG,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC,CAAA;QACxC,OAAO,IAAI,CAAA;IACb,CAAC;IACD,IAAI,UAAU,GAAY,IAAI,CAAA;IAC9B,KA
AK,MAAM,GAAG,IAAI,OAAO,EAAE,CAAC;QAC1B,MAAM,CAAC,GAAG,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,CAAA;QACjC,UAAU,GAAG,kBAAkB,CAAC,CAAC,EAAE,GAAG,EAAE,GAAG,CAAC,IAAI,UAAU,CAAA;IAC5D,CAAC;IACD,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC;QAC5D,OAAO,KAAK,CAAA;IACd,CAAC;IAED,IAAI,CAAC,UAAU,EAAE,CAAC;QAChB,OAAO,KAAK,CAAA;IACd,CAAC;IAED,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE,CAAC;QACzC,OAAO,KAAK,CAAA;IACd,CAAC;IAED,gBAAgB,CAAC,GAAG,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAA;IACvC,OAAO,IAAI,CAAA;AACb,CAAC,CAAA","sourcesContent":["// the simple recursive removal, where unlink and rmdir are atomic\n// Note that this approach does NOT work on Windows!\n// We stat first and only unlink if the Dirent isn't a directory,\n// because sunos will let root unlink a directory, and some\n// SUPER weird breakage happens as a result.\n\nimport { lstatSync, promises, rmdirSync, unlinkSync } from './fs.js'\nconst { lstat, rmdir, unlink } = promises\n\nimport { parse, resolve } from 'path'\n\nimport { readdirOrError, readdirOrErrorSync } from './readdir-or-error.js'\n\nimport { Dirent, Stats } from 'fs'\nimport { RimrafAsyncOptions, RimrafSyncOptions } from './index.js'\nimport { ignoreENOENT, ignoreENOENTSync } from './ignore-enoent.js'\n\nexport const rimrafPosix = async (path: string, opt: RimrafAsyncOptions) => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n try {\n return await rimrafPosixDir(path, opt, await lstat(path))\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') return true\n throw er\n }\n}\n\nexport const rimrafPosixSync = (path: string, opt: RimrafSyncOptions) => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n try {\n return rimrafPosixDirSync(path, opt, lstatSync(path))\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') return true\n throw er\n }\n}\n\nconst rimrafPosixDir = async (\n path: string,\n opt: RimrafAsyncOptions,\n ent: Dirent | Stats,\n): Promise => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n const entries = ent.isDirectory() ? await readdirOrError(path) : null\n if (!Array.isArray(entries)) {\n // this can only happen if lstat/readdir lied, or if the dir was\n // swapped out with a file at just the right moment.\n /* c8 ignore start */\n if (entries) {\n if (entries.code === 'ENOENT') {\n return true\n }\n if (entries.code !== 'ENOTDIR') {\n throw entries\n }\n }\n /* c8 ignore stop */\n if (opt.filter && !(await opt.filter(path, ent))) {\n return false\n }\n await ignoreENOENT(unlink(path))\n return true\n }\n\n const removedAll = (\n await Promise.all(\n entries.map(ent => rimrafPosixDir(resolve(path, ent.name), opt, ent)),\n )\n ).reduce((a, b) => a && b, true)\n\n if (!removedAll) {\n return false\n }\n\n // we don't ever ACTUALLY try to unlink /, because that can never work\n // but when preserveRoot is false, we could be operating on it.\n // No need to check if preserveRoot is not false.\n if (opt.preserveRoot === false && path === parse(path).root) {\n return false\n }\n\n if (opt.filter && !(await opt.filter(path, ent))) {\n return false\n }\n\n await ignoreENOENT(rmdir(path))\n return true\n}\n\nconst rimrafPosixDirSync = (\n path: string,\n opt: RimrafSyncOptions,\n ent: Dirent | Stats,\n): boolean => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n const entries = ent.isDirectory() ? 
readdirOrErrorSync(path) : null\n if (!Array.isArray(entries)) {\n // this can only happen if lstat/readdir lied, or if the dir was\n // swapped out with a file at just the right moment.\n /* c8 ignore start */\n if (entries) {\n if (entries.code === 'ENOENT') {\n return true\n }\n if (entries.code !== 'ENOTDIR') {\n throw entries\n }\n }\n /* c8 ignore stop */\n if (opt.filter && !opt.filter(path, ent)) {\n return false\n }\n ignoreENOENTSync(() => unlinkSync(path))\n return true\n }\n let removedAll: boolean = true\n for (const ent of entries) {\n const p = resolve(path, ent.name)\n removedAll = rimrafPosixDirSync(p, opt, ent) && removedAll\n }\n if (opt.preserveRoot === false && path === parse(path).root) {\n return false\n }\n\n if (!removedAll) {\n return false\n }\n\n if (opt.filter && !opt.filter(path, ent)) {\n return false\n }\n\n ignoreENOENTSync(() => rmdirSync(path))\n return true\n}\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-windows.d.ts b/deps/npm/node_modules/rimraf/dist/esm/rimraf-windows.d.ts
deleted file mode 100644
index 555689073ffe75..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/rimraf-windows.d.ts
+++ /dev/null
@@ -1,4 +0,0 @@
-import { RimrafAsyncOptions, RimrafSyncOptions } from './index.js';
-export declare const rimrafWindows: (path: string, opt: RimrafAsyncOptions) => Promise<boolean>;
-export declare const rimrafWindowsSync: (path: string, opt: RimrafSyncOptions) => boolean;
-//# sourceMappingURL=rimraf-windows.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-windows.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/rimraf-windows.d.ts.map
deleted file mode 100644
index 56f00d9f2e3d13..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/rimraf-windows.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"rimraf-windows.d.ts","sourceRoot":"","sources":["../../src/rimraf-windows.ts"],"names":[],"mappings":"AAYA,OAAO,EAAE,kBAAkB,EAAE,iBAAiB,EAAE,MAAM,YAAY,CAAA;AA2DlE,eAAO,MAAM,aAAa,SAAgB,MAAM,OAAO,kBAAkB,qBAUxE,CAAA;AAED,eAAO,MAAM,iBAAiB,SAAU,MAAM,OAAO,iBAAiB,YAUrE,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-windows.js b/deps/npm/node_modules/rimraf/dist/esm/rimraf-windows.js
deleted file mode 100644
index bd2fa80657848d..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/rimraf-windows.js
+++ /dev/null
@@ -1,177 +0,0 @@
-// This is the same as rimrafPosix, with the following changes:
-//
-// 1. EBUSY, ENFILE, EMFILE trigger retries and/or exponential backoff
-// 2. All non-directories are removed first and then all directories are
-// removed in a second sweep.
-// 3. If we hit ENOTEMPTY in the second sweep, fall back to move-remove on
-// the that folder.
-//
-// Note: "move then remove" is 2-10 times slower, and just as unreliable.
-import { parse, resolve } from 'path';
-import { fixEPERM, fixEPERMSync } from './fix-eperm.js';
-import { lstatSync, promises, rmdirSync, unlinkSync } from './fs.js';
-import { ignoreENOENT, ignoreENOENTSync } from './ignore-enoent.js';
-import { readdirOrError, readdirOrErrorSync } from './readdir-or-error.js';
-import { retryBusy, retryBusySync } from './retry-busy.js';
-import { rimrafMoveRemove, rimrafMoveRemoveSync } from './rimraf-move-remove.js';
-const { unlink, rmdir, lstat } = promises;
-const rimrafWindowsFile = retryBusy(fixEPERM(unlink));
-const rimrafWindowsFileSync = retryBusySync(fixEPERMSync(unlinkSync));
-const rimrafWindowsDirRetry = retryBusy(fixEPERM(rmdir));
-const rimrafWindowsDirRetrySync = retryBusySync(fixEPERMSync(rmdirSync));
-const rimrafWindowsDirMoveRemoveFallback = async (path, opt) => {
- /* c8 ignore start */
- if (opt?.signal?.aborted) {
- throw opt.signal.reason;
- }
- /* c8 ignore stop */
- // already filtered, remove from options so we don't call unnecessarily
- const { filter, ...options } = opt;
- try {
- return await rimrafWindowsDirRetry(path, options);
- }
- catch (er) {
- if (er?.code === 'ENOTEMPTY') {
- return await rimrafMoveRemove(path, options);
- }
- throw er;
- }
-};
-const rimrafWindowsDirMoveRemoveFallbackSync = (path, opt) => {
- if (opt?.signal?.aborted) {
- throw opt.signal.reason;
- }
- // already filtered, remove from options so we don't call unnecessarily
- const { filter, ...options } = opt;
- try {
- return rimrafWindowsDirRetrySync(path, options);
- }
- catch (er) {
- const fer = er;
- if (fer?.code === 'ENOTEMPTY') {
- return rimrafMoveRemoveSync(path, options);
- }
- throw er;
- }
-};
-const START = Symbol('start');
-const CHILD = Symbol('child');
-const FINISH = Symbol('finish');
-export const rimrafWindows = async (path, opt) => {
- if (opt?.signal?.aborted) {
- throw opt.signal.reason;
- }
- try {
- return await rimrafWindowsDir(path, opt, await lstat(path), START);
- }
- catch (er) {
- if (er?.code === 'ENOENT')
- return true;
- throw er;
- }
-};
-export const rimrafWindowsSync = (path, opt) => {
- if (opt?.signal?.aborted) {
- throw opt.signal.reason;
- }
- try {
- return rimrafWindowsDirSync(path, opt, lstatSync(path), START);
- }
- catch (er) {
- if (er?.code === 'ENOENT')
- return true;
- throw er;
- }
-};
-const rimrafWindowsDir = async (path, opt, ent, state = START) => {
- if (opt?.signal?.aborted) {
- throw opt.signal.reason;
- }
- const entries = ent.isDirectory() ? await readdirOrError(path) : null;
- if (!Array.isArray(entries)) {
- // this can only happen if lstat/readdir lied, or if the dir was
- // swapped out with a file at just the right moment.
- /* c8 ignore start */
- if (entries) {
- if (entries.code === 'ENOENT') {
- return true;
- }
- if (entries.code !== 'ENOTDIR') {
- throw entries;
- }
- }
- /* c8 ignore stop */
- if (opt.filter && !(await opt.filter(path, ent))) {
- return false;
- }
- // is a file
- await ignoreENOENT(rimrafWindowsFile(path, opt));
- return true;
- }
- const s = state === START ? CHILD : state;
- const removedAll = (await Promise.all(entries.map(ent => rimrafWindowsDir(resolve(path, ent.name), opt, ent, s)))).reduce((a, b) => a && b, true);
- if (state === START) {
- return rimrafWindowsDir(path, opt, ent, FINISH);
- }
- else if (state === FINISH) {
- if (opt.preserveRoot === false && path === parse(path).root) {
- return false;
- }
- if (!removedAll) {
- return false;
- }
- if (opt.filter && !(await opt.filter(path, ent))) {
- return false;
- }
- await ignoreENOENT(rimrafWindowsDirMoveRemoveFallback(path, opt));
- }
- return true;
-};
-const rimrafWindowsDirSync = (path, opt, ent, state = START) => {
- const entries = ent.isDirectory() ? readdirOrErrorSync(path) : null;
- if (!Array.isArray(entries)) {
- // this can only happen if lstat/readdir lied, or if the dir was
- // swapped out with a file at just the right moment.
- /* c8 ignore start */
- if (entries) {
- if (entries.code === 'ENOENT') {
- return true;
- }
- if (entries.code !== 'ENOTDIR') {
- throw entries;
- }
- }
- /* c8 ignore stop */
- if (opt.filter && !opt.filter(path, ent)) {
- return false;
- }
- // is a file
- ignoreENOENTSync(() => rimrafWindowsFileSync(path, opt));
- return true;
- }
- let removedAll = true;
- for (const ent of entries) {
- const s = state === START ? CHILD : state;
- const p = resolve(path, ent.name);
- removedAll = rimrafWindowsDirSync(p, opt, ent, s) && removedAll;
- }
- if (state === START) {
- return rimrafWindowsDirSync(path, opt, ent, FINISH);
- }
- else if (state === FINISH) {
- if (opt.preserveRoot === false && path === parse(path).root) {
- return false;
- }
- if (!removedAll) {
- return false;
- }
- if (opt.filter && !opt.filter(path, ent)) {
- return false;
- }
- ignoreENOENTSync(() => {
- rimrafWindowsDirMoveRemoveFallbackSync(path, opt);
- });
- }
- return true;
-};
-//# sourceMappingURL=rimraf-windows.js.map
\ No newline at end of file
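The header comment of the deleted `rimraf-windows.js` above spells out the Windows strategy: retry EBUSY/ENFILE/EMFILE with backoff, remove files in a first sweep and directories in a second, and fall back to move-then-remove on ENOTEMPTY. A generic sketch of the retry-with-backoff wrapper that strategy is built on; the constants and option names here are illustrative, not the library's actual defaults:

```js
'use strict'

// Illustrative retry-with-backoff wrapper in the spirit of the deleted
// retryBusy helper; codes and limits are examples, not rimraf's defaults.
const RETRY_CODES = new Set(['EBUSY', 'EMFILE', 'ENFILE'])

async function retryBusy (fn, { maxRetries = 10, backoff = 1.2, maxBackoff = 200 } = {}) {
  let delay = 1
  for (let tries = 0; ; tries++) {
    try {
      return await fn()
    } catch (er) {
      if (!RETRY_CODES.has(er.code) || tries >= maxRetries) throw er
      delay = Math.min(Math.ceil(delay * backoff), maxBackoff)
      await new Promise(res => setTimeout(res, delay))
    }
  }
}

// usage: retry an unlink that may hit a transient EBUSY on Windows
// const { unlink } = require('fs/promises')
// await retryBusy(() => unlink('C:\\temp\\locked-file.txt'))
```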
diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-windows.js.map b/deps/npm/node_modules/rimraf/dist/esm/rimraf-windows.js.map
deleted file mode 100644
index 4b136c1e7f1f78..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/rimraf-windows.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"rimraf-windows.js","sourceRoot":"","sources":["../../src/rimraf-windows.ts"],"names":[],"mappings":"AAAA,+DAA+D;AAC/D,EAAE;AACF,sEAAsE;AACtE,wEAAwE;AACxE,gCAAgC;AAChC,0EAA0E;AAC1E,sBAAsB;AACtB,EAAE;AACF,yEAAyE;AAGzE,OAAO,EAAE,KAAK,EAAE,OAAO,EAAE,MAAM,MAAM,CAAA;AAErC,OAAO,EAAE,QAAQ,EAAE,YAAY,EAAE,MAAM,gBAAgB,CAAA;AACvD,OAAO,EAAE,SAAS,EAAE,QAAQ,EAAE,SAAS,EAAE,UAAU,EAAE,MAAM,SAAS,CAAA;AACpE,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAA;AACnE,OAAO,EAAE,cAAc,EAAE,kBAAkB,EAAE,MAAM,uBAAuB,CAAA;AAC1E,OAAO,EAAE,SAAS,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAA;AAC1D,OAAO,EAAE,gBAAgB,EAAE,oBAAoB,EAAE,MAAM,yBAAyB,CAAA;AAChF,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE,GAAG,QAAQ,CAAA;AAEzC,MAAM,iBAAiB,GAAG,SAAS,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAA;AACrD,MAAM,qBAAqB,GAAG,aAAa,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAA;AACrE,MAAM,qBAAqB,GAAG,SAAS,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAA;AACxD,MAAM,yBAAyB,GAAG,aAAa,CAAC,YAAY,CAAC,SAAS,CAAC,CAAC,CAAA;AAExE,MAAM,kCAAkC,GAAG,KAAK,EAC9C,IAAY,EACZ,GAAuB,EACL,EAAE;IACpB,qBAAqB;IACrB,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,oBAAoB;IACpB,uEAAuE;IACvE,MAAM,EAAE,MAAM,EAAE,GAAG,OAAO,EAAE,GAAG,GAAG,CAAA;IAClC,IAAI,CAAC;QACH,OAAO,MAAM,qBAAqB,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;IACnD,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,WAAW,EAAE,CAAC;YACxD,OAAO,MAAM,gBAAgB,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;QAC9C,CAAC;QACD,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAED,MAAM,sCAAsC,GAAG,CAC7C,IAAY,EACZ,GAAsB,EACb,EAAE;IACX,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,uEAAuE;IACvE,MAAM,EAAE,MAAM,EAAE,GAAG,OAAO,EAAE,GAAG,GAAG,CAAA;IAClC,IAAI,CAAC;QACH,OAAO,yBAAyB,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;IACjD,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,IAAI,GAAG,EAAE,IAAI,KAAK,WAAW,EAAE,CAAC;YAC9B,OAAO,oBAAoB,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;QAC5C,CAAC;QACD,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAED,MAAM,KAAK,GAAG,MAAM,CAAC,OAAO,CAAC,CAAA;AAC7B,MAAM,KAAK,GAAG,MAAM,CAAC,OAAO,CAAC,CAAA;AAC7B,MAAM,MAAM,GAAG,MAAM,CAAC,QAAQ,CAAC,CAAA;AAE/B,MAAM,CAAC,MAAM,aAAa,GAAG,KAAK,EAAE,IAAY,EAAE,GAAuB,EAAE,EAAE;IAC3E,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC;QACH,OAAO,MAAM,gBAAgB,CAAC,IAAI,EAAE,GAAG,EAAE,MAAM,KAAK,CAAC,IAAI,CAAC,EAAE,KAAK,CAAC,CAAA;IACpE,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ;YAAE,OAAO,IAAI,CAAA;QACjE,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,iBAAiB,GAAG,CAAC,IAAY,EAAE,GAAsB,EAAE,EAAE;IACxE,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC;QACH,OAAO,oBAAoB,CAAC,IAAI,EAAE,GAAG,EAAE,SAAS,CAAC,IAAI,CAAC,EAAE,KAAK,CAAC,CAAA;IAChE,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ;YAAE,OAAO,IAAI,CAAA;QACjE,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAED,MAAM,gBAAgB,GAAG,KAAK,EAC5B,IAAY,EACZ,GAAuB,EACvB,GAAmB,EACnB,KAAK,GAAG,KAAK,EACK,EAAE;IACpB,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IAED,MAAM,OAAO,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,MAAM,cAAc,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAA;IACrE,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;QAC5B,gEAAgE;QAChE,oDAAoD;QACpD,qBAAqB;QACrB,IAAI,OAAO,EAAE,CAAC;YACZ,IAAI,OAAO,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAC9B,OAAO,IAAI,CAAA;YACb,CAAC;YACD,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBAC/B,MAAM,OAAO,CAAA;YACf,CAAC;QACH,CAAC;QACD,oBAAoB;QACpB,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,IAAI
,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC;YACjD,OAAO,KAAK,CAAA;QACd,CAAC;QACD,YAAY;QACZ,MAAM,YAAY,CAAC,iBAAiB,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAA;QAChD,OAAO,IAAI,CAAA;IACb,CAAC;IAED,MAAM,CAAC,GAAG,KAAK,KAAK,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAA;IACzC,MAAM,UAAU,GAAG,CACjB,MAAM,OAAO,CAAC,GAAG,CACf,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAChB,gBAAgB,CAAC,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,EAAE,GAAG,EAAE,GAAG,EAAE,CAAC,CAAC,CACvD,CACF,CACF,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,CAAA;IAEhC,IAAI,KAAK,KAAK,KAAK,EAAE,CAAC;QACpB,OAAO,gBAAgB,CAAC,IAAI,EAAE,GAAG,EAAE,GAAG,EAAE,MAAM,CAAC,CAAA;IACjD,CAAC;SAAM,IAAI,KAAK,KAAK,MAAM,EAAE,CAAC;QAC5B,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC;YAC5D,OAAO,KAAK,CAAA;QACd,CAAC;QACD,IAAI,CAAC,UAAU,EAAE,CAAC;YAChB,OAAO,KAAK,CAAA;QACd,CAAC;QACD,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC;YACjD,OAAO,KAAK,CAAA;QACd,CAAC;QACD,MAAM,YAAY,CAAC,kCAAkC,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAA;IACnE,CAAC;IACD,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,oBAAoB,GAAG,CAC3B,IAAY,EACZ,GAAsB,EACtB,GAAmB,EACnB,KAAK,GAAG,KAAK,EACJ,EAAE;IACX,MAAM,OAAO,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,kBAAkB,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAA;IACnE,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;QAC5B,gEAAgE;QAChE,oDAAoD;QACpD,qBAAqB;QACrB,IAAI,OAAO,EAAE,CAAC;YACZ,IAAI,OAAO,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAC9B,OAAO,IAAI,CAAA;YACb,CAAC;YACD,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBAC/B,MAAM,OAAO,CAAA;YACf,CAAC;QACH,CAAC;QACD,oBAAoB;QACpB,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE,CAAC;YACzC,OAAO,KAAK,CAAA;QACd,CAAC;QACD,YAAY;QACZ,gBAAgB,CAAC,GAAG,EAAE,CAAC,qBAAqB,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAA;QACxD,OAAO,IAAI,CAAA;IACb,CAAC;IAED,IAAI,UAAU,GAAG,IAAI,CAAA;IACrB,KAAK,MAAM,GAAG,IAAI,OAAO,EAAE,CAAC;QAC1B,MAAM,CAAC,GAAG,KAAK,KAAK,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAA;QACzC,MAAM,CAAC,GAAG,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,CAAA;QACjC,UAAU,GAAG,oBAAoB,CAAC,CAAC,EAAE,GAAG,EAAE,GAAG,EAAE,CAAC,CAAC,IAAI,UAAU,CAAA;IACjE,CAAC;IAED,IAAI,KAAK,KAAK,KAAK,EAAE,CAAC;QACpB,OAAO,oBAAoB,CAAC,IAAI,EAAE,GAAG,EAAE,GAAG,EAAE,MAAM,CAAC,CAAA;IACrD,CAAC;SAAM,IAAI,KAAK,KAAK,MAAM,EAAE,CAAC;QAC5B,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC;YAC5D,OAAO,KAAK,CAAA;QACd,CAAC;QACD,IAAI,CAAC,UAAU,EAAE,CAAC;YAChB,OAAO,KAAK,CAAA;QACd,CAAC;QACD,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE,CAAC;YACzC,OAAO,KAAK,CAAA;QACd,CAAC;QACD,gBAAgB,CAAC,GAAG,EAAE;YACpB,sCAAsC,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;QACnD,CAAC,CAAC,CAAA;IACJ,CAAC;IACD,OAAO,IAAI,CAAA;AACb,CAAC,CAAA","sourcesContent":["// This is the same as rimrafPosix, with the following changes:\n//\n// 1. EBUSY, ENFILE, EMFILE trigger retries and/or exponential backoff\n// 2. All non-directories are removed first and then all directories are\n// removed in a second sweep.\n// 3. 
If we hit ENOTEMPTY in the second sweep, fall back to move-remove on\n// the that folder.\n//\n// Note: \"move then remove\" is 2-10 times slower, and just as unreliable.\n\nimport { Dirent, Stats } from 'fs'\nimport { parse, resolve } from 'path'\nimport { RimrafAsyncOptions, RimrafSyncOptions } from './index.js'\nimport { fixEPERM, fixEPERMSync } from './fix-eperm.js'\nimport { lstatSync, promises, rmdirSync, unlinkSync } from './fs.js'\nimport { ignoreENOENT, ignoreENOENTSync } from './ignore-enoent.js'\nimport { readdirOrError, readdirOrErrorSync } from './readdir-or-error.js'\nimport { retryBusy, retryBusySync } from './retry-busy.js'\nimport { rimrafMoveRemove, rimrafMoveRemoveSync } from './rimraf-move-remove.js'\nconst { unlink, rmdir, lstat } = promises\n\nconst rimrafWindowsFile = retryBusy(fixEPERM(unlink))\nconst rimrafWindowsFileSync = retryBusySync(fixEPERMSync(unlinkSync))\nconst rimrafWindowsDirRetry = retryBusy(fixEPERM(rmdir))\nconst rimrafWindowsDirRetrySync = retryBusySync(fixEPERMSync(rmdirSync))\n\nconst rimrafWindowsDirMoveRemoveFallback = async (\n path: string,\n opt: RimrafAsyncOptions,\n): Promise => {\n /* c8 ignore start */\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n /* c8 ignore stop */\n // already filtered, remove from options so we don't call unnecessarily\n const { filter, ...options } = opt\n try {\n return await rimrafWindowsDirRetry(path, options)\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'ENOTEMPTY') {\n return await rimrafMoveRemove(path, options)\n }\n throw er\n }\n}\n\nconst rimrafWindowsDirMoveRemoveFallbackSync = (\n path: string,\n opt: RimrafSyncOptions,\n): boolean => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n // already filtered, remove from options so we don't call unnecessarily\n const { filter, ...options } = opt\n try {\n return rimrafWindowsDirRetrySync(path, options)\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n if (fer?.code === 'ENOTEMPTY') {\n return rimrafMoveRemoveSync(path, options)\n }\n throw er\n }\n}\n\nconst START = Symbol('start')\nconst CHILD = Symbol('child')\nconst FINISH = Symbol('finish')\n\nexport const rimrafWindows = async (path: string, opt: RimrafAsyncOptions) => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n try {\n return await rimrafWindowsDir(path, opt, await lstat(path), START)\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') return true\n throw er\n }\n}\n\nexport const rimrafWindowsSync = (path: string, opt: RimrafSyncOptions) => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n try {\n return rimrafWindowsDirSync(path, opt, lstatSync(path), START)\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') return true\n throw er\n }\n}\n\nconst rimrafWindowsDir = async (\n path: string,\n opt: RimrafAsyncOptions,\n ent: Dirent | Stats,\n state = START,\n): Promise => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n\n const entries = ent.isDirectory() ? 
await readdirOrError(path) : null\n if (!Array.isArray(entries)) {\n // this can only happen if lstat/readdir lied, or if the dir was\n // swapped out with a file at just the right moment.\n /* c8 ignore start */\n if (entries) {\n if (entries.code === 'ENOENT') {\n return true\n }\n if (entries.code !== 'ENOTDIR') {\n throw entries\n }\n }\n /* c8 ignore stop */\n if (opt.filter && !(await opt.filter(path, ent))) {\n return false\n }\n // is a file\n await ignoreENOENT(rimrafWindowsFile(path, opt))\n return true\n }\n\n const s = state === START ? CHILD : state\n const removedAll = (\n await Promise.all(\n entries.map(ent =>\n rimrafWindowsDir(resolve(path, ent.name), opt, ent, s),\n ),\n )\n ).reduce((a, b) => a && b, true)\n\n if (state === START) {\n return rimrafWindowsDir(path, opt, ent, FINISH)\n } else if (state === FINISH) {\n if (opt.preserveRoot === false && path === parse(path).root) {\n return false\n }\n if (!removedAll) {\n return false\n }\n if (opt.filter && !(await opt.filter(path, ent))) {\n return false\n }\n await ignoreENOENT(rimrafWindowsDirMoveRemoveFallback(path, opt))\n }\n return true\n}\n\nconst rimrafWindowsDirSync = (\n path: string,\n opt: RimrafSyncOptions,\n ent: Dirent | Stats,\n state = START,\n): boolean => {\n const entries = ent.isDirectory() ? readdirOrErrorSync(path) : null\n if (!Array.isArray(entries)) {\n // this can only happen if lstat/readdir lied, or if the dir was\n // swapped out with a file at just the right moment.\n /* c8 ignore start */\n if (entries) {\n if (entries.code === 'ENOENT') {\n return true\n }\n if (entries.code !== 'ENOTDIR') {\n throw entries\n }\n }\n /* c8 ignore stop */\n if (opt.filter && !opt.filter(path, ent)) {\n return false\n }\n // is a file\n ignoreENOENTSync(() => rimrafWindowsFileSync(path, opt))\n return true\n }\n\n let removedAll = true\n for (const ent of entries) {\n const s = state === START ? CHILD : state\n const p = resolve(path, ent.name)\n removedAll = rimrafWindowsDirSync(p, opt, ent, s) && removedAll\n }\n\n if (state === START) {\n return rimrafWindowsDirSync(path, opt, ent, FINISH)\n } else if (state === FINISH) {\n if (opt.preserveRoot === false && path === parse(path).root) {\n return false\n }\n if (!removedAll) {\n return false\n }\n if (opt.filter && !opt.filter(path, ent)) {\n return false\n }\n ignoreENOENTSync(() => {\n rimrafWindowsDirMoveRemoveFallbackSync(path, opt)\n })\n }\n return true\n}\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/use-native.d.ts b/deps/npm/node_modules/rimraf/dist/esm/use-native.d.ts
deleted file mode 100644
index e191fd90da93d3..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/use-native.d.ts
+++ /dev/null
@@ -1,4 +0,0 @@
-import { RimrafAsyncOptions, RimrafOptions } from './index.js';
-export declare const useNative: (opt?: RimrafAsyncOptions) => boolean;
-export declare const useNativeSync: (opt?: RimrafOptions) => boolean;
-//# sourceMappingURL=use-native.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/use-native.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/use-native.d.ts.map
deleted file mode 100644
index b182beb1707a7d..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/use-native.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"use-native.d.ts","sourceRoot":"","sources":["../../src/use-native.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,kBAAkB,EAAE,aAAa,EAAE,MAAM,YAAY,CAAA;AAa9D,eAAO,MAAM,SAAS,EAAE,CAAC,GAAG,CAAC,EAAE,kBAAkB,KAAK,OAGf,CAAA;AACvC,eAAO,MAAM,aAAa,EAAE,CAAC,GAAG,CAAC,EAAE,aAAa,KAAK,OAGd,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/esm/use-native.js b/deps/npm/node_modules/rimraf/dist/esm/use-native.js
deleted file mode 100644
index bf1ea5a14c5aa8..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/use-native.js
+++ /dev/null
@@ -1,16 +0,0 @@
-import platform from './platform.js';
-const version = process.env.__TESTING_RIMRAF_NODE_VERSION__ || process.version;
-const versArr = version.replace(/^v/, '').split('.');
-/* c8 ignore start */
-const [major = 0, minor = 0] = versArr.map(v => parseInt(v, 10));
-/* c8 ignore stop */
-const hasNative = major > 14 || (major === 14 && minor >= 14);
-// we do NOT use native by default on Windows, because Node's native
-// rm implementation is less advanced. Change this code if that changes.
-export const useNative = !hasNative || platform === 'win32' ?
- () => false
- : opt => !opt?.signal && !opt?.filter;
-export const useNativeSync = !hasNative || platform === 'win32' ?
- () => false
- : opt => !opt?.signal && !opt?.filter;
-//# sourceMappingURL=use-native.js.map
\ No newline at end of file
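The deleted `use-native.js` above gated rimraf's fastest path: Node's built-in recursive removal is used only on Node 14.14 or later, never on Windows, and never when a `signal` or `filter` option is present. A sketch of the kind of call that native path delegates to, using `fs.promises.rm`'s documented options; this is not rimraf's own implementation:

```js
'use strict'

// Sketch of the built-in recursive removal that useNative() opted into
// when the environment allowed it (Node >= 14.14, non-Windows, no
// signal/filter options). Option values here are illustrative.
const { rm } = require('fs/promises')

async function removeNative (path) {
  await rm(path, {
    force: true,      // ignore ENOENT, like rm -f
    recursive: true,  // descend into directories, like rm -r
    maxRetries: 10,   // retry transient EBUSY/EPERM-style failures
    retryDelay: 100,  // backoff between retries, in milliseconds
  })
}

removeNative('./build').catch(err => {
  console.error('removal failed:', err)
  process.exitCode = 1
})
```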
diff --git a/deps/npm/node_modules/rimraf/dist/esm/use-native.js.map b/deps/npm/node_modules/rimraf/dist/esm/use-native.js.map
deleted file mode 100644
index 32da371e4181b6..00000000000000
--- a/deps/npm/node_modules/rimraf/dist/esm/use-native.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"use-native.js","sourceRoot":"","sources":["../../src/use-native.ts"],"names":[],"mappings":"AACA,OAAO,QAAQ,MAAM,eAAe,CAAA;AAEpC,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,+BAA+B,IAAI,OAAO,CAAC,OAAO,CAAA;AAC9E,MAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;AAEpD,qBAAqB;AACrB,MAAM,CAAC,KAAK,GAAG,CAAC,EAAE,KAAK,GAAG,CAAC,CAAC,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAA;AAChE,oBAAoB;AACpB,MAAM,SAAS,GAAG,KAAK,GAAG,EAAE,IAAI,CAAC,KAAK,KAAK,EAAE,IAAI,KAAK,IAAI,EAAE,CAAC,CAAA;AAE7D,oEAAoE;AACpE,yEAAyE;AACzE,MAAM,CAAC,MAAM,SAAS,GACpB,CAAC,SAAS,IAAI,QAAQ,KAAK,OAAO,CAAC,CAAC;IAClC,GAAG,EAAE,CAAC,KAAK;IACb,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,GAAG,EAAE,MAAM,IAAI,CAAC,GAAG,EAAE,MAAM,CAAA;AACvC,MAAM,CAAC,MAAM,aAAa,GACxB,CAAC,SAAS,IAAI,QAAQ,KAAK,OAAO,CAAC,CAAC;IAClC,GAAG,EAAE,CAAC,KAAK;IACb,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,GAAG,EAAE,MAAM,IAAI,CAAC,GAAG,EAAE,MAAM,CAAA","sourcesContent":["import { RimrafAsyncOptions, RimrafOptions } from './index.js'\nimport platform from './platform.js'\n\nconst version = process.env.__TESTING_RIMRAF_NODE_VERSION__ || process.version\nconst versArr = version.replace(/^v/, '').split('.')\n\n/* c8 ignore start */\nconst [major = 0, minor = 0] = versArr.map(v => parseInt(v, 10))\n/* c8 ignore stop */\nconst hasNative = major > 14 || (major === 14 && minor >= 14)\n\n// we do NOT use native by default on Windows, because Node's native\n// rm implementation is less advanced. Change this code if that changes.\nexport const useNative: (opt?: RimrafAsyncOptions) => boolean =\n !hasNative || platform === 'win32' ?\n () => false\n : opt => !opt?.signal && !opt?.filter\nexport const useNativeSync: (opt?: RimrafOptions) => boolean =\n !hasNative || platform === 'win32' ?\n () => false\n : opt => !opt?.signal && !opt?.filter\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/package.json b/deps/npm/node_modules/rimraf/package.json
deleted file mode 100644
index 212180c8e3fcc4..00000000000000
--- a/deps/npm/node_modules/rimraf/package.json
+++ /dev/null
@@ -1,89 +0,0 @@
-{
- "name": "rimraf",
- "version": "5.0.10",
- "publishConfig": {
- "tag": "v5-legacy"
- },
- "type": "module",
- "tshy": {
- "main": true,
- "exports": {
- "./package.json": "./package.json",
- ".": "./src/index.ts"
- }
- },
- "bin": "./dist/esm/bin.mjs",
- "main": "./dist/commonjs/index.js",
- "types": "./dist/commonjs/index.d.ts",
- "exports": {
- "./package.json": "./package.json",
- ".": {
- "import": {
- "types": "./dist/esm/index.d.ts",
- "default": "./dist/esm/index.js"
- },
- "require": {
- "types": "./dist/commonjs/index.d.ts",
- "default": "./dist/commonjs/index.js"
- }
- }
- },
- "files": [
- "dist"
- ],
- "description": "A deep deletion module for node (like `rm -rf`)",
- "author": "Isaac Z. Schlueter (http://blog.izs.me/)",
- "license": "ISC",
- "repository": "git://github.com/isaacs/rimraf.git",
- "scripts": {
- "preversion": "npm test",
- "postversion": "npm publish",
- "prepublishOnly": "git push origin --follow-tags",
- "prepare": "tshy",
- "pretest": "npm run prepare",
- "presnap": "npm run prepare",
- "test": "tap",
- "snap": "tap",
- "format": "prettier --write . --log-level warn",
- "benchmark": "node benchmark/index.js",
- "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
- },
- "prettier": {
- "experimentalTernaries": true,
- "semi": false,
- "printWidth": 80,
- "tabWidth": 2,
- "useTabs": false,
- "singleQuote": true,
- "jsxSingleQuote": false,
- "bracketSameLine": true,
- "arrowParens": "avoid",
- "endOfLine": "lf"
- },
- "devDependencies": {
- "@types/node": "^20.12.11",
- "mkdirp": "^3.0.1",
- "prettier": "^3.2.5",
- "tap": "^19.0.1",
- "tshy": "^1.14.0",
- "typedoc": "^0.25.13",
- "typescript": "^5.4.5"
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- },
- "dependencies": {
- "glob": "^10.3.7"
- },
- "keywords": [
- "rm",
- "rm -rf",
- "rm -fr",
- "remove",
- "directory",
- "cli",
- "rmdir",
- "recursive"
- ],
- "module": "./dist/esm/index.js"
-}
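The removed `package.json` above is a dual-module package generated with `tshy`: its conditional `exports` map routes `require` to the CommonJS build and `import` to the ESM build, and only `.` and `./package.json` are reachable as subpaths. A sketch of what that resolution looks like from a consumer; the named `rimraf` export and its boolean result come from the package's documented API, not from this hunk:

```js
'use strict'

// CommonJS consumers follow the "require" condition of the exports map,
// landing on ./dist/commonjs/index.js:
const { rimraf } = require('rimraf')

// ES module consumers follow the "import" condition instead:
//   import { rimraf } from 'rimraf'   // -> ./dist/esm/index.js

// Subpaths outside the map are not reachable once "exports" is present:
//   require('rimraf/dist/commonjs/index.js') // ERR_PACKAGE_PATH_NOT_EXPORTED

rimraf('./tmp-dir').then(removed => console.log('removed:', removed))
```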
diff --git a/deps/npm/node_modules/semver/README.md b/deps/npm/node_modules/semver/README.md
index b31570c9514e1c..67c311f87ca3cd 100644
--- a/deps/npm/node_modules/semver/README.md
+++ b/deps/npm/node_modules/semver/README.md
@@ -100,7 +100,7 @@ Options:
-i --increment []
Increment a version by the specified level. Level can
be one of: major, minor, patch, premajor, preminor,
- prepatch, or prerelease. Default level is 'patch'.
+ prepatch, prerelease, or release. Default level is 'patch'.
Only one version may be specified.
--preid
@@ -141,6 +141,8 @@ A "version" is described by the `v2.0.0` specification found at
.
A leading `"="` or `"v"` character is stripped off and ignored.
+Support for stripping a leading "v" is kept for compatibility with `v1.0.0` of the SemVer
+specification but should not be used anymore.
## Ranges
@@ -237,6 +239,13 @@ $ semver 1.2.4-beta.0 -i prerelease
1.2.4-beta.1
```
+To get out of the prerelease phase, use the `release` option:
+
+```bash
+$ semver 1.2.4-beta.1 -i release
+1.2.4
+```
+
#### Prerelease Identifier Base
The method `.inc` takes an optional parameter 'identifierBase' string
@@ -415,10 +424,10 @@ Strict-mode Comparators and Ranges will be strict about the SemVer
strings that they parse.
* `valid(v)`: Return the parsed version, or null if it's not valid.
-* `inc(v, release, options, identifier, identifierBase)`:
+* `inc(v, releaseType, options, identifier, identifierBase)`:
Return the version incremented by the release
type (`major`, `premajor`, `minor`, `preminor`, `patch`,
- `prepatch`, or `prerelease`), or null if it's not valid
+ `prepatch`, `prerelease`, or `release`), or null if it's not valid
* `premajor` in one call will bump the version up to the next major
version and down to a prerelease of that major version.
`preminor`, and `prepatch` work the same way.
@@ -426,6 +435,7 @@ strings that they parse.
same as `prepatch`. It increments the patch version and then makes a
prerelease. If the input version is already a prerelease it simply
increments it.
+ * `release` will remove any prerelease part of the version.
* `identifier` can be used to prefix `premajor`, `preminor`,
`prepatch`, or `prerelease` version increments. `identifierBase`
is the base to be used for the `prerelease` identifier.
@@ -477,7 +487,7 @@ strings that they parse.
### Ranges
-* `validRange(range)`: Return the valid range or null if it's not valid
+* `validRange(range)`: Return the valid range or null if it's not valid.
* `satisfies(version, range)`: Return true if the version satisfies the
range.
* `maxSatisfying(versions, range)`: Return the highest version in the list
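The README additions above introduce the `release` increment level, which drops the prerelease portion of a version without bumping anything else. A small sketch of the programmatic counterpart of the CLI example, using the `inc` function documented in the same README; the printed values assume the behavior described there:

```js
'use strict'

const semver = require('semver')

// New 'release' level: strip the prerelease identifiers, keep the triple.
console.log(semver.inc('1.2.4-beta.1', 'release')) // '1.2.4'

// The existing pre* levels are unchanged:
console.log(semver.inc('1.2.3', 'prepatch', 'beta'))  // '1.2.4-beta.0'
console.log(semver.inc('1.2.4-beta.0', 'prerelease')) // '1.2.4-beta.1'
```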
diff --git a/deps/npm/node_modules/semver/bin/semver.js b/deps/npm/node_modules/semver/bin/semver.js
index f62b566f74bc63..dbb1bf534ec722 100755
--- a/deps/npm/node_modules/semver/bin/semver.js
+++ b/deps/npm/node_modules/semver/bin/semver.js
@@ -3,6 +3,8 @@
// Exits successfully and prints matching version(s) if
// any supplied version is valid and passes all tests.
+'use strict'
+
const argv = process.argv.slice(2)
let versions = []
@@ -61,6 +63,7 @@ const main = () => {
switch (argv[0]) {
case 'major': case 'minor': case 'patch': case 'prerelease':
case 'premajor': case 'preminor': case 'prepatch':
+ case 'release':
inc = argv.shift()
break
default:
@@ -149,7 +152,7 @@ Options:
-i --increment []
Increment a version by the specified level. Level can
be one of: major, minor, patch, premajor, preminor,
- prepatch, or prerelease. Default level is 'patch'.
+ prepatch, prerelease, or release. Default level is 'patch'.
Only one version may be specified.
--preid
diff --git a/deps/npm/node_modules/semver/classes/comparator.js b/deps/npm/node_modules/semver/classes/comparator.js
index 3d39c0eef78023..647c1f0976fd78 100644
--- a/deps/npm/node_modules/semver/classes/comparator.js
+++ b/deps/npm/node_modules/semver/classes/comparator.js
@@ -1,3 +1,5 @@
+'use strict'
+
const ANY = Symbol('SemVer ANY')
// hoisted class for cyclic dependency
class Comparator {
diff --git a/deps/npm/node_modules/semver/classes/index.js b/deps/npm/node_modules/semver/classes/index.js
index 5e3f5c9b19cef1..91c24ec4a72649 100644
--- a/deps/npm/node_modules/semver/classes/index.js
+++ b/deps/npm/node_modules/semver/classes/index.js
@@ -1,3 +1,5 @@
+'use strict'
+
module.exports = {
SemVer: require('./semver.js'),
Range: require('./range.js'),
diff --git a/deps/npm/node_modules/semver/classes/range.js b/deps/npm/node_modules/semver/classes/range.js
index ceee23144d3b89..f80c2359c6b82f 100644
--- a/deps/npm/node_modules/semver/classes/range.js
+++ b/deps/npm/node_modules/semver/classes/range.js
@@ -1,3 +1,5 @@
+'use strict'
+
const SPACE_CHARACTERS = /\s+/g
// hoisted class for cyclic dependency
diff --git a/deps/npm/node_modules/semver/classes/semver.js b/deps/npm/node_modules/semver/classes/semver.js
index 13e66ce4415694..2efba0f4b6451e 100644
--- a/deps/npm/node_modules/semver/classes/semver.js
+++ b/deps/npm/node_modules/semver/classes/semver.js
@@ -1,3 +1,5 @@
+'use strict'
+
const debug = require('../internal/debug')
const { MAX_LENGTH, MAX_SAFE_INTEGER } = require('../internal/constants')
const { safeRe: re, t } = require('../internal/re')
@@ -10,7 +12,7 @@ class SemVer {
if (version instanceof SemVer) {
if (version.loose === !!options.loose &&
- version.includePrerelease === !!options.includePrerelease) {
+ version.includePrerelease === !!options.includePrerelease) {
return version
} else {
version = version.version
@@ -176,6 +178,19 @@ class SemVer {
// preminor will bump the version up to the next minor release, and immediately
// down to pre-release. premajor and prepatch work the same way.
inc (release, identifier, identifierBase) {
+ if (release.startsWith('pre')) {
+ if (!identifier && identifierBase === false) {
+ throw new Error('invalid increment argument: identifier is empty')
+ }
+ // Avoid an invalid semver results
+ if (identifier) {
+ const match = `-${identifier}`.match(this.options.loose ? re[t.PRERELEASELOOSE] : re[t.PRERELEASE])
+ if (!match || match[1] !== identifier) {
+ throw new Error(`invalid identifier: ${identifier}`)
+ }
+ }
+ }
+
switch (release) {
case 'premajor':
this.prerelease.length = 0
@@ -206,6 +221,12 @@ class SemVer {
}
this.inc('pre', identifier, identifierBase)
break
+ case 'release':
+ if (this.prerelease.length === 0) {
+ throw new Error(`version ${this.raw} is not a prerelease`)
+ }
+ this.prerelease.length = 0
+ break
case 'major':
// If this is a pre-major version, bump up to the same major version.
@@ -249,10 +270,6 @@ class SemVer {
case 'pre': {
const base = Number(identifierBase) ? 1 : 0
- if (!identifier && identifierBase === false) {
- throw new Error('invalid increment argument: identifier is empty')
- }
-
if (this.prerelease.length === 0) {
this.prerelease = [base]
} else {
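The `inc` hunk above does three things: it hoists the empty-identifier check so it runs for every `pre*` level, it rejects identifiers that would not parse back as a valid prerelease string, and it adds the `release` case, which only applies to prerelease versions. A sketch of the resulting behavior; the error messages are quoted from the hunk, and the subpath require is the one semver's README documents:

```js
'use strict'

const SemVer = require('semver/classes/semver')

// 'release' clears the prerelease identifiers in place:
console.log(new SemVer('2.0.0-rc.3').inc('release').version) // '2.0.0'

// ...but refuses to run on a version that is not a prerelease:
try {
  new SemVer('2.0.0').inc('release')
} catch (err) {
  console.log(err.message) // 'version 2.0.0 is not a prerelease'
}

// pre* increments now validate the identifier up front; a leading-zero
// numeric identifier is not a legal prerelease identifier, so it throws:
try {
  new SemVer('1.2.3').inc('prerelease', '01')
} catch (err) {
  console.log(err.message) // 'invalid identifier: 01'
}
```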
diff --git a/deps/npm/node_modules/semver/functions/clean.js b/deps/npm/node_modules/semver/functions/clean.js
index 811fe6b82cb73e..79703d6316617e 100644
--- a/deps/npm/node_modules/semver/functions/clean.js
+++ b/deps/npm/node_modules/semver/functions/clean.js
@@ -1,3 +1,5 @@
+'use strict'
+
const parse = require('./parse')
const clean = (version, options) => {
const s = parse(version.trim().replace(/^[=v]+/, ''), options)
diff --git a/deps/npm/node_modules/semver/functions/cmp.js b/deps/npm/node_modules/semver/functions/cmp.js
index 40119094747dd0..77487dcaac5f50 100644
--- a/deps/npm/node_modules/semver/functions/cmp.js
+++ b/deps/npm/node_modules/semver/functions/cmp.js
@@ -1,3 +1,5 @@
+'use strict'
+
const eq = require('./eq')
const neq = require('./neq')
const gt = require('./gt')
diff --git a/deps/npm/node_modules/semver/functions/coerce.js b/deps/npm/node_modules/semver/functions/coerce.js
index b378dcea4e5a74..cfe027599516f3 100644
--- a/deps/npm/node_modules/semver/functions/coerce.js
+++ b/deps/npm/node_modules/semver/functions/coerce.js
@@ -1,3 +1,5 @@
+'use strict'
+
const SemVer = require('../classes/semver')
const parse = require('./parse')
const { safeRe: re, t } = require('../internal/re')
diff --git a/deps/npm/node_modules/semver/functions/compare-build.js b/deps/npm/node_modules/semver/functions/compare-build.js
index 9eb881bef0fddc..99157cf3d105e0 100644
--- a/deps/npm/node_modules/semver/functions/compare-build.js
+++ b/deps/npm/node_modules/semver/functions/compare-build.js
@@ -1,3 +1,5 @@
+'use strict'
+
const SemVer = require('../classes/semver')
const compareBuild = (a, b, loose) => {
const versionA = new SemVer(a, loose)
diff --git a/deps/npm/node_modules/semver/functions/compare-loose.js b/deps/npm/node_modules/semver/functions/compare-loose.js
index 4881fbe00250c5..75316346a81cb3 100644
--- a/deps/npm/node_modules/semver/functions/compare-loose.js
+++ b/deps/npm/node_modules/semver/functions/compare-loose.js
@@ -1,3 +1,5 @@
+'use strict'
+
const compare = require('./compare')
const compareLoose = (a, b) => compare(a, b, true)
module.exports = compareLoose
diff --git a/deps/npm/node_modules/semver/functions/compare.js b/deps/npm/node_modules/semver/functions/compare.js
index 748b7afa514a9f..63d8090c626cea 100644
--- a/deps/npm/node_modules/semver/functions/compare.js
+++ b/deps/npm/node_modules/semver/functions/compare.js
@@ -1,3 +1,5 @@
+'use strict'
+
const SemVer = require('../classes/semver')
const compare = (a, b, loose) =>
new SemVer(a, loose).compare(new SemVer(b, loose))
diff --git a/deps/npm/node_modules/semver/functions/diff.js b/deps/npm/node_modules/semver/functions/diff.js
index fc224e302c0e42..04e064e9196b58 100644
--- a/deps/npm/node_modules/semver/functions/diff.js
+++ b/deps/npm/node_modules/semver/functions/diff.js
@@ -1,3 +1,5 @@
+'use strict'
+
const parse = require('./parse.js')
const diff = (version1, version2) => {
@@ -27,20 +29,13 @@ const diff = (version1, version2) => {
return 'major'
}
- // Otherwise it can be determined by checking the high version
-
- if (highVersion.patch) {
- // anything higher than a patch bump would result in the wrong version
+ // If the main part has no difference
+ if (lowVersion.compareMain(highVersion) === 0) {
+ if (lowVersion.minor && !lowVersion.patch) {
+ return 'minor'
+ }
return 'patch'
}
-
- if (highVersion.minor) {
- // anything higher than a minor bump would result in the wrong version
- return 'minor'
- }
-
- // bumping major/minor/patch all have same result
- return 'major'
}
// add the `pre` prefix if we are going to a prerelease version
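The rewritten branch above changes how `diff` classifies a move out of prerelease when both versions share the same `major.minor.patch`: the answer is now read off the low (prerelease) side, `minor` for an `x.y.0` prerelease and `patch` otherwise, instead of being inferred from the high side. A sketch of the observable results, assuming the parts of the function outside this hunk are unchanged:

```js
'use strict'

const { diff } = require('semver')

// Same release triple on both sides, so the rewritten branch decides:
console.log(diff('1.1.0-alpha.1', '1.1.0')) // 'minor'  (minor set, patch is 0)
console.log(diff('1.1.1-alpha.1', '1.1.1')) // 'patch'  (patch is set)
console.log(diff('1.0.0-alpha.1', '1.0.0')) // 'major'  (handled just above this hunk)

// Comparisons that never enter the branch keep their usual meaning:
console.log(diff('1.2.3', '1.2.4'))        // 'patch'
console.log(diff('1.2.3', '1.3.0-beta.0')) // 'preminor'
```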
diff --git a/deps/npm/node_modules/semver/functions/eq.js b/deps/npm/node_modules/semver/functions/eq.js
index 271fed976f34a6..5f0eead1169fe5 100644
--- a/deps/npm/node_modules/semver/functions/eq.js
+++ b/deps/npm/node_modules/semver/functions/eq.js
@@ -1,3 +1,5 @@
+'use strict'
+
const compare = require('./compare')
const eq = (a, b, loose) => compare(a, b, loose) === 0
module.exports = eq
diff --git a/deps/npm/node_modules/semver/functions/gt.js b/deps/npm/node_modules/semver/functions/gt.js
index d9b2156d8b56c3..84a57ddff50a09 100644
--- a/deps/npm/node_modules/semver/functions/gt.js
+++ b/deps/npm/node_modules/semver/functions/gt.js
@@ -1,3 +1,5 @@
+'use strict'
+
const compare = require('./compare')
const gt = (a, b, loose) => compare(a, b, loose) > 0
module.exports = gt
diff --git a/deps/npm/node_modules/semver/functions/gte.js b/deps/npm/node_modules/semver/functions/gte.js
index 5aeaa634707a0c..7c52bdf2529ad8 100644
--- a/deps/npm/node_modules/semver/functions/gte.js
+++ b/deps/npm/node_modules/semver/functions/gte.js
@@ -1,3 +1,5 @@
+'use strict'
+
const compare = require('./compare')
const gte = (a, b, loose) => compare(a, b, loose) >= 0
module.exports = gte
diff --git a/deps/npm/node_modules/semver/functions/inc.js b/deps/npm/node_modules/semver/functions/inc.js
index 7670b1bea1a497..ff999e9d04d7fa 100644
--- a/deps/npm/node_modules/semver/functions/inc.js
+++ b/deps/npm/node_modules/semver/functions/inc.js
@@ -1,3 +1,5 @@
+'use strict'
+
const SemVer = require('../classes/semver')
const inc = (version, release, options, identifier, identifierBase) => {
diff --git a/deps/npm/node_modules/semver/functions/lt.js b/deps/npm/node_modules/semver/functions/lt.js
index b440ab7d4212d3..2fb32a0e63c9a1 100644
--- a/deps/npm/node_modules/semver/functions/lt.js
+++ b/deps/npm/node_modules/semver/functions/lt.js
@@ -1,3 +1,5 @@
+'use strict'
+
const compare = require('./compare')
const lt = (a, b, loose) => compare(a, b, loose) < 0
module.exports = lt
diff --git a/deps/npm/node_modules/semver/functions/lte.js b/deps/npm/node_modules/semver/functions/lte.js
index 6dcc956505584e..da9ee8f4e4404e 100644
--- a/deps/npm/node_modules/semver/functions/lte.js
+++ b/deps/npm/node_modules/semver/functions/lte.js
@@ -1,3 +1,5 @@
+'use strict'
+
const compare = require('./compare')
const lte = (a, b, loose) => compare(a, b, loose) <= 0
module.exports = lte
diff --git a/deps/npm/node_modules/semver/functions/major.js b/deps/npm/node_modules/semver/functions/major.js
index 4283165e9d2719..e6d08dc20cf20b 100644
--- a/deps/npm/node_modules/semver/functions/major.js
+++ b/deps/npm/node_modules/semver/functions/major.js
@@ -1,3 +1,5 @@
+'use strict'
+
const SemVer = require('../classes/semver')
const major = (a, loose) => new SemVer(a, loose).major
module.exports = major
diff --git a/deps/npm/node_modules/semver/functions/minor.js b/deps/npm/node_modules/semver/functions/minor.js
index 57b3455f827bac..9e70ffda19223a 100644
--- a/deps/npm/node_modules/semver/functions/minor.js
+++ b/deps/npm/node_modules/semver/functions/minor.js
@@ -1,3 +1,5 @@
+'use strict'
+
const SemVer = require('../classes/semver')
const minor = (a, loose) => new SemVer(a, loose).minor
module.exports = minor
diff --git a/deps/npm/node_modules/semver/functions/neq.js b/deps/npm/node_modules/semver/functions/neq.js
index f944c01576973f..84326b77336103 100644
--- a/deps/npm/node_modules/semver/functions/neq.js
+++ b/deps/npm/node_modules/semver/functions/neq.js
@@ -1,3 +1,5 @@
+'use strict'
+
const compare = require('./compare')
const neq = (a, b, loose) => compare(a, b, loose) !== 0
module.exports = neq
diff --git a/deps/npm/node_modules/semver/functions/parse.js b/deps/npm/node_modules/semver/functions/parse.js
index 459b3b17375c82..d544d33a7e93cb 100644
--- a/deps/npm/node_modules/semver/functions/parse.js
+++ b/deps/npm/node_modules/semver/functions/parse.js
@@ -1,3 +1,5 @@
+'use strict'
+
const SemVer = require('../classes/semver')
const parse = (version, options, throwErrors = false) => {
if (version instanceof SemVer) {
diff --git a/deps/npm/node_modules/semver/functions/patch.js b/deps/npm/node_modules/semver/functions/patch.js
index 63afca2524fca9..7675162f1742af 100644
--- a/deps/npm/node_modules/semver/functions/patch.js
+++ b/deps/npm/node_modules/semver/functions/patch.js
@@ -1,3 +1,5 @@
+'use strict'
+
const SemVer = require('../classes/semver')
const patch = (a, loose) => new SemVer(a, loose).patch
module.exports = patch
diff --git a/deps/npm/node_modules/semver/functions/prerelease.js b/deps/npm/node_modules/semver/functions/prerelease.js
index 06aa13248ae651..b8fe1db5049a23 100644
--- a/deps/npm/node_modules/semver/functions/prerelease.js
+++ b/deps/npm/node_modules/semver/functions/prerelease.js
@@ -1,3 +1,5 @@
+'use strict'
+
const parse = require('./parse')
const prerelease = (version, options) => {
const parsed = parse(version, options)
diff --git a/deps/npm/node_modules/semver/functions/rcompare.js b/deps/npm/node_modules/semver/functions/rcompare.js
index 0ac509e79dc8cf..8e1c222b2ffc24 100644
--- a/deps/npm/node_modules/semver/functions/rcompare.js
+++ b/deps/npm/node_modules/semver/functions/rcompare.js
@@ -1,3 +1,5 @@
+'use strict'
+
const compare = require('./compare')
const rcompare = (a, b, loose) => compare(b, a, loose)
module.exports = rcompare
diff --git a/deps/npm/node_modules/semver/functions/rsort.js b/deps/npm/node_modules/semver/functions/rsort.js
index 82404c5cfe0266..5d3d20096844b1 100644
--- a/deps/npm/node_modules/semver/functions/rsort.js
+++ b/deps/npm/node_modules/semver/functions/rsort.js
@@ -1,3 +1,5 @@
+'use strict'
+
const compareBuild = require('./compare-build')
const rsort = (list, loose) => list.sort((a, b) => compareBuild(b, a, loose))
module.exports = rsort
diff --git a/deps/npm/node_modules/semver/functions/satisfies.js b/deps/npm/node_modules/semver/functions/satisfies.js
index 50af1c199b6cae..a0264a222ac82d 100644
--- a/deps/npm/node_modules/semver/functions/satisfies.js
+++ b/deps/npm/node_modules/semver/functions/satisfies.js
@@ -1,3 +1,5 @@
+'use strict'
+
const Range = require('../classes/range')
const satisfies = (version, range, options) => {
try {
diff --git a/deps/npm/node_modules/semver/functions/sort.js b/deps/npm/node_modules/semver/functions/sort.js
index 4d10917aba8e5a..edb24b1dc3324d 100644
--- a/deps/npm/node_modules/semver/functions/sort.js
+++ b/deps/npm/node_modules/semver/functions/sort.js
@@ -1,3 +1,5 @@
+'use strict'
+
const compareBuild = require('./compare-build')
const sort = (list, loose) => list.sort((a, b) => compareBuild(a, b, loose))
module.exports = sort
diff --git a/deps/npm/node_modules/semver/functions/valid.js b/deps/npm/node_modules/semver/functions/valid.js
index f27bae10731c0c..0db67edcb5952a 100644
--- a/deps/npm/node_modules/semver/functions/valid.js
+++ b/deps/npm/node_modules/semver/functions/valid.js
@@ -1,3 +1,5 @@
+'use strict'
+
const parse = require('./parse')
const valid = (version, options) => {
const v = parse(version, options)
diff --git a/deps/npm/node_modules/semver/index.js b/deps/npm/node_modules/semver/index.js
index 86d42ac16a840b..285662acb32892 100644
--- a/deps/npm/node_modules/semver/index.js
+++ b/deps/npm/node_modules/semver/index.js
@@ -1,3 +1,5 @@
+'use strict'
+
// just pre-load all the stuff that index.js lazily exports
const internalRe = require('./internal/re')
const constants = require('./internal/constants')
diff --git a/deps/npm/node_modules/semver/internal/constants.js b/deps/npm/node_modules/semver/internal/constants.js
index 94be1c570277a5..6d1db9154331d4 100644
--- a/deps/npm/node_modules/semver/internal/constants.js
+++ b/deps/npm/node_modules/semver/internal/constants.js
@@ -1,3 +1,5 @@
+'use strict'
+
// Note: this is the semver.org version of the spec that it implements
// Not necessarily the package version of this code.
const SEMVER_SPEC_VERSION = '2.0.0'
diff --git a/deps/npm/node_modules/semver/internal/debug.js b/deps/npm/node_modules/semver/internal/debug.js
index 1c00e1369aa2a0..20d1e9dceea90e 100644
--- a/deps/npm/node_modules/semver/internal/debug.js
+++ b/deps/npm/node_modules/semver/internal/debug.js
@@ -1,3 +1,5 @@
+'use strict'
+
const debug = (
typeof process === 'object' &&
process.env &&
diff --git a/deps/npm/node_modules/semver/internal/identifiers.js b/deps/npm/node_modules/semver/internal/identifiers.js
index e612d0a3d83619..a4613dee7977f0 100644
--- a/deps/npm/node_modules/semver/internal/identifiers.js
+++ b/deps/npm/node_modules/semver/internal/identifiers.js
@@ -1,3 +1,5 @@
+'use strict'
+
const numeric = /^[0-9]+$/
const compareIdentifiers = (a, b) => {
const anum = numeric.test(a)
diff --git a/deps/npm/node_modules/semver/internal/lrucache.js b/deps/npm/node_modules/semver/internal/lrucache.js
index 6d89ec948d0f1f..b8bf5262a0505c 100644
--- a/deps/npm/node_modules/semver/internal/lrucache.js
+++ b/deps/npm/node_modules/semver/internal/lrucache.js
@@ -1,3 +1,5 @@
+'use strict'
+
class LRUCache {
constructor () {
this.max = 1000
diff --git a/deps/npm/node_modules/semver/internal/parse-options.js b/deps/npm/node_modules/semver/internal/parse-options.js
index 10d64ce06d3c59..5295454130d421 100644
--- a/deps/npm/node_modules/semver/internal/parse-options.js
+++ b/deps/npm/node_modules/semver/internal/parse-options.js
@@ -1,3 +1,5 @@
+'use strict'
+
// parse out just the options we care about
const looseOption = Object.freeze({ loose: true })
const emptyOpts = Object.freeze({ })
diff --git a/deps/npm/node_modules/semver/internal/re.js b/deps/npm/node_modules/semver/internal/re.js
index fd8920e7baa717..4758c58d424a9b 100644
--- a/deps/npm/node_modules/semver/internal/re.js
+++ b/deps/npm/node_modules/semver/internal/re.js
@@ -1,3 +1,5 @@
+'use strict'
+
const {
MAX_SAFE_COMPONENT_LENGTH,
MAX_SAFE_BUILD_LENGTH,
@@ -10,6 +12,7 @@ exports = module.exports = {}
const re = exports.re = []
const safeRe = exports.safeRe = []
const src = exports.src = []
+const safeSrc = exports.safeSrc = []
const t = exports.t = {}
let R = 0
@@ -42,6 +45,7 @@ const createToken = (name, value, isGlobal) => {
debug(name, index, value)
t[name] = index
src[index] = value
+ safeSrc[index] = safe
re[index] = new RegExp(value, isGlobal ? 'g' : undefined)
safeRe[index] = new RegExp(safe, isGlobal ? 'g' : undefined)
}
@@ -74,12 +78,14 @@ createToken('MAINVERSIONLOOSE', `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` +
// ## Pre-release Version Identifier
// A numeric identifier, or a non-numeric identifier.
+// Non-numberic identifiers include numberic identifiers but can be longer.
+// Therefore non-numberic identifiers must go first.
-createToken('PRERELEASEIDENTIFIER', `(?:${src[t.NUMERICIDENTIFIER]
-}|${src[t.NONNUMERICIDENTIFIER]})`)
+createToken('PRERELEASEIDENTIFIER', `(?:${src[t.NONNUMERICIDENTIFIER]
+}|${src[t.NUMERICIDENTIFIER]})`)
-createToken('PRERELEASEIDENTIFIERLOOSE', `(?:${src[t.NUMERICIDENTIFIERLOOSE]
-}|${src[t.NONNUMERICIDENTIFIER]})`)
+createToken('PRERELEASEIDENTIFIERLOOSE', `(?:${src[t.NONNUMERICIDENTIFIER]
+}|${src[t.NUMERICIDENTIFIERLOOSE]})`)
// ## Pre-release Version
// Hyphen, followed by one or more dot-separated pre-release version
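The reordering above matters because a JavaScript regex alternation commits to the first branch that matches at a position, and the unanchored prerelease check added in `classes/semver.js` never forces the engine to revisit a longer branch afterwards. An identifier such as `1a` begins with digits, so with the numeric branch first the match stops at `1`. A small illustration using simplified stand-ins for the two identifier fragments (not the library's exact sources):

```js
'use strict'

// Simplified stand-ins for the two prerelease identifier fragments:
const NUMERIC = '0|[1-9]\\d*'                    // purely numeric
const NONNUMERIC = '\\d*[a-zA-Z-][a-zA-Z0-9-]*'  // may start with digits

const numericFirst = new RegExp(`^(?:${NUMERIC}|${NONNUMERIC})`)
const nonNumericFirst = new RegExp(`^(?:${NONNUMERIC}|${NUMERIC})`)

// '1a' is a legal (non-numeric) prerelease identifier:
console.log('1a'.match(numericFirst)[0])    // '1'  -- numeric branch wins, match truncated
console.log('1a'.match(nonNumericFirst)[0]) // '1a' -- full identifier matched
```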
diff --git a/deps/npm/node_modules/semver/package.json b/deps/npm/node_modules/semver/package.json
index 663d3701b7e6b0..1fbef5a9bf9cd8 100644
--- a/deps/npm/node_modules/semver/package.json
+++ b/deps/npm/node_modules/semver/package.json
@@ -1,20 +1,21 @@
{
"name": "semver",
- "version": "7.6.3",
+ "version": "7.7.2",
"description": "The semantic version parser used by npm.",
"main": "index.js",
"scripts": {
"test": "tap",
"snap": "tap",
- "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
+ "lint": "npm run eslint",
"postlint": "template-oss-check",
- "lintfix": "npm run lint -- --fix",
+ "lintfix": "npm run eslint -- --fix",
"posttest": "npm run lint",
- "template-oss-apply": "template-oss-apply --force"
+ "template-oss-apply": "template-oss-apply --force",
+ "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
},
"devDependencies": {
- "@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.22.0",
+ "@npmcli/eslint-config": "^5.0.0",
+ "@npmcli/template-oss": "4.24.3",
"benchmark": "^2.1.4",
"tap": "^16.0.0"
},
@@ -51,7 +52,7 @@
"author": "GitHub Inc.",
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.22.0",
+ "version": "4.24.3",
"engines": ">=10",
"distPaths": [
"classes/",
diff --git a/deps/npm/node_modules/semver/preload.js b/deps/npm/node_modules/semver/preload.js
index 947cd4f7917fff..e6c47b9b051d9b 100644
--- a/deps/npm/node_modules/semver/preload.js
+++ b/deps/npm/node_modules/semver/preload.js
@@ -1,2 +1,4 @@
+'use strict'
+
// XXX remove in v8 or beyond
module.exports = require('./index.js')
diff --git a/deps/npm/node_modules/semver/ranges/gtr.js b/deps/npm/node_modules/semver/ranges/gtr.js
index db7e35599dd566..0e7601f693554a 100644
--- a/deps/npm/node_modules/semver/ranges/gtr.js
+++ b/deps/npm/node_modules/semver/ranges/gtr.js
@@ -1,3 +1,5 @@
+'use strict'
+
// Determine if version is greater than all the versions possible in the range.
const outside = require('./outside')
const gtr = (version, range, options) => outside(version, range, '>', options)
diff --git a/deps/npm/node_modules/semver/ranges/intersects.js b/deps/npm/node_modules/semver/ranges/intersects.js
index e0e9b7ce000e47..917be7e4293d2b 100644
--- a/deps/npm/node_modules/semver/ranges/intersects.js
+++ b/deps/npm/node_modules/semver/ranges/intersects.js
@@ -1,3 +1,5 @@
+'use strict'
+
const Range = require('../classes/range')
const intersects = (r1, r2, options) => {
r1 = new Range(r1, options)
diff --git a/deps/npm/node_modules/semver/ranges/ltr.js b/deps/npm/node_modules/semver/ranges/ltr.js
index 528a885ebdfcdb..aa5e568ec279da 100644
--- a/deps/npm/node_modules/semver/ranges/ltr.js
+++ b/deps/npm/node_modules/semver/ranges/ltr.js
@@ -1,3 +1,5 @@
+'use strict'
+
const outside = require('./outside')
// Determine if version is less than all the versions possible in the range
const ltr = (version, range, options) => outside(version, range, '<', options)
diff --git a/deps/npm/node_modules/semver/ranges/max-satisfying.js b/deps/npm/node_modules/semver/ranges/max-satisfying.js
index 6e3d993c67860c..01fe5ae3837157 100644
--- a/deps/npm/node_modules/semver/ranges/max-satisfying.js
+++ b/deps/npm/node_modules/semver/ranges/max-satisfying.js
@@ -1,3 +1,5 @@
+'use strict'
+
const SemVer = require('../classes/semver')
const Range = require('../classes/range')
diff --git a/deps/npm/node_modules/semver/ranges/min-satisfying.js b/deps/npm/node_modules/semver/ranges/min-satisfying.js
index 9b60974e2253a0..af89c8ef432692 100644
--- a/deps/npm/node_modules/semver/ranges/min-satisfying.js
+++ b/deps/npm/node_modules/semver/ranges/min-satisfying.js
@@ -1,3 +1,5 @@
+'use strict'
+
const SemVer = require('../classes/semver')
const Range = require('../classes/range')
const minSatisfying = (versions, range, options) => {
diff --git a/deps/npm/node_modules/semver/ranges/min-version.js b/deps/npm/node_modules/semver/ranges/min-version.js
index 350e1f78368ea2..09a65aa36fd517 100644
--- a/deps/npm/node_modules/semver/ranges/min-version.js
+++ b/deps/npm/node_modules/semver/ranges/min-version.js
@@ -1,3 +1,5 @@
+'use strict'
+
const SemVer = require('../classes/semver')
const Range = require('../classes/range')
const gt = require('../functions/gt')
diff --git a/deps/npm/node_modules/semver/ranges/outside.js b/deps/npm/node_modules/semver/ranges/outside.js
index ae99b10a5b9e6a..ca7442120798ea 100644
--- a/deps/npm/node_modules/semver/ranges/outside.js
+++ b/deps/npm/node_modules/semver/ranges/outside.js
@@ -1,3 +1,5 @@
+'use strict'
+
const SemVer = require('../classes/semver')
const Comparator = require('../classes/comparator')
const { ANY } = Comparator
diff --git a/deps/npm/node_modules/semver/ranges/simplify.js b/deps/npm/node_modules/semver/ranges/simplify.js
index 618d5b62735518..262732e670d7df 100644
--- a/deps/npm/node_modules/semver/ranges/simplify.js
+++ b/deps/npm/node_modules/semver/ranges/simplify.js
@@ -1,3 +1,5 @@
+'use strict'
+
// given a set of versions and a range, create a "simplified" range
// that includes the same versions that the original range does
// If the original range is shorter than the simplified one, return that.
diff --git a/deps/npm/node_modules/semver/ranges/subset.js b/deps/npm/node_modules/semver/ranges/subset.js
index 1e5c26837c047d..2c49aef1be5e87 100644
--- a/deps/npm/node_modules/semver/ranges/subset.js
+++ b/deps/npm/node_modules/semver/ranges/subset.js
@@ -1,3 +1,5 @@
+'use strict'
+
const Range = require('../classes/range.js')
const Comparator = require('../classes/comparator.js')
const { ANY } = Comparator
diff --git a/deps/npm/node_modules/semver/ranges/to-comparators.js b/deps/npm/node_modules/semver/ranges/to-comparators.js
index 6c8bc7e6f15a40..5be251961acbdf 100644
--- a/deps/npm/node_modules/semver/ranges/to-comparators.js
+++ b/deps/npm/node_modules/semver/ranges/to-comparators.js
@@ -1,3 +1,5 @@
+'use strict'
+
const Range = require('../classes/range')
// Mostly just for testing and legacy API reasons
diff --git a/deps/npm/node_modules/semver/ranges/valid.js b/deps/npm/node_modules/semver/ranges/valid.js
index 365f35689d358b..cc6b0e9f68f95f 100644
--- a/deps/npm/node_modules/semver/ranges/valid.js
+++ b/deps/npm/node_modules/semver/ranges/valid.js
@@ -1,3 +1,5 @@
+'use strict'
+
const Range = require('../classes/range')
const validRange = (range, options) => {
try {
diff --git a/deps/npm/node_modules/sigstore/dist/sigstore.js b/deps/npm/node_modules/sigstore/dist/sigstore.js
index c45524bbe21c22..b82b6208d45dfa 100644
--- a/deps/npm/node_modules/sigstore/dist/sigstore.js
+++ b/deps/npm/node_modules/sigstore/dist/sigstore.js
@@ -15,13 +15,23 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
}) : function(o, v) {
o["default"] = v;
});
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
- __setModuleDefault(result, mod);
- return result;
-};
+var __importStar = (this && this.__importStar) || (function () {
+ var ownKeys = function(o) {
+ ownKeys = Object.getOwnPropertyNames || function (o) {
+ var ar = [];
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+ return ar;
+ };
+ return ownKeys(o);
+ };
+ return function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+ __setModuleDefault(result, mod);
+ return result;
+ };
+})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.sign = sign;
exports.attest = attest;
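The replacement `__importStar` helper above, repeated verbatim in the other regenerated files below, looks like the version emitted by newer TypeScript releases. It still does the same job, wrapping a CommonJS module so `import * as ns` call sites see the named exports plus a `default` binding, but it now copies own property names via `Object.getOwnPropertyNames` instead of walking enumerable keys with `for...in`. A rough standalone re-implementation of that behavior for illustration; the real helper also routes through the `__createBinding` and `__setModuleDefault` helpers emitted alongside it:

```js
'use strict'

// Rough behavioral sketch of the new __importStar, for illustration only.
function importStar (mod) {
  if (mod && mod.__esModule) return mod // already an ES module namespace
  const result = {}
  if (mod != null) {
    // Own property names (enumerable or not), instead of a for...in walk:
    for (const key of Object.getOwnPropertyNames(mod)) {
      if (key !== 'default') result[key] = mod[key]
    }
  }
  result.default = mod // the whole CJS exports object becomes the default
  return result
}

// Hypothetical legacy CommonJS exports object, purely for demonstration:
const legacy = { sign: () => 'signed', verify: () => true }
const ns = importStar(legacy)
console.log(ns.sign(), ns.default.verify()) // 'signed' true
```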
diff --git a/deps/npm/node_modules/sigstore/node_modules/@sigstore/bundle/package.json b/deps/npm/node_modules/sigstore/node_modules/@sigstore/bundle/package.json
index ee5d2b92b801a5..61b062ae2b2128 100644
--- a/deps/npm/node_modules/sigstore/node_modules/@sigstore/bundle/package.json
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/bundle/package.json
@@ -1,6 +1,6 @@
{
"name": "@sigstore/bundle",
- "version": "3.0.0",
+ "version": "3.1.0",
"description": "Sigstore bundle type",
"main": "dist/index.js",
"types": "dist/index.d.ts",
@@ -27,7 +27,7 @@
"provenance": true
},
"dependencies": {
- "@sigstore/protobuf-specs": "^0.3.2"
+ "@sigstore/protobuf-specs": "^0.4.0"
},
"engines": {
"node": "^18.17.0 || >=20.5.0"
diff --git a/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/bundler/bundle.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/bundler/bundle.js
index ed32286ad88efd..34b1d12f2b44c8 100644
--- a/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/bundler/bundle.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/bundler/bundle.js
@@ -15,13 +15,23 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
}) : function(o, v) {
o["default"] = v;
});
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
- __setModuleDefault(result, mod);
- return result;
-};
+var __importStar = (this && this.__importStar) || (function () {
+ var ownKeys = function(o) {
+ ownKeys = Object.getOwnPropertyNames || function (o) {
+ var ar = [];
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+ return ar;
+ };
+ return ownKeys(o);
+ };
+ return function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+ __setModuleDefault(result, mod);
+ return result;
+ };
+})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.toMessageSignatureBundle = toMessageSignatureBundle;
exports.toDSSEBundle = toDSSEBundle;
diff --git a/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/util/index.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/util/index.js
index f467c9150c348f..436630cfbbf196 100644
--- a/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/util/index.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/dist/util/index.js
@@ -15,13 +15,23 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
}) : function(o, v) {
o["default"] = v;
});
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
- __setModuleDefault(result, mod);
- return result;
-};
+var __importStar = (this && this.__importStar) || (function () {
+ var ownKeys = function(o) {
+ ownKeys = Object.getOwnPropertyNames || function (o) {
+ var ar = [];
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+ return ar;
+ };
+ return ownKeys(o);
+ };
+ return function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+ __setModuleDefault(result, mod);
+ return result;
+ };
+})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.ua = exports.oidc = exports.pem = exports.json = exports.encoding = exports.dsse = exports.crypto = void 0;
/*
diff --git a/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/package.json b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/package.json
index fe05e8dc2d73ad..b1d60ea1fdce6c 100644
--- a/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/package.json
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/sign/package.json
@@ -1,6 +1,6 @@
{
"name": "@sigstore/sign",
- "version": "3.0.0",
+ "version": "3.1.0",
"description": "Sigstore signing library",
"main": "dist/index.js",
"types": "dist/index.d.ts",
@@ -27,16 +27,16 @@
},
"devDependencies": {
"@sigstore/jest": "^0.0.0",
- "@sigstore/mock": "^0.8.0",
+ "@sigstore/mock": "^0.10.0",
"@sigstore/rekor-types": "^3.0.0",
"@types/make-fetch-happen": "^10.0.4",
"@types/promise-retry": "^1.1.6"
},
"dependencies": {
- "@sigstore/bundle": "^3.0.0",
+ "@sigstore/bundle": "^3.1.0",
"@sigstore/core": "^2.0.0",
- "@sigstore/protobuf-specs": "^0.3.2",
- "make-fetch-happen": "^14.0.1",
+ "@sigstore/protobuf-specs": "^0.4.0",
+ "make-fetch-happen": "^14.0.2",
"proc-log": "^5.0.0",
"promise-retry": "^2.0.1"
},
diff --git a/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/key/certificate.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/key/certificate.js
index a916de0e51e712..e9a66b123455e3 100644
--- a/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/key/certificate.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/key/certificate.js
@@ -4,13 +4,10 @@ exports.CertificateChainVerifier = void 0;
exports.verifyCertificateChain = verifyCertificateChain;
const error_1 = require("../error");
const trust_1 = require("../trust");
-function verifyCertificateChain(leaf, certificateAuthorities) {
+function verifyCertificateChain(timestamp, leaf, certificateAuthorities) {
// Filter list of trusted CAs to those which are valid for the given
- // leaf certificate.
- const cas = (0, trust_1.filterCertAuthorities)(certificateAuthorities, {
- start: leaf.notBefore,
- end: leaf.notAfter,
- });
+ // timestamp
+ const cas = (0, trust_1.filterCertAuthorities)(certificateAuthorities, timestamp);
/* eslint-disable-next-line @typescript-eslint/no-explicit-any */
let error;
for (const ca of cas) {
@@ -18,6 +15,7 @@ function verifyCertificateChain(leaf, certificateAuthorities) {
const verifier = new CertificateChainVerifier({
trustedCerts: ca.certChain,
untrustedCert: leaf,
+ timestamp,
});
return verifier.verify();
}
@@ -41,12 +39,20 @@ class CertificateChainVerifier {
...opts.trustedCerts,
opts.untrustedCert,
]);
+ this.timestamp = opts.timestamp;
}
verify() {
// Construct certificate path from leaf to root
const certificatePath = this.sort();
// Perform validation checks on each certificate in the path
this.checkPath(certificatePath);
+ const validForDate = certificatePath.every((cert) => cert.validForDate(this.timestamp));
+ if (!validForDate) {
+ throw new error_1.VerificationError({
+ code: 'CERTIFICATE_ERROR',
+ message: 'certificate is not valid or expired at the specified date',
+ });
+ }
// Return verified certificate path
return certificatePath;
}
diff --git a/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/key/index.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/key/index.js
index cc894aab95a5d5..c966ccb1e925ef 100644
--- a/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/key/index.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/key/index.js
@@ -37,15 +37,10 @@ function verifyPublicKey(hint, timestamps, trustMaterial) {
}
function verifyCertificate(leaf, timestamps, trustMaterial) {
// Check that leaf certificate chains to a trusted CA
- const path = (0, certificate_1.verifyCertificateChain)(leaf, trustMaterial.certificateAuthorities);
- // Check that ALL certificates are valid for ALL of the timestamps
- const validForDate = timestamps.every((timestamp) => path.every((cert) => cert.validForDate(timestamp)));
- if (!validForDate) {
- throw new error_1.VerificationError({
- code: 'CERTIFICATE_ERROR',
- message: 'certificate is not valid or expired at the specified date',
- });
- }
+ let path = [];
+ timestamps.forEach((timestamp) => {
+ path = (0, certificate_1.verifyCertificateChain)(timestamp, leaf, trustMaterial.certificateAuthorities);
+ });
return {
scts: (0, sct_1.verifySCTs)(path[0], path[1], trustMaterial.ctlogs),
signer: getSigner(path[0]),
diff --git a/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/timestamp/tsa.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/timestamp/tsa.js
index 70388cd06c52d6..0da4a3de8247fd 100644
--- a/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/timestamp/tsa.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/timestamp/tsa.js
@@ -8,10 +8,7 @@ const trust_1 = require("../trust");
function verifyRFC3161Timestamp(timestamp, data, timestampAuthorities) {
const signingTime = timestamp.signingTime;
// Filter for CAs which were valid at the time of signing
- timestampAuthorities = (0, trust_1.filterCertAuthorities)(timestampAuthorities, {
- start: signingTime,
- end: signingTime,
- });
+ timestampAuthorities = (0, trust_1.filterCertAuthorities)(timestampAuthorities, signingTime);
// Filter for CAs which match serial and issuer embedded in the timestamp
timestampAuthorities = filterCAsBySerialAndIssuer(timestampAuthorities, {
serialNumber: timestamp.signerSerialNumber,
@@ -44,6 +41,7 @@ function verifyTimestampForCA(timestamp, data, ca) {
new certificate_1.CertificateChainVerifier({
untrustedCert: leaf,
trustedCerts: cas,
+ timestamp: signingTime,
}).verify();
}
catch (e) {
@@ -52,14 +50,6 @@ function verifyTimestampForCA(timestamp, data, ca) {
message: 'invalid certificate chain',
});
}
- // Check that all of the CA certs were valid at the time of signing
- const validAtSigningTime = ca.certChain.every((cert) => cert.validForDate(signingTime));
- if (!validAtSigningTime) {
- throw new error_1.VerificationError({
- code: 'TIMESTAMP_ERROR',
- message: 'timestamp was signed with an expired certificate',
- });
- }
// Check that the signing certificate's key can be used to verify the
// timestamp signature.
timestamp.verify(data, signingKey);
diff --git a/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/trust/filter.js b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/trust/filter.js
index 880a16cf1940ea..98bd25cd70e591 100644
--- a/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/trust/filter.js
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/dist/trust/filter.js
@@ -2,9 +2,9 @@
Object.defineProperty(exports, "__esModule", { value: true });
exports.filterCertAuthorities = filterCertAuthorities;
exports.filterTLogAuthorities = filterTLogAuthorities;
-function filterCertAuthorities(certAuthorities, criteria) {
+function filterCertAuthorities(certAuthorities, timestamp) {
return certAuthorities.filter((ca) => {
- return (ca.validFor.start <= criteria.start && ca.validFor.end >= criteria.end);
+ return ca.validFor.start <= timestamp && ca.validFor.end >= timestamp;
});
}
// Filter the list of tlog instances to only those which match the given log
diff --git a/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/package.json b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/package.json
index edf72b8bfd9680..62b84db7f91f4f 100644
--- a/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/package.json
+++ b/deps/npm/node_modules/sigstore/node_modules/@sigstore/verify/package.json
@@ -1,6 +1,6 @@
{
"name": "@sigstore/verify",
- "version": "2.0.0",
+ "version": "2.1.1",
"description": "Verification of Sigstore signatures",
"main": "dist/index.js",
"types": "dist/index.d.ts",
@@ -26,8 +26,8 @@
"provenance": true
},
"dependencies": {
- "@sigstore/protobuf-specs": "^0.3.2",
- "@sigstore/bundle": "^3.0.0",
+ "@sigstore/protobuf-specs": "^0.4.1",
+ "@sigstore/bundle": "^3.1.0",
"@sigstore/core": "^2.0.0"
},
"engines": {
diff --git a/deps/npm/node_modules/sigstore/package.json b/deps/npm/node_modules/sigstore/package.json
index 0f798a263657b4..dab40a8ea8fbc6 100644
--- a/deps/npm/node_modules/sigstore/package.json
+++ b/deps/npm/node_modules/sigstore/package.json
@@ -1,6 +1,6 @@
{
"name": "sigstore",
- "version": "3.0.0",
+ "version": "3.1.0",
"description": "code-signing for npm packages",
"main": "dist/index.js",
"types": "dist/index.d.ts",
@@ -29,17 +29,17 @@
"devDependencies": {
"@sigstore/rekor-types": "^3.0.0",
"@sigstore/jest": "^0.0.0",
- "@sigstore/mock": "^0.8.0",
+ "@sigstore/mock": "^0.10.0",
"@tufjs/repo-mock": "^3.0.1",
"@types/make-fetch-happen": "^10.0.4"
},
"dependencies": {
- "@sigstore/bundle": "^3.0.0",
+ "@sigstore/bundle": "^3.1.0",
"@sigstore/core": "^2.0.0",
- "@sigstore/protobuf-specs": "^0.3.2",
- "@sigstore/sign": "^3.0.0",
- "@sigstore/tuf": "^3.0.0",
- "@sigstore/verify": "^2.0.0"
+ "@sigstore/protobuf-specs": "^0.4.0",
+ "@sigstore/sign": "^3.1.0",
+ "@sigstore/tuf": "^3.1.0",
+ "@sigstore/verify": "^2.1.0"
},
"engines": {
"node": "^18.17.0 || >=20.5.0"
diff --git a/deps/npm/node_modules/socks-proxy-agent/dist/index.js b/deps/npm/node_modules/socks-proxy-agent/dist/index.js
index a9b5db2d61f573..15e06e8f431765 100644
--- a/deps/npm/node_modules/socks-proxy-agent/dist/index.js
+++ b/deps/npm/node_modules/socks-proxy-agent/dist/index.js
@@ -31,9 +31,21 @@ const socks_1 = require("socks");
const agent_base_1 = require("agent-base");
const debug_1 = __importDefault(require("debug"));
const dns = __importStar(require("dns"));
+const net = __importStar(require("net"));
const tls = __importStar(require("tls"));
const url_1 = require("url");
const debug = (0, debug_1.default)('socks-proxy-agent');
+const setServernameFromNonIpHost = (options) => {
+ if (options.servername === undefined &&
+ options.host &&
+ !net.isIP(options.host)) {
+ return {
+ ...options,
+ servername: options.host,
+ };
+ }
+ return options;
+};
function parseSocksURL(url) {
let lookup = false;
let type = 5;
@@ -149,11 +161,9 @@ class SocksProxyAgent extends agent_base_1.Agent {
// The proxy is connecting to a TLS server, so upgrade
// this socket connection to a TLS connection.
debug('Upgrading socket connection to TLS');
- const servername = opts.servername || opts.host;
const tlsSocket = tls.connect({
- ...omit(opts, 'host', 'path', 'port'),
+ ...omit(setServernameFromNonIpHost(opts), 'host', 'path', 'port'),
socket,
- servername,
});
tlsSocket.once('error', (error) => {
debug('Socket TLS error', error.message);
diff --git a/deps/npm/node_modules/socks-proxy-agent/package.json b/deps/npm/node_modules/socks-proxy-agent/package.json
index ae0e373fa77381..0f330a73106778 100644
--- a/deps/npm/node_modules/socks-proxy-agent/package.json
+++ b/deps/npm/node_modules/socks-proxy-agent/package.json
@@ -1,6 +1,6 @@
{
"name": "socks-proxy-agent",
- "version": "8.0.4",
+ "version": "8.0.5",
"description": "A SOCKS proxy `http.Agent` implementation for HTTP and HTTPS",
"main": "./dist/index.js",
"types": "./dist/index.d.ts",
@@ -107,7 +107,7 @@
"socks5h"
],
"dependencies": {
- "agent-base": "^7.1.1",
+ "agent-base": "^7.1.2",
"debug": "^4.3.4",
"socks": "^2.8.3"
},
diff --git a/deps/npm/node_modules/socks/build/common/helpers.js b/deps/npm/node_modules/socks/build/common/helpers.js
index 1ae44e4159a155..58331c8659dfa8 100644
--- a/deps/npm/node_modules/socks/build/common/helpers.js
+++ b/deps/npm/node_modules/socks/build/common/helpers.js
@@ -130,7 +130,7 @@ function isValidTimeoutValue(value) {
function ipv4ToInt32(ip) {
const address = new ip_address_1.Address4(ip);
// Convert the IPv4 address parts to an integer
- return address.toArray().reduce((acc, part) => (acc << 8) + part, 0);
+ return address.toArray().reduce((acc, part) => (acc << 8) + part, 0) >>> 0;
}
exports.ipv4ToInt32 = ipv4ToInt32;
function int32ToIpv4(int32) {
diff --git a/deps/npm/node_modules/socks/package.json b/deps/npm/node_modules/socks/package.json
index 5cc2a6836072e5..02e4f14e00cdc0 100644
--- a/deps/npm/node_modules/socks/package.json
+++ b/deps/npm/node_modules/socks/package.json
@@ -1,7 +1,7 @@
{
"name": "socks",
"private": false,
- "version": "2.8.3",
+ "version": "2.8.5",
"description": "Fully featured SOCKS proxy client supporting SOCKSv4, SOCKSv4a, and SOCKSv5. Includes Bind and Associate functionality.",
"main": "build/index.js",
"typings": "typings/index.d.ts",
diff --git a/deps/npm/node_modules/spdx-license-ids/index.json b/deps/npm/node_modules/spdx-license-ids/index.json
index f43d5016bd95ab..c1ae5520b18add 100644
--- a/deps/npm/node_modules/spdx-license-ids/index.json
+++ b/deps/npm/node_modules/spdx-license-ids/index.json
@@ -89,6 +89,7 @@
"Bitstream-Vera",
"BlueOak-1.0.0",
"Boehm-GC",
+ "Boehm-GC-without-fee",
"Borceux",
"Brian-Gladman-2-Clause",
"Brian-Gladman-3-Clause",
@@ -148,6 +149,8 @@
"CC-BY-SA-3.0-IGO",
"CC-BY-SA-4.0",
"CC-PDDC",
+ "CC-PDM-1.0",
+ "CC-SA-1.0",
"CC0-1.0",
"CDDL-1.0",
"CDDL-1.1",
@@ -198,6 +201,7 @@
"DRL-1.1",
"DSDP",
"DocBook-Schema",
+ "DocBook-Stylesheet",
"DocBook-XML",
"Dotseqn",
"ECL-1.0",
@@ -305,6 +309,7 @@
"Imlib2",
"Info-ZIP",
"Inner-Net-2.0",
+ "InnoSetup",
"Intel",
"Intel-ACPI",
"Interbase-1.0",
@@ -349,9 +354,11 @@
"Linux-man-pages-copyleft-2-para",
"Linux-man-pages-copyleft-var",
"Lucida-Bitmap-Fonts",
+ "MIPS",
"MIT",
"MIT-0",
"MIT-CMU",
+ "MIT-Click",
"MIT-Festival",
"MIT-Khronos-old",
"MIT-Modern-Variant",
@@ -502,6 +509,7 @@
"SISSL",
"SISSL-1.2",
"SL",
+ "SMAIL-GPL",
"SMLNJ",
"SMPPL",
"SNIA",
@@ -515,6 +523,7 @@
"SchemeReport",
"Sendmail",
"Sendmail-8.23",
+ "Sendmail-Open-Source-1.1",
"SimPL-2.0",
"Sleepycat",
"Soundex",
@@ -540,6 +549,8 @@
"TU-Berlin-1.0",
"TU-Berlin-2.0",
"TermReadKey",
+ "ThirdEye",
+ "TrustedQSL",
"UCAR",
"UCL-1.0",
"UMich-Merit",
@@ -583,6 +594,7 @@
"Zimbra-1.4",
"Zlib",
"any-OSI",
+ "any-OSI-perl-modules",
"bcrypt-Solar-Designer",
"blessing",
"bzip2-1.0.6",
@@ -599,6 +611,7 @@
"etalab-2.0",
"fwlw",
"gSOAP-1.3b",
+ "generic-xts",
"gnuplot",
"gtkbook",
"hdparm",
@@ -627,6 +640,7 @@
"threeparttable",
"ulem",
"w3m",
+ "wwl",
"xinetd",
"xkeyboard-config-Zinoviev",
"xlock",
diff --git a/deps/npm/node_modules/spdx-license-ids/package.json b/deps/npm/node_modules/spdx-license-ids/package.json
index 7ab34aab6b8b1d..9b02c267604590 100644
--- a/deps/npm/node_modules/spdx-license-ids/package.json
+++ b/deps/npm/node_modules/spdx-license-ids/package.json
@@ -1,14 +1,14 @@
{
"name": "spdx-license-ids",
- "version": "3.0.20",
+ "version": "3.0.21",
"description": "A list of SPDX license identifiers",
"repository": "jslicense/spdx-license-ids",
"author": "Shinnosuke Watanabe (https://github.com/shinnn)",
"license": "CC0-1.0",
"scripts": {
"build": "node build.js",
- "pretest": "eslint .",
"latest": "node latest.js",
+ "pretest": "npm run build",
"test": "node test.js"
},
"files": [
@@ -25,15 +25,5 @@
"json",
"array",
"oss"
- ],
- "devDependencies": {
- "@shinnn/eslint-config": "^7.0.0",
- "eslint": "^8.49.0",
- "eslint-formatter-codeframe": "^7.32.1",
- "rmfr": "^2.0.0",
- "tape": "^5.6.6"
- },
- "eslintConfig": {
- "extends": "@shinnn"
- }
+ ]
}
diff --git a/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/LICENSE b/deps/npm/node_modules/tar/node_modules/minizlib/LICENSE
similarity index 84%
rename from deps/npm/node_modules/minipass-fetch/node_modules/minizlib/LICENSE
rename to deps/npm/node_modules/tar/node_modules/minizlib/LICENSE
index 49f7efe431c9ea..ffce7383f53e7f 100644
--- a/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/LICENSE
+++ b/deps/npm/node_modules/tar/node_modules/minizlib/LICENSE
@@ -2,9 +2,9 @@ Minizlib was created by Isaac Z. Schlueter.
It is a derivative work of the Node.js project.
"""
-Copyright (c) 2017-2023 Isaac Z. Schlueter and Contributors
-Copyright (c) 2017-2023 Node.js contributors. All rights reserved.
-Copyright (c) 2017-2023 Joyent, Inc. and other Node contributors. All rights reserved.
+Copyright Isaac Z. Schlueter and Contributors
+Copyright Node.js contributors. All rights reserved.
+Copyright Joyent, Inc. and other Node contributors. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
diff --git a/deps/npm/node_modules/minizlib/constants.js b/deps/npm/node_modules/tar/node_modules/minizlib/constants.js
similarity index 100%
rename from deps/npm/node_modules/minizlib/constants.js
rename to deps/npm/node_modules/tar/node_modules/minizlib/constants.js
diff --git a/deps/npm/node_modules/minizlib/index.js b/deps/npm/node_modules/tar/node_modules/minizlib/index.js
similarity index 100%
rename from deps/npm/node_modules/minizlib/index.js
rename to deps/npm/node_modules/tar/node_modules/minizlib/index.js
diff --git a/deps/npm/node_modules/minizlib/node_modules/minipass/LICENSE b/deps/npm/node_modules/tar/node_modules/minizlib/node_modules/minipass/LICENSE
similarity index 100%
rename from deps/npm/node_modules/minizlib/node_modules/minipass/LICENSE
rename to deps/npm/node_modules/tar/node_modules/minizlib/node_modules/minipass/LICENSE
diff --git a/deps/npm/node_modules/minizlib/node_modules/minipass/index.js b/deps/npm/node_modules/tar/node_modules/minizlib/node_modules/minipass/index.js
similarity index 100%
rename from deps/npm/node_modules/minizlib/node_modules/minipass/index.js
rename to deps/npm/node_modules/tar/node_modules/minizlib/node_modules/minipass/index.js
diff --git a/deps/npm/node_modules/minizlib/node_modules/minipass/package.json b/deps/npm/node_modules/tar/node_modules/minizlib/node_modules/minipass/package.json
similarity index 100%
rename from deps/npm/node_modules/minizlib/node_modules/minipass/package.json
rename to deps/npm/node_modules/tar/node_modules/minizlib/node_modules/minipass/package.json
diff --git a/deps/npm/node_modules/tar/node_modules/minizlib/package.json b/deps/npm/node_modules/tar/node_modules/minizlib/package.json
new file mode 100644
index 00000000000000..98825a549f3fdc
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minizlib/package.json
@@ -0,0 +1,42 @@
+{
+ "name": "minizlib",
+ "version": "2.1.2",
+ "description": "A small fast zlib stream built on [minipass](http://npm.im/minipass) and Node.js's zlib binding.",
+ "main": "index.js",
+ "dependencies": {
+ "minipass": "^3.0.0",
+ "yallist": "^4.0.0"
+ },
+ "scripts": {
+ "test": "tap test/*.js --100 -J",
+ "preversion": "npm test",
+ "postversion": "npm publish",
+ "postpublish": "git push origin --all; git push origin --tags"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/isaacs/minizlib.git"
+ },
+ "keywords": [
+ "zlib",
+ "gzip",
+ "gunzip",
+ "deflate",
+ "inflate",
+ "compression",
+ "zip",
+ "unzip"
+ ],
+ "author": "Isaac Z. Schlueter (http://blog.izs.me/)",
+ "license": "MIT",
+ "devDependencies": {
+ "tap": "^14.6.9"
+ },
+ "files": [
+ "index.js",
+ "constants.js"
+ ],
+ "engines": {
+ "node": ">= 8"
+ }
+}
diff --git a/deps/npm/node_modules/tinyglobby/LICENSE b/deps/npm/node_modules/tinyglobby/LICENSE
new file mode 100644
index 00000000000000..8657364bb085e0
--- /dev/null
+++ b/deps/npm/node_modules/tinyglobby/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2024 Madeline Gurriarán
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/deps/npm/node_modules/tinyglobby/dist/index.d.mts b/deps/npm/node_modules/tinyglobby/dist/index.d.mts
new file mode 100644
index 00000000000000..d8b8ef7cf0516a
--- /dev/null
+++ b/deps/npm/node_modules/tinyglobby/dist/index.d.mts
@@ -0,0 +1,46 @@
+//#region src/utils.d.ts
+
+declare const convertPathToPattern: (path: string) => string;
+declare const escapePath: (path: string) => string;
+// #endregion
+// #region isDynamicPattern
+/*
+Has a few minor differences with `fast-glob` for better accuracy:
+
+Doesn't necessarily return false on patterns that include `\\`.
+
+Returns true if the pattern includes parentheses,
+regardless of them representing one single pattern or not.
+
+Returns true for unfinished glob extensions i.e. `(h`, `+(h`.
+
+Returns true for unfinished brace expansions as long as they include `,` or `..`.
+*/
+declare function isDynamicPattern(pattern: string, options?: {
+ caseSensitiveMatch: boolean;
+}): boolean; //#endregion
+//#region src/index.d.ts
+
+// #endregion
+// #region log
+interface GlobOptions {
+ absolute?: boolean;
+ cwd?: string;
+ patterns?: string | string[];
+ ignore?: string | string[];
+ dot?: boolean;
+ deep?: number;
+ followSymbolicLinks?: boolean;
+ caseSensitiveMatch?: boolean;
+ expandDirectories?: boolean;
+ onlyDirectories?: boolean;
+ onlyFiles?: boolean;
+ debug?: boolean;
+}
+declare function glob(patterns: string | string[], options?: Omit<GlobOptions, "patterns">): Promise<string[]>;
+declare function glob(options: GlobOptions): Promise<string[]>;
+declare function globSync(patterns: string | string[], options?: Omit<GlobOptions, "patterns">): string[];
+declare function globSync(options: GlobOptions): string[];
+
+//#endregion
+export { GlobOptions, convertPathToPattern, escapePath, glob, globSync, isDynamicPattern };
\ No newline at end of file
diff --git a/deps/npm/node_modules/tinyglobby/dist/index.js b/deps/npm/node_modules/tinyglobby/dist/index.js
new file mode 100644
index 00000000000000..1e05d89e7ebf1d
--- /dev/null
+++ b/deps/npm/node_modules/tinyglobby/dist/index.js
@@ -0,0 +1,267 @@
+//#region rolldown:runtime
+var __create = Object.create;
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __getProtoOf = Object.getPrototypeOf;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") for (var keys = __getOwnPropNames(from), i = 0, n = keys.length, key; i < n; i++) {
+ key = keys[i];
+ if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, {
+ get: ((k) => from[k]).bind(null, key),
+ enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
+ });
+ }
+ return to;
+};
+var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", {
+ value: mod,
+ enumerable: true
+}) : target, mod));
+
+//#endregion
+const path = __toESM(require("path"));
+const fdir = __toESM(require("fdir"));
+const picomatch = __toESM(require("picomatch"));
+
+//#region src/utils.ts
+const ONLY_PARENT_DIRECTORIES = /^(\/?\.\.)+$/;
+function getPartialMatcher(patterns, options) {
+ const patternsCount = patterns.length;
+ const patternsParts = Array(patternsCount);
+ const regexes = Array(patternsCount);
+ for (let i = 0; i < patternsCount; i++) {
+ const parts = splitPattern(patterns[i]);
+ patternsParts[i] = parts;
+ const partsCount = parts.length;
+ const partRegexes = Array(partsCount);
+ for (let j = 0; j < partsCount; j++) partRegexes[j] = picomatch.default.makeRe(parts[j], options);
+ regexes[i] = partRegexes;
+ }
+ return (input) => {
+ const inputParts = input.split("/");
+ if (inputParts[0] === ".." && ONLY_PARENT_DIRECTORIES.test(input)) return true;
+ for (let i = 0; i < patterns.length; i++) {
+ const patternParts = patternsParts[i];
+ const regex = regexes[i];
+ const inputPatternCount = inputParts.length;
+ const minParts = Math.min(inputPatternCount, patternParts.length);
+ let j = 0;
+ while (j < minParts) {
+ const part = patternParts[j];
+ if (part.includes("/")) return true;
+ const match = regex[j].test(inputParts[j]);
+ if (!match) break;
+ if (part === "**") return true;
+ j++;
+ }
+ if (j === inputPatternCount) return true;
+ }
+ return false;
+ };
+}
+const splitPatternOptions = { parts: true };
+function splitPattern(path$2) {
+ var _result$parts;
+ const result = picomatch.default.scan(path$2, splitPatternOptions);
+ return ((_result$parts = result.parts) === null || _result$parts === void 0 ? void 0 : _result$parts.length) ? result.parts : [path$2];
+}
+const isWin = process.platform === "win32";
+const ESCAPED_WIN32_BACKSLASHES = /\\(?![()[\]{}!+@])/g;
+function convertPosixPathToPattern(path$2) {
+ return escapePosixPath(path$2);
+}
+function convertWin32PathToPattern(path$2) {
+ return escapeWin32Path(path$2).replace(ESCAPED_WIN32_BACKSLASHES, "/");
+}
+const convertPathToPattern = isWin ? convertWin32PathToPattern : convertPosixPathToPattern;
+const POSIX_UNESCAPED_GLOB_SYMBOLS = /(?<!\\)([()[\]{}*?|]|^!|[!+@](?=\()|\\(?![()[\]{}!*+?@|]))/g;
+const WIN32_UNESCAPED_GLOB_SYMBOLS = /(?<!\\)([()[\]{}]|^!|[!+@](?=\())/g;
+const escapePosixPath = (path$2) => path$2.replace(POSIX_UNESCAPED_GLOB_SYMBOLS, "\\$&");
+const escapeWin32Path = (path$2) => path$2.replace(WIN32_UNESCAPED_GLOB_SYMBOLS, "\\$&");
+const escapePath = isWin ? escapeWin32Path : escapePosixPath;
+function isDynamicPattern(pattern, options) {
+ if ((options === null || options === void 0 ? void 0 : options.caseSensitiveMatch) === false) return true;
+ const scan = picomatch.default.scan(pattern);
+ return scan.isGlob || scan.negated;
+}
+function log(...tasks) {
+ console.log(`[tinyglobby ${new Date().toLocaleTimeString("es")}]`, ...tasks);
+}
+
+//#endregion
+//#region src/index.ts
+const PARENT_DIRECTORY = /^(\/?\.\.)+/;
+const ESCAPING_BACKSLASHES = /\\(?=[()[\]{}!*+?@|])/g;
+const BACKSLASHES = /\\/g;
+function normalizePattern(pattern, expandDirectories, cwd, props, isIgnore) {
+ let result = pattern;
+ if (pattern.endsWith("/")) result = pattern.slice(0, -1);
+ if (!result.endsWith("*") && expandDirectories) result += "/**";
+ const escapedCwd = escapePath(cwd);
+ if (path.default.isAbsolute(result.replace(ESCAPING_BACKSLASHES, ""))) result = path.posix.relative(escapedCwd, result);
+ else result = path.posix.normalize(result);
+ const parentDirectoryMatch = PARENT_DIRECTORY.exec(result);
+ const parts = splitPattern(result);
+ if (parentDirectoryMatch === null || parentDirectoryMatch === void 0 ? void 0 : parentDirectoryMatch[0]) {
+ const n = (parentDirectoryMatch[0].length + 1) / 3;
+ let i = 0;
+ const cwdParts = escapedCwd.split("/");
+ while (i < n && parts[i + n] === cwdParts[cwdParts.length + i - n]) {
+ result = result.slice(0, (n - i - 1) * 3) + result.slice((n - i) * 3 + parts[i + n].length + 1) || ".";
+ i++;
+ }
+ const potentialRoot = path.posix.join(cwd, parentDirectoryMatch[0].slice(i * 3));
+ if (!potentialRoot.startsWith(".") && props.root.length > potentialRoot.length) {
+ props.root = potentialRoot;
+ props.depthOffset = -n + i;
+ }
+ }
+ if (!isIgnore && props.depthOffset >= 0) {
+ var _props$commonPath;
+ (_props$commonPath = props.commonPath) !== null && _props$commonPath !== void 0 || (props.commonPath = parts);
+ const newCommonPath = [];
+ const length = Math.min(props.commonPath.length, parts.length);
+ for (let i = 0; i < length; i++) {
+ const part = parts[i];
+ if (part === "**" && !parts[i + 1]) {
+ newCommonPath.pop();
+ break;
+ }
+ if (part !== props.commonPath[i] || isDynamicPattern(part) || i === parts.length - 1) break;
+ newCommonPath.push(part);
+ }
+ props.depthOffset = newCommonPath.length;
+ props.commonPath = newCommonPath;
+ props.root = newCommonPath.length > 0 ? path.default.posix.join(cwd, ...newCommonPath) : cwd;
+ }
+ return result;
+}
+function processPatterns({ patterns, ignore = [], expandDirectories = true }, cwd, props) {
+ if (typeof patterns === "string") patterns = [patterns];
+ else if (!patterns) patterns = ["**/*"];
+ if (typeof ignore === "string") ignore = [ignore];
+ const matchPatterns = [];
+ const ignorePatterns = [];
+ for (const pattern of ignore) {
+ if (!pattern) continue;
+ if (pattern[0] !== "!" || pattern[1] === "(") ignorePatterns.push(normalizePattern(pattern, expandDirectories, cwd, props, true));
+ }
+ for (const pattern of patterns) {
+ if (!pattern) continue;
+ if (pattern[0] !== "!" || pattern[1] === "(") matchPatterns.push(normalizePattern(pattern, expandDirectories, cwd, props, false));
+ else if (pattern[1] !== "!" || pattern[2] === "(") ignorePatterns.push(normalizePattern(pattern.slice(1), expandDirectories, cwd, props, true));
+ }
+ return {
+ match: matchPatterns,
+ ignore: ignorePatterns
+ };
+}
+function getRelativePath(path$2, cwd, root) {
+ return path.posix.relative(cwd, `${root}/${path$2}`) || ".";
+}
+function processPath(path$2, cwd, root, isDirectory, absolute) {
+ const relativePath = absolute ? path$2.slice(root === "/" ? 1 : root.length + 1) || "." : path$2;
+ if (root === cwd) return isDirectory && relativePath !== "." ? relativePath.slice(0, -1) : relativePath;
+ return getRelativePath(relativePath, cwd, root);
+}
+function formatPaths(paths, cwd, root) {
+ for (let i = paths.length - 1; i >= 0; i--) {
+ const path$2 = paths[i];
+ paths[i] = getRelativePath(path$2, cwd, root) + (!path$2 || path$2.endsWith("/") ? "/" : "");
+ }
+ return paths;
+}
+function crawl(options, cwd, sync) {
+ if (process.env.TINYGLOBBY_DEBUG) options.debug = true;
+ if (options.debug) log("globbing with options:", options, "cwd:", cwd);
+ if (Array.isArray(options.patterns) && options.patterns.length === 0) return sync ? [] : Promise.resolve([]);
+ const props = {
+ root: cwd,
+ commonPath: null,
+ depthOffset: 0
+ };
+ const processed = processPatterns(options, cwd, props);
+ const nocase = options.caseSensitiveMatch === false;
+ if (options.debug) log("internal processing patterns:", processed);
+ const matcher = (0, picomatch.default)(processed.match, {
+ dot: options.dot,
+ nocase,
+ ignore: processed.ignore
+ });
+ const ignore = (0, picomatch.default)(processed.ignore, {
+ dot: options.dot,
+ nocase
+ });
+ const partialMatcher = getPartialMatcher(processed.match, {
+ dot: options.dot,
+ nocase
+ });
+ const fdirOptions = {
+ filters: [options.debug ? (p, isDirectory) => {
+ const path$2 = processPath(p, cwd, props.root, isDirectory, options.absolute);
+ const matches = matcher(path$2);
+ if (matches) log(`matched ${path$2}`);
+ return matches;
+ } : (p, isDirectory) => matcher(processPath(p, cwd, props.root, isDirectory, options.absolute))],
+ exclude: options.debug ? (_, p) => {
+ const relativePath = processPath(p, cwd, props.root, true, true);
+ const skipped = relativePath !== "." && !partialMatcher(relativePath) || ignore(relativePath);
+ if (skipped) log(`skipped ${p}`);
+ else log(`crawling ${p}`);
+ return skipped;
+ } : (_, p) => {
+ const relativePath = processPath(p, cwd, props.root, true, true);
+ return relativePath !== "." && !partialMatcher(relativePath) || ignore(relativePath);
+ },
+ pathSeparator: "/",
+ relativePaths: true,
+ resolveSymlinks: true
+ };
+ if (options.deep !== void 0) fdirOptions.maxDepth = Math.round(options.deep - props.depthOffset);
+ if (options.absolute) {
+ fdirOptions.relativePaths = false;
+ fdirOptions.resolvePaths = true;
+ fdirOptions.includeBasePath = true;
+ }
+ if (options.followSymbolicLinks === false) {
+ fdirOptions.resolveSymlinks = false;
+ fdirOptions.excludeSymlinks = true;
+ }
+ if (options.onlyDirectories) {
+ fdirOptions.excludeFiles = true;
+ fdirOptions.includeDirs = true;
+ } else if (options.onlyFiles === false) fdirOptions.includeDirs = true;
+ props.root = props.root.replace(BACKSLASHES, "");
+ const root = props.root;
+ if (options.debug) log("internal properties:", props);
+ const api = new fdir.fdir(fdirOptions).crawl(root);
+ if (cwd === root || options.absolute) return sync ? api.sync() : api.withPromise();
+ return sync ? formatPaths(api.sync(), cwd, root) : api.withPromise().then((paths) => formatPaths(paths, cwd, root));
+}
+async function glob(patternsOrOptions, options) {
+ if (patternsOrOptions && (options === null || options === void 0 ? void 0 : options.patterns)) throw new Error("Cannot pass patterns as both an argument and an option");
+ const opts = Array.isArray(patternsOrOptions) || typeof patternsOrOptions === "string" ? {
+ ...options,
+ patterns: patternsOrOptions
+ } : patternsOrOptions;
+ const cwd = opts.cwd ? path.default.resolve(opts.cwd).replace(BACKSLASHES, "/") : process.cwd().replace(BACKSLASHES, "/");
+ return crawl(opts, cwd, false);
+}
+function globSync(patternsOrOptions, options) {
+ if (patternsOrOptions && (options === null || options === void 0 ? void 0 : options.patterns)) throw new Error("Cannot pass patterns as both an argument and an option");
+ const opts = Array.isArray(patternsOrOptions) || typeof patternsOrOptions === "string" ? {
+ ...options,
+ patterns: patternsOrOptions
+ } : patternsOrOptions;
+ const cwd = opts.cwd ? path.default.resolve(opts.cwd).replace(BACKSLASHES, "/") : process.cwd().replace(BACKSLASHES, "/");
+ return crawl(opts, cwd, true);
+}
+
+//#endregion
+exports.convertPathToPattern = convertPathToPattern;
+exports.escapePath = escapePath;
+exports.glob = glob;
+exports.globSync = globSync;
+exports.isDynamicPattern = isDynamicPattern;
\ No newline at end of file
diff --git a/deps/npm/node_modules/tinyglobby/dist/index.mjs b/deps/npm/node_modules/tinyglobby/dist/index.mjs
new file mode 100644
index 00000000000000..f04903f5b1a76b
--- /dev/null
+++ b/deps/npm/node_modules/tinyglobby/dist/index.mjs
@@ -0,0 +1,240 @@
+import path, { posix } from "path";
+import { fdir } from "fdir";
+import picomatch from "picomatch";
+
+//#region src/utils.ts
+const ONLY_PARENT_DIRECTORIES = /^(\/?\.\.)+$/;
+function getPartialMatcher(patterns, options) {
+ const patternsCount = patterns.length;
+ const patternsParts = Array(patternsCount);
+ const regexes = Array(patternsCount);
+ for (let i = 0; i < patternsCount; i++) {
+ const parts = splitPattern(patterns[i]);
+ patternsParts[i] = parts;
+ const partsCount = parts.length;
+ const partRegexes = Array(partsCount);
+ for (let j = 0; j < partsCount; j++) partRegexes[j] = picomatch.makeRe(parts[j], options);
+ regexes[i] = partRegexes;
+ }
+ return (input) => {
+ const inputParts = input.split("/");
+ if (inputParts[0] === ".." && ONLY_PARENT_DIRECTORIES.test(input)) return true;
+ for (let i = 0; i < patterns.length; i++) {
+ const patternParts = patternsParts[i];
+ const regex = regexes[i];
+ const inputPatternCount = inputParts.length;
+ const minParts = Math.min(inputPatternCount, patternParts.length);
+ let j = 0;
+ while (j < minParts) {
+ const part = patternParts[j];
+ if (part.includes("/")) return true;
+ const match = regex[j].test(inputParts[j]);
+ if (!match) break;
+ if (part === "**") return true;
+ j++;
+ }
+ if (j === inputPatternCount) return true;
+ }
+ return false;
+ };
+}
+const splitPatternOptions = { parts: true };
+function splitPattern(path$1) {
+ var _result$parts;
+ const result = picomatch.scan(path$1, splitPatternOptions);
+ return ((_result$parts = result.parts) === null || _result$parts === void 0 ? void 0 : _result$parts.length) ? result.parts : [path$1];
+}
+const isWin = process.platform === "win32";
+const ESCAPED_WIN32_BACKSLASHES = /\\(?![()[\]{}!+@])/g;
+function convertPosixPathToPattern(path$1) {
+ return escapePosixPath(path$1);
+}
+function convertWin32PathToPattern(path$1) {
+ return escapeWin32Path(path$1).replace(ESCAPED_WIN32_BACKSLASHES, "/");
+}
+const convertPathToPattern = isWin ? convertWin32PathToPattern : convertPosixPathToPattern;
+const POSIX_UNESCAPED_GLOB_SYMBOLS = /(?<!\\)([()[\]{}*?|]|^!|[!+@](?=\()|\\(?![()[\]{}!*+?@|]))/g;
+const WIN32_UNESCAPED_GLOB_SYMBOLS = /(?<!\\)([()[\]{}]|^!|[!+@](?=\())/g;
+const escapePosixPath = (path$1) => path$1.replace(POSIX_UNESCAPED_GLOB_SYMBOLS, "\\$&");
+const escapeWin32Path = (path$1) => path$1.replace(WIN32_UNESCAPED_GLOB_SYMBOLS, "\\$&");
+const escapePath = isWin ? escapeWin32Path : escapePosixPath;
+function isDynamicPattern(pattern, options) {
+ if ((options === null || options === void 0 ? void 0 : options.caseSensitiveMatch) === false) return true;
+ const scan = picomatch.scan(pattern);
+ return scan.isGlob || scan.negated;
+}
+function log(...tasks) {
+ console.log(`[tinyglobby ${new Date().toLocaleTimeString("es")}]`, ...tasks);
+}
+
+//#endregion
+//#region src/index.ts
+const PARENT_DIRECTORY = /^(\/?\.\.)+/;
+const ESCAPING_BACKSLASHES = /\\(?=[()[\]{}!*+?@|])/g;
+const BACKSLASHES = /\\/g;
+function normalizePattern(pattern, expandDirectories, cwd, props, isIgnore) {
+ let result = pattern;
+ if (pattern.endsWith("/")) result = pattern.slice(0, -1);
+ if (!result.endsWith("*") && expandDirectories) result += "/**";
+ const escapedCwd = escapePath(cwd);
+ if (path.isAbsolute(result.replace(ESCAPING_BACKSLASHES, ""))) result = posix.relative(escapedCwd, result);
+ else result = posix.normalize(result);
+ const parentDirectoryMatch = PARENT_DIRECTORY.exec(result);
+ const parts = splitPattern(result);
+ if (parentDirectoryMatch === null || parentDirectoryMatch === void 0 ? void 0 : parentDirectoryMatch[0]) {
+ const n = (parentDirectoryMatch[0].length + 1) / 3;
+ let i = 0;
+ const cwdParts = escapedCwd.split("/");
+ while (i < n && parts[i + n] === cwdParts[cwdParts.length + i - n]) {
+ result = result.slice(0, (n - i - 1) * 3) + result.slice((n - i) * 3 + parts[i + n].length + 1) || ".";
+ i++;
+ }
+ const potentialRoot = posix.join(cwd, parentDirectoryMatch[0].slice(i * 3));
+ if (!potentialRoot.startsWith(".") && props.root.length > potentialRoot.length) {
+ props.root = potentialRoot;
+ props.depthOffset = -n + i;
+ }
+ }
+ if (!isIgnore && props.depthOffset >= 0) {
+ var _props$commonPath;
+ (_props$commonPath = props.commonPath) !== null && _props$commonPath !== void 0 || (props.commonPath = parts);
+ const newCommonPath = [];
+ const length = Math.min(props.commonPath.length, parts.length);
+ for (let i = 0; i < length; i++) {
+ const part = parts[i];
+ if (part === "**" && !parts[i + 1]) {
+ newCommonPath.pop();
+ break;
+ }
+ if (part !== props.commonPath[i] || isDynamicPattern(part) || i === parts.length - 1) break;
+ newCommonPath.push(part);
+ }
+ props.depthOffset = newCommonPath.length;
+ props.commonPath = newCommonPath;
+ props.root = newCommonPath.length > 0 ? path.posix.join(cwd, ...newCommonPath) : cwd;
+ }
+ return result;
+}
+function processPatterns({ patterns, ignore = [], expandDirectories = true }, cwd, props) {
+ if (typeof patterns === "string") patterns = [patterns];
+ else if (!patterns) patterns = ["**/*"];
+ if (typeof ignore === "string") ignore = [ignore];
+ const matchPatterns = [];
+ const ignorePatterns = [];
+ for (const pattern of ignore) {
+ if (!pattern) continue;
+ if (pattern[0] !== "!" || pattern[1] === "(") ignorePatterns.push(normalizePattern(pattern, expandDirectories, cwd, props, true));
+ }
+ for (const pattern of patterns) {
+ if (!pattern) continue;
+ if (pattern[0] !== "!" || pattern[1] === "(") matchPatterns.push(normalizePattern(pattern, expandDirectories, cwd, props, false));
+ else if (pattern[1] !== "!" || pattern[2] === "(") ignorePatterns.push(normalizePattern(pattern.slice(1), expandDirectories, cwd, props, true));
+ }
+ return {
+ match: matchPatterns,
+ ignore: ignorePatterns
+ };
+}
+function getRelativePath(path$1, cwd, root) {
+ return posix.relative(cwd, `${root}/${path$1}`) || ".";
+}
+function processPath(path$1, cwd, root, isDirectory, absolute) {
+ const relativePath = absolute ? path$1.slice(root === "/" ? 1 : root.length + 1) || "." : path$1;
+ if (root === cwd) return isDirectory && relativePath !== "." ? relativePath.slice(0, -1) : relativePath;
+ return getRelativePath(relativePath, cwd, root);
+}
+function formatPaths(paths, cwd, root) {
+ for (let i = paths.length - 1; i >= 0; i--) {
+ const path$1 = paths[i];
+ paths[i] = getRelativePath(path$1, cwd, root) + (!path$1 || path$1.endsWith("/") ? "/" : "");
+ }
+ return paths;
+}
+function crawl(options, cwd, sync) {
+ if (process.env.TINYGLOBBY_DEBUG) options.debug = true;
+ if (options.debug) log("globbing with options:", options, "cwd:", cwd);
+ if (Array.isArray(options.patterns) && options.patterns.length === 0) return sync ? [] : Promise.resolve([]);
+ const props = {
+ root: cwd,
+ commonPath: null,
+ depthOffset: 0
+ };
+ const processed = processPatterns(options, cwd, props);
+ const nocase = options.caseSensitiveMatch === false;
+ if (options.debug) log("internal processing patterns:", processed);
+ const matcher = picomatch(processed.match, {
+ dot: options.dot,
+ nocase,
+ ignore: processed.ignore
+ });
+ const ignore = picomatch(processed.ignore, {
+ dot: options.dot,
+ nocase
+ });
+ const partialMatcher = getPartialMatcher(processed.match, {
+ dot: options.dot,
+ nocase
+ });
+ const fdirOptions = {
+ filters: [options.debug ? (p, isDirectory) => {
+ const path$1 = processPath(p, cwd, props.root, isDirectory, options.absolute);
+ const matches = matcher(path$1);
+ if (matches) log(`matched ${path$1}`);
+ return matches;
+ } : (p, isDirectory) => matcher(processPath(p, cwd, props.root, isDirectory, options.absolute))],
+ exclude: options.debug ? (_, p) => {
+ const relativePath = processPath(p, cwd, props.root, true, true);
+ const skipped = relativePath !== "." && !partialMatcher(relativePath) || ignore(relativePath);
+ if (skipped) log(`skipped ${p}`);
+ else log(`crawling ${p}`);
+ return skipped;
+ } : (_, p) => {
+ const relativePath = processPath(p, cwd, props.root, true, true);
+ return relativePath !== "." && !partialMatcher(relativePath) || ignore(relativePath);
+ },
+ pathSeparator: "/",
+ relativePaths: true,
+ resolveSymlinks: true
+ };
+ if (options.deep !== void 0) fdirOptions.maxDepth = Math.round(options.deep - props.depthOffset);
+ if (options.absolute) {
+ fdirOptions.relativePaths = false;
+ fdirOptions.resolvePaths = true;
+ fdirOptions.includeBasePath = true;
+ }
+ if (options.followSymbolicLinks === false) {
+ fdirOptions.resolveSymlinks = false;
+ fdirOptions.excludeSymlinks = true;
+ }
+ if (options.onlyDirectories) {
+ fdirOptions.excludeFiles = true;
+ fdirOptions.includeDirs = true;
+ } else if (options.onlyFiles === false) fdirOptions.includeDirs = true;
+ props.root = props.root.replace(BACKSLASHES, "");
+ const root = props.root;
+ if (options.debug) log("internal properties:", props);
+ const api = new fdir(fdirOptions).crawl(root);
+ if (cwd === root || options.absolute) return sync ? api.sync() : api.withPromise();
+ return sync ? formatPaths(api.sync(), cwd, root) : api.withPromise().then((paths) => formatPaths(paths, cwd, root));
+}
+async function glob(patternsOrOptions, options) {
+ if (patternsOrOptions && (options === null || options === void 0 ? void 0 : options.patterns)) throw new Error("Cannot pass patterns as both an argument and an option");
+ const opts = Array.isArray(patternsOrOptions) || typeof patternsOrOptions === "string" ? {
+ ...options,
+ patterns: patternsOrOptions
+ } : patternsOrOptions;
+ const cwd = opts.cwd ? path.resolve(opts.cwd).replace(BACKSLASHES, "/") : process.cwd().replace(BACKSLASHES, "/");
+ return crawl(opts, cwd, false);
+}
+function globSync(patternsOrOptions, options) {
+ if (patternsOrOptions && (options === null || options === void 0 ? void 0 : options.patterns)) throw new Error("Cannot pass patterns as both an argument and an option");
+ const opts = Array.isArray(patternsOrOptions) || typeof patternsOrOptions === "string" ? {
+ ...options,
+ patterns: patternsOrOptions
+ } : patternsOrOptions;
+ const cwd = opts.cwd ? path.resolve(opts.cwd).replace(BACKSLASHES, "/") : process.cwd().replace(BACKSLASHES, "/");
+ return crawl(opts, cwd, true);
+}
+
+//#endregion
+export { convertPathToPattern, escapePath, glob, globSync, isDynamicPattern };
\ No newline at end of file
diff --git a/deps/npm/node_modules/clean-stack/license b/deps/npm/node_modules/tinyglobby/node_modules/fdir/LICENSE
similarity index 92%
rename from deps/npm/node_modules/clean-stack/license
rename to deps/npm/node_modules/tinyglobby/node_modules/fdir/LICENSE
index e7af2f77107d73..bb7fdee44cae62 100644
--- a/deps/npm/node_modules/clean-stack/license
+++ b/deps/npm/node_modules/tinyglobby/node_modules/fdir/LICENSE
@@ -1,6 +1,4 @@
-MIT License
-
-Copyright (c) Sindre Sorhus (sindresorhus.com)
+Copyright 2023 Abdullah Atta
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
diff --git a/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/api/async.js b/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/api/async.js
new file mode 100644
index 00000000000000..efc6649cb04e4b
--- /dev/null
+++ b/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/api/async.js
@@ -0,0 +1,19 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.callback = exports.promise = void 0;
+const walker_1 = require("./walker");
+function promise(root, options) {
+ return new Promise((resolve, reject) => {
+ callback(root, options, (err, output) => {
+ if (err)
+ return reject(err);
+ resolve(output);
+ });
+ });
+}
+exports.promise = promise;
+function callback(root, options, callback) {
+ let walker = new walker_1.Walker(root, options, callback);
+ walker.start();
+}
+exports.callback = callback;
diff --git a/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/api/counter.js b/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/api/counter.js
new file mode 100644
index 00000000000000..685cb270b73e5a
--- /dev/null
+++ b/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/api/counter.js
@@ -0,0 +1,27 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Counter = void 0;
+class Counter {
+ _files = 0;
+ _directories = 0;
+ set files(num) {
+ this._files = num;
+ }
+ get files() {
+ return this._files;
+ }
+ set directories(num) {
+ this._directories = num;
+ }
+ get directories() {
+ return this._directories;
+ }
+ /**
+ * @deprecated use `directories` instead
+ */
+ /* c8 ignore next 3 */
+ get dirs() {
+ return this._directories;
+ }
+}
+exports.Counter = Counter;
diff --git a/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/get-array.js b/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/get-array.js
new file mode 100644
index 00000000000000..1e02308dfa6f2f
--- /dev/null
+++ b/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/get-array.js
@@ -0,0 +1,13 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.build = void 0;
+const getArray = (paths) => {
+ return paths;
+};
+const getArrayGroup = () => {
+ return [""].slice(0, 0);
+};
+function build(options) {
+ return options.group ? getArrayGroup : getArray;
+}
+exports.build = build;
diff --git a/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/group-files.js b/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/group-files.js
new file mode 100644
index 00000000000000..4ccaa1a481156b
--- /dev/null
+++ b/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/group-files.js
@@ -0,0 +1,11 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.build = void 0;
+const groupFiles = (groups, directory, files) => {
+ groups.push({ directory, files, dir: directory });
+};
+const empty = () => { };
+function build(options) {
+ return options.group ? groupFiles : empty;
+}
+exports.build = build;
diff --git a/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/invoke-callback.js b/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/invoke-callback.js
new file mode 100644
index 00000000000000..ed59ca2da78986
--- /dev/null
+++ b/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/invoke-callback.js
@@ -0,0 +1,57 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.build = void 0;
+const onlyCountsSync = (state) => {
+ return state.counts;
+};
+const groupsSync = (state) => {
+ return state.groups;
+};
+const defaultSync = (state) => {
+ return state.paths;
+};
+const limitFilesSync = (state) => {
+ return state.paths.slice(0, state.options.maxFiles);
+};
+const onlyCountsAsync = (state, error, callback) => {
+ report(error, callback, state.counts, state.options.suppressErrors);
+ return null;
+};
+const defaultAsync = (state, error, callback) => {
+ report(error, callback, state.paths, state.options.suppressErrors);
+ return null;
+};
+const limitFilesAsync = (state, error, callback) => {
+ report(error, callback, state.paths.slice(0, state.options.maxFiles), state.options.suppressErrors);
+ return null;
+};
+const groupsAsync = (state, error, callback) => {
+ report(error, callback, state.groups, state.options.suppressErrors);
+ return null;
+};
+function report(error, callback, output, suppressErrors) {
+ if (error && !suppressErrors)
+ callback(error, output);
+ else
+ callback(null, output);
+}
+function build(options, isSynchronous) {
+ const { onlyCounts, group, maxFiles } = options;
+ if (onlyCounts)
+ return isSynchronous
+ ? onlyCountsSync
+ : onlyCountsAsync;
+ else if (group)
+ return isSynchronous
+ ? groupsSync
+ : groupsAsync;
+ else if (maxFiles)
+ return isSynchronous
+ ? limitFilesSync
+ : limitFilesAsync;
+ else
+ return isSynchronous
+ ? defaultSync
+ : defaultAsync;
+}
+exports.build = build;
diff --git a/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/join-path.js b/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/join-path.js
new file mode 100644
index 00000000000000..e84faf617734e3
--- /dev/null
+++ b/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/join-path.js
@@ -0,0 +1,36 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.build = exports.joinDirectoryPath = exports.joinPathWithBasePath = void 0;
+const path_1 = require("path");
+const utils_1 = require("../../utils");
+function joinPathWithBasePath(filename, directoryPath) {
+ return directoryPath + filename;
+}
+exports.joinPathWithBasePath = joinPathWithBasePath;
+function joinPathWithRelativePath(root, options) {
+ return function (filename, directoryPath) {
+ const sameRoot = directoryPath.startsWith(root);
+ if (sameRoot)
+ return directoryPath.replace(root, "") + filename;
+ else
+ return ((0, utils_1.convertSlashes)((0, path_1.relative)(root, directoryPath), options.pathSeparator) +
+ options.pathSeparator +
+ filename);
+ };
+}
+function joinPath(filename) {
+ return filename;
+}
+function joinDirectoryPath(filename, directoryPath, separator) {
+ return directoryPath + filename + separator;
+}
+exports.joinDirectoryPath = joinDirectoryPath;
+function build(root, options) {
+ const { relativePaths, includeBasePath } = options;
+ return relativePaths && root
+ ? joinPathWithRelativePath(root, options)
+ : includeBasePath
+ ? joinPathWithBasePath
+ : joinPath;
+}
+exports.build = build;
diff --git a/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/push-directory.js b/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/push-directory.js
new file mode 100644
index 00000000000000..6858cb62532017
--- /dev/null
+++ b/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/push-directory.js
@@ -0,0 +1,37 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.build = void 0;
+function pushDirectoryWithRelativePath(root) {
+ return function (directoryPath, paths) {
+ paths.push(directoryPath.substring(root.length) || ".");
+ };
+}
+function pushDirectoryFilterWithRelativePath(root) {
+ return function (directoryPath, paths, filters) {
+ const relativePath = directoryPath.substring(root.length) || ".";
+ if (filters.every((filter) => filter(relativePath, true))) {
+ paths.push(relativePath);
+ }
+ };
+}
+const pushDirectory = (directoryPath, paths) => {
+ paths.push(directoryPath || ".");
+};
+const pushDirectoryFilter = (directoryPath, paths, filters) => {
+ const path = directoryPath || ".";
+ if (filters.every((filter) => filter(path, true))) {
+ paths.push(path);
+ }
+};
+const empty = () => { };
+function build(root, options) {
+ const { includeDirs, filters, relativePaths } = options;
+ if (!includeDirs)
+ return empty;
+ if (relativePaths)
+ return filters && filters.length
+ ? pushDirectoryFilterWithRelativePath(root)
+ : pushDirectoryWithRelativePath(root);
+ return filters && filters.length ? pushDirectoryFilter : pushDirectory;
+}
+exports.build = build;
diff --git a/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/push-file.js b/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/push-file.js
new file mode 100644
index 00000000000000..88843952946ad2
--- /dev/null
+++ b/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/push-file.js
@@ -0,0 +1,33 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.build = void 0;
+const pushFileFilterAndCount = (filename, _paths, counts, filters) => {
+ if (filters.every((filter) => filter(filename, false)))
+ counts.files++;
+};
+const pushFileFilter = (filename, paths, _counts, filters) => {
+ if (filters.every((filter) => filter(filename, false)))
+ paths.push(filename);
+};
+const pushFileCount = (_filename, _paths, counts, _filters) => {
+ counts.files++;
+};
+const pushFile = (filename, paths) => {
+ paths.push(filename);
+};
+const empty = () => { };
+function build(options) {
+ const { excludeFiles, filters, onlyCounts } = options;
+ if (excludeFiles)
+ return empty;
+ if (filters && filters.length) {
+ return onlyCounts ? pushFileFilterAndCount : pushFileFilter;
+ }
+ else if (onlyCounts) {
+ return pushFileCount;
+ }
+ else {
+ return pushFile;
+ }
+}
+exports.build = build;
diff --git a/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/resolve-symlink.js b/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/resolve-symlink.js
new file mode 100644
index 00000000000000..dbf0720cd41f87
--- /dev/null
+++ b/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/resolve-symlink.js
@@ -0,0 +1,67 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.build = void 0;
+const fs_1 = __importDefault(require("fs"));
+const path_1 = require("path");
+const resolveSymlinksAsync = function (path, state, callback) {
+ const { queue, options: { suppressErrors }, } = state;
+ queue.enqueue();
+ fs_1.default.realpath(path, (error, resolvedPath) => {
+ if (error)
+ return queue.dequeue(suppressErrors ? null : error, state);
+ fs_1.default.stat(resolvedPath, (error, stat) => {
+ if (error)
+ return queue.dequeue(suppressErrors ? null : error, state);
+ if (stat.isDirectory() && isRecursive(path, resolvedPath, state))
+ return queue.dequeue(null, state);
+ callback(stat, resolvedPath);
+ queue.dequeue(null, state);
+ });
+ });
+};
+const resolveSymlinks = function (path, state, callback) {
+ const { queue, options: { suppressErrors }, } = state;
+ queue.enqueue();
+ try {
+ const resolvedPath = fs_1.default.realpathSync(path);
+ const stat = fs_1.default.statSync(resolvedPath);
+ if (stat.isDirectory() && isRecursive(path, resolvedPath, state))
+ return;
+ callback(stat, resolvedPath);
+ }
+ catch (e) {
+ if (!suppressErrors)
+ throw e;
+ }
+};
+function build(options, isSynchronous) {
+ if (!options.resolveSymlinks || options.excludeSymlinks)
+ return null;
+ return isSynchronous ? resolveSymlinks : resolveSymlinksAsync;
+}
+exports.build = build;
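+// Symlink cycle detection. With useRealPaths, the visited list of real paths is
+// checked directly; otherwise the parent chain is walked and compared against
+// previously recorded symlink resolutions that overlap this link's target.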
+function isRecursive(path, resolved, state) {
+ if (state.options.useRealPaths)
+ return isRecursiveUsingRealPaths(resolved, state);
+ let parent = (0, path_1.dirname)(path);
+ let depth = 1;
+ while (parent !== state.root && depth < 2) {
+ const resolvedPath = state.symlinks.get(parent);
+ const isSameRoot = !!resolvedPath &&
+ (resolvedPath === resolved ||
+ resolvedPath.startsWith(resolved) ||
+ resolved.startsWith(resolvedPath));
+ if (isSameRoot)
+ depth++;
+ else
+ parent = (0, path_1.dirname)(parent);
+ }
+ state.symlinks.set(path, resolved);
+ return depth > 1;
+}
+function isRecursiveUsingRealPaths(resolved, state) {
+ return state.visited.includes(resolved + state.options.pathSeparator);
+}
diff --git a/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/walk-directory.js b/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/walk-directory.js
new file mode 100644
index 00000000000000..424302b6f9e144
--- /dev/null
+++ b/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/walk-directory.js
@@ -0,0 +1,40 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.build = void 0;
+const fs_1 = __importDefault(require("fs"));
+const readdirOpts = { withFileTypes: true };
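+// The async walker registers with the shared queue before issuing fs.readdir,
+// so the final callback only fires once every pending readdir has drained.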
+const walkAsync = (state, crawlPath, directoryPath, currentDepth, callback) => {
+ state.queue.enqueue();
+ if (currentDepth < 0)
+ return state.queue.dequeue(null, state);
+ state.visited.push(crawlPath);
+ state.counts.directories++;
+ // Perf: Node >= 10 introduced withFileTypes that helps us
+ // skip an extra fs.stat call.
+ fs_1.default.readdir(crawlPath || ".", readdirOpts, (error, entries = []) => {
+ callback(entries, directoryPath, currentDepth);
+ state.queue.dequeue(state.options.suppressErrors ? null : error, state);
+ });
+};
+const walkSync = (state, crawlPath, directoryPath, currentDepth, callback) => {
+ if (currentDepth < 0)
+ return;
+ state.visited.push(crawlPath);
+ state.counts.directories++;
+ let entries = [];
+ try {
+ entries = fs_1.default.readdirSync(crawlPath || ".", readdirOpts);
+ }
+ catch (e) {
+ if (!state.options.suppressErrors)
+ throw e;
+ }
+ callback(entries, directoryPath, currentDepth);
+};
+function build(isSynchronous) {
+ return isSynchronous ? walkSync : walkAsync;
+}
+exports.build = build;
diff --git a/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/api/queue.js b/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/api/queue.js
new file mode 100644
index 00000000000000..4708d422350af8
--- /dev/null
+++ b/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/api/queue.js
@@ -0,0 +1,29 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Queue = void 0;
+/**
+ * This is a custom stateless queue to track concurrent async fs calls.
+ * It increments a counter whenever a call is queued and decrements it
+ * as soon as it completes. When the counter hits 0, it calls onQueueEmpty.
+ */
+class Queue {
+ onQueueEmpty;
+ count = 0;
+ constructor(onQueueEmpty) {
+ this.onQueueEmpty = onQueueEmpty;
+ }
+ enqueue() {
+ this.count++;
+ return this.count;
+ }
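+ // On error the walk is short-circuited: onQueueEmpty fires immediately, the
+ // shared AbortController is aborted, and the callback is cleared so it cannot
+ // run again while the remaining queued calls drain.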
+ dequeue(error, output) {
+ if (this.onQueueEmpty && (--this.count <= 0 || error)) {
+ this.onQueueEmpty(error, output);
+ if (error) {
+ output.controller.abort();
+ this.onQueueEmpty = undefined;
+ }
+ }
+ }
+}
+exports.Queue = Queue;
diff --git a/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/api/sync.js b/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/api/sync.js
new file mode 100644
index 00000000000000..073bc88d212bef
--- /dev/null
+++ b/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/api/sync.js
@@ -0,0 +1,9 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.sync = void 0;
+const walker_1 = require("./walker");
+function sync(root, options) {
+ const walker = new walker_1.Walker(root, options);
+ return walker.start();
+}
+exports.sync = sync;
diff --git a/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/api/walker.js b/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/api/walker.js
new file mode 100644
index 00000000000000..19e913785956f7
--- /dev/null
+++ b/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/api/walker.js
@@ -0,0 +1,129 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Walker = void 0;
+const path_1 = require("path");
+const utils_1 = require("../utils");
+const joinPath = __importStar(require("./functions/join-path"));
+const pushDirectory = __importStar(require("./functions/push-directory"));
+const pushFile = __importStar(require("./functions/push-file"));
+const getArray = __importStar(require("./functions/get-array"));
+const groupFiles = __importStar(require("./functions/group-files"));
+const resolveSymlink = __importStar(require("./functions/resolve-symlink"));
+const invokeCallback = __importStar(require("./functions/invoke-callback"));
+const walkDirectory = __importStar(require("./functions/walk-directory"));
+const queue_1 = require("./queue");
+const counter_1 = require("./counter");
+class Walker {
+ root;
+ isSynchronous;
+ state;
+ joinPath;
+ pushDirectory;
+ pushFile;
+ getArray;
+ groupFiles;
+ resolveSymlink;
+ walkDirectory;
+ callbackInvoker;
+ constructor(root, options, callback) {
+ this.isSynchronous = !callback;
+ this.callbackInvoker = invokeCallback.build(options, this.isSynchronous);
+ this.root = (0, utils_1.normalizePath)(root, options);
+ this.state = {
+ root: (0, utils_1.isRootDirectory)(this.root) ? this.root : this.root.slice(0, -1),
+ // Perf: we explicitly tell the compiler to optimize for String arrays
+ paths: [""].slice(0, 0),
+ groups: [],
+ counts: new counter_1.Counter(),
+ options,
+ queue: new queue_1.Queue((error, state) => this.callbackInvoker(state, error, callback)),
+ symlinks: new Map(),
+ visited: [""].slice(0, 0),
+ controller: new AbortController(),
+ };
+ /*
+ * Perf: We conditionally change functions according to options. This gives a slight
+ * performance boost. Since these functions are so small, they are automatically inlined
+ * by the javascript engine so there's no function call overhead (in most cases).
+ */
+ this.joinPath = joinPath.build(this.root, options);
+ this.pushDirectory = pushDirectory.build(this.root, options);
+ this.pushFile = pushFile.build(options);
+ this.getArray = getArray.build(options);
+ this.groupFiles = groupFiles.build(options);
+ this.resolveSymlink = resolveSymlink.build(options, this.isSynchronous);
+ this.walkDirectory = walkDirectory.build(this.isSynchronous);
+ }
+ start() {
+ this.pushDirectory(this.root, this.state.paths, this.state.options.filters);
+ this.walkDirectory(this.state, this.root, this.root, this.state.options.maxDepth, this.walk);
+ return this.isSynchronous ? this.callbackInvoker(this.state, null) : null;
+ }
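+ // Invoked once per directory with its dirents: plain files (and unresolved,
+ // non-excluded symlinks) are pushed as files, subdirectories recurse through
+ // walkDirectory, and symlinks are resolved first when resolveSymlinks is set.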
+ walk = (entries, directoryPath, depth) => {
+ const { paths, options: { filters, resolveSymlinks, excludeSymlinks, exclude, maxFiles, signal, useRealPaths, pathSeparator, }, controller, } = this.state;
+ if (controller.signal.aborted ||
+ (signal && signal.aborted) ||
+ (maxFiles && paths.length > maxFiles))
+ return;
+ const files = this.getArray(this.state.paths);
+ for (let i = 0; i < entries.length; ++i) {
+ const entry = entries[i];
+ if (entry.isFile() ||
+ (entry.isSymbolicLink() && !resolveSymlinks && !excludeSymlinks)) {
+ const filename = this.joinPath(entry.name, directoryPath);
+ this.pushFile(filename, files, this.state.counts, filters);
+ }
+ else if (entry.isDirectory()) {
+ let path = joinPath.joinDirectoryPath(entry.name, directoryPath, this.state.options.pathSeparator);
+ if (exclude && exclude(entry.name, path))
+ continue;
+ this.pushDirectory(path, paths, filters);
+ this.walkDirectory(this.state, path, path, depth - 1, this.walk);
+ }
+ else if (this.resolveSymlink && entry.isSymbolicLink()) {
+ let path = joinPath.joinPathWithBasePath(entry.name, directoryPath);
+ this.resolveSymlink(path, this.state, (stat, resolvedPath) => {
+ if (stat.isDirectory()) {
+ resolvedPath = (0, utils_1.normalizePath)(resolvedPath, this.state.options);
+ if (exclude &&
+ exclude(entry.name, useRealPaths ? resolvedPath : path + pathSeparator))
+ return;
+ this.walkDirectory(this.state, resolvedPath, useRealPaths ? resolvedPath : path + pathSeparator, depth - 1, this.walk);
+ }
+ else {
+ resolvedPath = useRealPaths ? resolvedPath : path;
+ const filename = (0, path_1.basename)(resolvedPath);
+ const directoryPath = (0, utils_1.normalizePath)((0, path_1.dirname)(resolvedPath), this.state.options);
+ resolvedPath = this.joinPath(filename, directoryPath);
+ this.pushFile(resolvedPath, files, this.state.counts, filters);
+ }
+ });
+ }
+ }
+ this.groupFiles(this.state.groups, directoryPath, files);
+ };
+}
+exports.Walker = Walker;
diff --git a/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/builder/api-builder.js b/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/builder/api-builder.js
new file mode 100644
index 00000000000000..0538e6fabfb496
--- /dev/null
+++ b/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/builder/api-builder.js
@@ -0,0 +1,23 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.APIBuilder = void 0;
+const async_1 = require("../api/async");
+const sync_1 = require("../api/sync");
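+// Thin wrapper returned by Builder.crawl(); the crawl itself only runs when
+// withPromise(), withCallback(), or sync() is invoked.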
+class APIBuilder {
+ root;
+ options;
+ constructor(root, options) {
+ this.root = root;
+ this.options = options;
+ }
+ withPromise() {
+ return (0, async_1.promise)(this.root, this.options);
+ }
+ withCallback(cb) {
+ (0, async_1.callback)(this.root, this.options, cb);
+ }
+ sync() {
+ return (0, sync_1.sync)(this.root, this.options);
+ }
+}
+exports.APIBuilder = APIBuilder;
diff --git a/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/builder/index.js b/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/builder/index.js
new file mode 100644
index 00000000000000..7f99aece6a3486
--- /dev/null
+++ b/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/builder/index.js
@@ -0,0 +1,136 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Builder = void 0;
+const path_1 = require("path");
+const api_builder_1 = require("./api-builder");
+var pm = null;
+/* c8 ignore next 6 */
+try {
+ require.resolve("picomatch");
+ pm = require("picomatch");
+}
+catch (_e) {
+ // do nothing
+}
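+// picomatch is an optional dependency: when it is not installed, glob() throws
+// unless a custom matcher has been supplied via withGlobFunction().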
+class Builder {
+ globCache = {};
+ options = {
+ maxDepth: Infinity,
+ suppressErrors: true,
+ pathSeparator: path_1.sep,
+ filters: [],
+ };
+ globFunction;
+ constructor(options) {
+ this.options = { ...this.options, ...options };
+ this.globFunction = this.options.globFunction;
+ }
+ group() {
+ this.options.group = true;
+ return this;
+ }
+ withPathSeparator(separator) {
+ this.options.pathSeparator = separator;
+ return this;
+ }
+ withBasePath() {
+ this.options.includeBasePath = true;
+ return this;
+ }
+ withRelativePaths() {
+ this.options.relativePaths = true;
+ return this;
+ }
+ withDirs() {
+ this.options.includeDirs = true;
+ return this;
+ }
+ withMaxDepth(depth) {
+ this.options.maxDepth = depth;
+ return this;
+ }
+ withMaxFiles(limit) {
+ this.options.maxFiles = limit;
+ return this;
+ }
+ withFullPaths() {
+ this.options.resolvePaths = true;
+ this.options.includeBasePath = true;
+ return this;
+ }
+ withErrors() {
+ this.options.suppressErrors = false;
+ return this;
+ }
+ withSymlinks({ resolvePaths = true } = {}) {
+ this.options.resolveSymlinks = true;
+ this.options.useRealPaths = resolvePaths;
+ return this.withFullPaths();
+ }
+ withAbortSignal(signal) {
+ this.options.signal = signal;
+ return this;
+ }
+ normalize() {
+ this.options.normalizePath = true;
+ return this;
+ }
+ filter(predicate) {
+ this.options.filters.push(predicate);
+ return this;
+ }
+ onlyDirs() {
+ this.options.excludeFiles = true;
+ this.options.includeDirs = true;
+ return this;
+ }
+ exclude(predicate) {
+ this.options.exclude = predicate;
+ return this;
+ }
+ onlyCounts() {
+ this.options.onlyCounts = true;
+ return this;
+ }
+ crawl(root) {
+ return new api_builder_1.APIBuilder(root || ".", this.options);
+ }
+ withGlobFunction(fn) {
+ // cast this since we don't have the new type params yet
+ this.globFunction = fn;
+ return this;
+ }
+ /**
+ * @deprecated Pass options using the constructor instead:
+ * ```ts
+ * new fdir(options).crawl("/path/to/root");
+ * ```
+ * This method will be removed in v7.0
+ */
+ /* c8 ignore next 4 */
+ crawlWithOptions(root, options) {
+ this.options = { ...this.options, ...options };
+ return new api_builder_1.APIBuilder(root || ".", this.options);
+ }
+ glob(...patterns) {
+ if (this.globFunction) {
+ return this.globWithOptions(patterns);
+ }
+ return this.globWithOptions(patterns, ...[{ dot: true }]);
+ }
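+ // Compiled matchers are cached per pattern list (joined with "\0"), so repeated
+ // glob() calls with the same patterns reuse the matcher instead of recompiling.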
+ globWithOptions(patterns, ...options) {
+ const globFn = (this.globFunction || pm);
+ /* c8 ignore next 5 */
+ if (!globFn) {
+ throw new Error("Please specify a glob function to use glob matching.");
+ }
+ var isMatch = this.globCache[patterns.join("\0")];
+ if (!isMatch) {
+ isMatch = globFn(patterns, ...options);
+ this.globCache[patterns.join("\0")] = isMatch;
+ }
+ this.options.filters.push((path) => isMatch(path));
+ return this;
+ }
+}
+exports.Builder = Builder;
diff --git a/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/index.cjs b/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/index.cjs
new file mode 100644
index 00000000000000..83e724896ff821
--- /dev/null
+++ b/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/index.cjs
@@ -0,0 +1,572 @@
+//#region rolldown:runtime
+var __create = Object.create;
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __getProtoOf = Object.getPrototypeOf;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") for (var keys = __getOwnPropNames(from), i = 0, n = keys.length, key; i < n; i++) {
+ key = keys[i];
+ if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, {
+ get: ((k) => from[k]).bind(null, key),
+ enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
+ });
+ }
+ return to;
+};
+var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", {
+ value: mod,
+ enumerable: true
+}) : target, mod));
+
+//#endregion
+const path = __toESM(require("path"));
+const fs = __toESM(require("fs"));
+
+//#region src/utils.ts
+function cleanPath(path$1) {
+ let normalized = (0, path.normalize)(path$1);
+ if (normalized.length > 1 && normalized[normalized.length - 1] === path.sep) normalized = normalized.substring(0, normalized.length - 1);
+ return normalized;
+}
+const SLASHES_REGEX = /[\\/]/g;
+function convertSlashes(path$1, separator) {
+ return path$1.replace(SLASHES_REGEX, separator);
+}
+const WINDOWS_ROOT_DIR_REGEX = /^[a-z]:[\\/]$/i;
+function isRootDirectory(path$1) {
+ return path$1 === "/" || WINDOWS_ROOT_DIR_REGEX.test(path$1);
+}
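+// Normalizes the crawl root: optionally resolves it to an absolute path, strips
+// a duplicate trailing separator, converts slashes to the configured separator,
+// and guarantees exactly one trailing pathSeparator ("." becomes "").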
+function normalizePath(path$1, options) {
+ const { resolvePaths, normalizePath: normalizePath$1, pathSeparator } = options;
+ const pathNeedsCleaning = process.platform === "win32" && path$1.includes("/") || path$1.startsWith(".");
+ if (resolvePaths) path$1 = (0, path.resolve)(path$1);
+ if (normalizePath$1 || pathNeedsCleaning) path$1 = cleanPath(path$1);
+ if (path$1 === ".") return "";
+ const needsSeperator = path$1[path$1.length - 1] !== pathSeparator;
+ return convertSlashes(needsSeperator ? path$1 + pathSeparator : path$1, pathSeparator);
+}
+
+//#endregion
+//#region src/api/functions/join-path.ts
+function joinPathWithBasePath(filename, directoryPath) {
+ return directoryPath + filename;
+}
+function joinPathWithRelativePath(root, options) {
+ return function(filename, directoryPath) {
+ const sameRoot = directoryPath.startsWith(root);
+ if (sameRoot) return directoryPath.replace(root, "") + filename;
+ else return convertSlashes((0, path.relative)(root, directoryPath), options.pathSeparator) + options.pathSeparator + filename;
+ };
+}
+function joinPath(filename) {
+ return filename;
+}
+function joinDirectoryPath(filename, directoryPath, separator) {
+ return directoryPath + filename + separator;
+}
+function build$7(root, options) {
+ const { relativePaths, includeBasePath } = options;
+ return relativePaths && root ? joinPathWithRelativePath(root, options) : includeBasePath ? joinPathWithBasePath : joinPath;
+}
+
+//#endregion
+//#region src/api/functions/push-directory.ts
+function pushDirectoryWithRelativePath(root) {
+ return function(directoryPath, paths) {
+ paths.push(directoryPath.substring(root.length) || ".");
+ };
+}
+function pushDirectoryFilterWithRelativePath(root) {
+ return function(directoryPath, paths, filters) {
+ const relativePath = directoryPath.substring(root.length) || ".";
+ if (filters.every((filter) => filter(relativePath, true))) paths.push(relativePath);
+ };
+}
+const pushDirectory = (directoryPath, paths) => {
+ paths.push(directoryPath || ".");
+};
+const pushDirectoryFilter = (directoryPath, paths, filters) => {
+ const path$1 = directoryPath || ".";
+ if (filters.every((filter) => filter(path$1, true))) paths.push(path$1);
+};
+const empty$2 = () => {};
+function build$6(root, options) {
+ const { includeDirs, filters, relativePaths } = options;
+ if (!includeDirs) return empty$2;
+ if (relativePaths) return filters && filters.length ? pushDirectoryFilterWithRelativePath(root) : pushDirectoryWithRelativePath(root);
+ return filters && filters.length ? pushDirectoryFilter : pushDirectory;
+}
+
+//#endregion
+//#region src/api/functions/push-file.ts
+const pushFileFilterAndCount = (filename, _paths, counts, filters) => {
+ if (filters.every((filter) => filter(filename, false))) counts.files++;
+};
+const pushFileFilter = (filename, paths, _counts, filters) => {
+ if (filters.every((filter) => filter(filename, false))) paths.push(filename);
+};
+const pushFileCount = (_filename, _paths, counts, _filters) => {
+ counts.files++;
+};
+const pushFile = (filename, paths) => {
+ paths.push(filename);
+};
+const empty$1 = () => {};
+function build$5(options) {
+ const { excludeFiles, filters, onlyCounts } = options;
+ if (excludeFiles) return empty$1;
+ if (filters && filters.length) return onlyCounts ? pushFileFilterAndCount : pushFileFilter;
+ else if (onlyCounts) return pushFileCount;
+ else return pushFile;
+}
+
+//#endregion
+//#region src/api/functions/get-array.ts
+const getArray = (paths) => {
+ return paths;
+};
+const getArrayGroup = () => {
+ return [""].slice(0, 0);
+};
+function build$4(options) {
+ return options.group ? getArrayGroup : getArray;
+}
+
+//#endregion
+//#region src/api/functions/group-files.ts
+const groupFiles = (groups, directory, files) => {
+ groups.push({
+ directory,
+ files,
+ dir: directory
+ });
+};
+const empty = () => {};
+function build$3(options) {
+ return options.group ? groupFiles : empty;
+}
+
+//#endregion
+//#region src/api/functions/resolve-symlink.ts
+const resolveSymlinksAsync = function(path$1, state, callback$1) {
+ const { queue, options: { suppressErrors } } = state;
+ queue.enqueue();
+ fs.default.realpath(path$1, (error, resolvedPath) => {
+ if (error) return queue.dequeue(suppressErrors ? null : error, state);
+ fs.default.stat(resolvedPath, (error$1, stat) => {
+ if (error$1) return queue.dequeue(suppressErrors ? null : error$1, state);
+ if (stat.isDirectory() && isRecursive(path$1, resolvedPath, state)) return queue.dequeue(null, state);
+ callback$1(stat, resolvedPath);
+ queue.dequeue(null, state);
+ });
+ });
+};
+const resolveSymlinks = function(path$1, state, callback$1) {
+ const { queue, options: { suppressErrors } } = state;
+ queue.enqueue();
+ try {
+ const resolvedPath = fs.default.realpathSync(path$1);
+ const stat = fs.default.statSync(resolvedPath);
+ if (stat.isDirectory() && isRecursive(path$1, resolvedPath, state)) return;
+ callback$1(stat, resolvedPath);
+ } catch (e) {
+ if (!suppressErrors) throw e;
+ }
+};
+function build$2(options, isSynchronous) {
+ if (!options.resolveSymlinks || options.excludeSymlinks) return null;
+ return isSynchronous ? resolveSymlinks : resolveSymlinksAsync;
+}
+function isRecursive(path$1, resolved, state) {
+ if (state.options.useRealPaths) return isRecursiveUsingRealPaths(resolved, state);
+ let parent = (0, path.dirname)(path$1);
+ let depth = 1;
+ while (parent !== state.root && depth < 2) {
+ const resolvedPath = state.symlinks.get(parent);
+ const isSameRoot = !!resolvedPath && (resolvedPath === resolved || resolvedPath.startsWith(resolved) || resolved.startsWith(resolvedPath));
+ if (isSameRoot) depth++;
+ else parent = (0, path.dirname)(parent);
+ }
+ state.symlinks.set(path$1, resolved);
+ return depth > 1;
+}
+function isRecursiveUsingRealPaths(resolved, state) {
+ return state.visited.includes(resolved + state.options.pathSeparator);
+}
+
+//#endregion
+//#region src/api/functions/invoke-callback.ts
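+// Sync invokers return the result (paths, counts, or groups) directly; async
+// invokers pass it to the user callback via report(), which replaces the error
+// with null when suppressErrors is enabled.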
+const onlyCountsSync = (state) => {
+ return state.counts;
+};
+const groupsSync = (state) => {
+ return state.groups;
+};
+const defaultSync = (state) => {
+ return state.paths;
+};
+const limitFilesSync = (state) => {
+ return state.paths.slice(0, state.options.maxFiles);
+};
+const onlyCountsAsync = (state, error, callback$1) => {
+ report(error, callback$1, state.counts, state.options.suppressErrors);
+ return null;
+};
+const defaultAsync = (state, error, callback$1) => {
+ report(error, callback$1, state.paths, state.options.suppressErrors);
+ return null;
+};
+const limitFilesAsync = (state, error, callback$1) => {
+ report(error, callback$1, state.paths.slice(0, state.options.maxFiles), state.options.suppressErrors);
+ return null;
+};
+const groupsAsync = (state, error, callback$1) => {
+ report(error, callback$1, state.groups, state.options.suppressErrors);
+ return null;
+};
+function report(error, callback$1, output, suppressErrors) {
+ if (error && !suppressErrors) callback$1(error, output);
+ else callback$1(null, output);
+}
+function build$1(options, isSynchronous) {
+ const { onlyCounts, group, maxFiles } = options;
+ if (onlyCounts) return isSynchronous ? onlyCountsSync : onlyCountsAsync;
+ else if (group) return isSynchronous ? groupsSync : groupsAsync;
+ else if (maxFiles) return isSynchronous ? limitFilesSync : limitFilesAsync;
+ else return isSynchronous ? defaultSync : defaultAsync;
+}
+
+//#endregion
+//#region src/api/functions/walk-directory.ts
+const readdirOpts = { withFileTypes: true };
+const walkAsync = (state, crawlPath, directoryPath, currentDepth, callback$1) => {
+ state.queue.enqueue();
+ if (currentDepth <= 0) return state.queue.dequeue(null, state);
+ state.visited.push(crawlPath);
+ state.counts.directories++;
+ fs.default.readdir(crawlPath || ".", readdirOpts, (error, entries = []) => {
+ callback$1(entries, directoryPath, currentDepth);
+ state.queue.dequeue(state.options.suppressErrors ? null : error, state);
+ });
+};
+const walkSync = (state, crawlPath, directoryPath, currentDepth, callback$1) => {
+ if (currentDepth <= 0) return;
+ state.visited.push(crawlPath);
+ state.counts.directories++;
+ let entries = [];
+ try {
+ entries = fs.default.readdirSync(crawlPath || ".", readdirOpts);
+ } catch (e) {
+ if (!state.options.suppressErrors) throw e;
+ }
+ callback$1(entries, directoryPath, currentDepth);
+};
+function build(isSynchronous) {
+ return isSynchronous ? walkSync : walkAsync;
+}
+
+//#endregion
+//#region src/api/queue.ts
+/**
+* This is a custom stateless queue to track concurrent async fs calls.
+* It increments a counter whenever a call is queued and decrements it
+* as soon as it completes. When the counter hits 0, it calls onQueueEmpty.
+*/
+var Queue = class {
+ count = 0;
+ constructor(onQueueEmpty) {
+ this.onQueueEmpty = onQueueEmpty;
+ }
+ enqueue() {
+ this.count++;
+ return this.count;
+ }
+ dequeue(error, output) {
+ if (this.onQueueEmpty && (--this.count <= 0 || error)) {
+ this.onQueueEmpty(error, output);
+ if (error) {
+ output.controller.abort();
+ this.onQueueEmpty = void 0;
+ }
+ }
+ }
+};
+
+//#endregion
+//#region src/api/counter.ts
+var Counter = class {
+ _files = 0;
+ _directories = 0;
+ set files(num) {
+ this._files = num;
+ }
+ get files() {
+ return this._files;
+ }
+ set directories(num) {
+ this._directories = num;
+ }
+ get directories() {
+ return this._directories;
+ }
+ /**
+ * @deprecated use `directories` instead
+ */
+ /* c8 ignore next 3 */
+ get dirs() {
+ return this._directories;
+ }
+};
+
+//#endregion
+//#region src/api/walker.ts
+var Walker = class {
+ root;
+ isSynchronous;
+ state;
+ joinPath;
+ pushDirectory;
+ pushFile;
+ getArray;
+ groupFiles;
+ resolveSymlink;
+ walkDirectory;
+ callbackInvoker;
+ constructor(root, options, callback$1) {
+ this.isSynchronous = !callback$1;
+ this.callbackInvoker = build$1(options, this.isSynchronous);
+ this.root = normalizePath(root, options);
+ this.state = {
+ root: isRootDirectory(this.root) ? this.root : this.root.slice(0, -1),
+ paths: [""].slice(0, 0),
+ groups: [],
+ counts: new Counter(),
+ options,
+ queue: new Queue((error, state) => this.callbackInvoker(state, error, callback$1)),
+ symlinks: /* @__PURE__ */ new Map(),
+ visited: [""].slice(0, 0),
+ controller: new AbortController()
+ };
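+ // Each helper is specialized once per crawl from the options (see the build$*
+ // factories above), so the walk loop does not re-check options per entry.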
+ this.joinPath = build$7(this.root, options);
+ this.pushDirectory = build$6(this.root, options);
+ this.pushFile = build$5(options);
+ this.getArray = build$4(options);
+ this.groupFiles = build$3(options);
+ this.resolveSymlink = build$2(options, this.isSynchronous);
+ this.walkDirectory = build(this.isSynchronous);
+ }
+ start() {
+ this.pushDirectory(this.root, this.state.paths, this.state.options.filters);
+ this.walkDirectory(this.state, this.root, this.root, this.state.options.maxDepth, this.walk);
+ return this.isSynchronous ? this.callbackInvoker(this.state, null) : null;
+ }
+ walk = (entries, directoryPath, depth) => {
+ const { paths, options: { filters, resolveSymlinks: resolveSymlinks$1, excludeSymlinks, exclude, maxFiles, signal, useRealPaths, pathSeparator }, controller } = this.state;
+ if (controller.signal.aborted || signal && signal.aborted || maxFiles && paths.length > maxFiles) return;
+ const files = this.getArray(this.state.paths);
+ for (let i = 0; i < entries.length; ++i) {
+ const entry = entries[i];
+ if (entry.isFile() || entry.isSymbolicLink() && !resolveSymlinks$1 && !excludeSymlinks) {
+ const filename = this.joinPath(entry.name, directoryPath);
+ this.pushFile(filename, files, this.state.counts, filters);
+ } else if (entry.isDirectory()) {
+ let path$1 = joinDirectoryPath(entry.name, directoryPath, this.state.options.pathSeparator);
+ if (exclude && exclude(entry.name, path$1)) continue;
+ this.pushDirectory(path$1, paths, filters);
+ this.walkDirectory(this.state, path$1, path$1, depth - 1, this.walk);
+ } else if (this.resolveSymlink && entry.isSymbolicLink()) {
+ let path$1 = joinPathWithBasePath(entry.name, directoryPath);
+ this.resolveSymlink(path$1, this.state, (stat, resolvedPath) => {
+ if (stat.isDirectory()) {
+ resolvedPath = normalizePath(resolvedPath, this.state.options);
+ if (exclude && exclude(entry.name, useRealPaths ? resolvedPath : path$1 + pathSeparator)) return;
+ this.walkDirectory(this.state, resolvedPath, useRealPaths ? resolvedPath : path$1 + pathSeparator, depth - 1, this.walk);
+ } else {
+ resolvedPath = useRealPaths ? resolvedPath : path$1;
+ const filename = (0, path.basename)(resolvedPath);
+ const directoryPath$1 = normalizePath((0, path.dirname)(resolvedPath), this.state.options);
+ resolvedPath = this.joinPath(filename, directoryPath$1);
+ this.pushFile(resolvedPath, files, this.state.counts, filters);
+ }
+ });
+ }
+ }
+ this.groupFiles(this.state.groups, directoryPath, files);
+ };
+};
+
+//#endregion
+//#region src/api/async.ts
+function promise(root, options) {
+ return new Promise((resolve$1, reject) => {
+ callback(root, options, (err, output) => {
+ if (err) return reject(err);
+ resolve$1(output);
+ });
+ });
+}
+function callback(root, options, callback$1) {
+ let walker = new Walker(root, options, callback$1);
+ walker.start();
+}
+
+//#endregion
+//#region src/api/sync.ts
+function sync(root, options) {
+ const walker = new Walker(root, options);
+ return walker.start();
+}
+
+//#endregion
+//#region src/builder/api-builder.ts
+var APIBuilder = class {
+ constructor(root, options) {
+ this.root = root;
+ this.options = options;
+ }
+ withPromise() {
+ return promise(this.root, this.options);
+ }
+ withCallback(cb) {
+ callback(this.root, this.options, cb);
+ }
+ sync() {
+ return sync(this.root, this.options);
+ }
+};
+
+//#endregion
+//#region src/builder/index.ts
+var pm = null;
+/* c8 ignore next 6 */
+try {
+ require.resolve("picomatch");
+ pm = require("picomatch");
+} catch (_e) {}
+var Builder = class {
+ globCache = {};
+ options = {
+ maxDepth: Infinity,
+ suppressErrors: true,
+ pathSeparator: path.sep,
+ filters: []
+ };
+ globFunction;
+ constructor(options) {
+ this.options = {
+ ...this.options,
+ ...options
+ };
+ this.globFunction = this.options.globFunction;
+ }
+ group() {
+ this.options.group = true;
+ return this;
+ }
+ withPathSeparator(separator) {
+ this.options.pathSeparator = separator;
+ return this;
+ }
+ withBasePath() {
+ this.options.includeBasePath = true;
+ return this;
+ }
+ withRelativePaths() {
+ this.options.relativePaths = true;
+ return this;
+ }
+ withDirs() {
+ this.options.includeDirs = true;
+ return this;
+ }
+ withMaxDepth(depth) {
+ this.options.maxDepth = depth;
+ return this;
+ }
+ withMaxFiles(limit) {
+ this.options.maxFiles = limit;
+ return this;
+ }
+ withFullPaths() {
+ this.options.resolvePaths = true;
+ this.options.includeBasePath = true;
+ return this;
+ }
+ withErrors() {
+ this.options.suppressErrors = false;
+ return this;
+ }
+ withSymlinks({ resolvePaths = true } = {}) {
+ this.options.resolveSymlinks = true;
+ this.options.useRealPaths = resolvePaths;
+ return this.withFullPaths();
+ }
+ withAbortSignal(signal) {
+ this.options.signal = signal;
+ return this;
+ }
+ normalize() {
+ this.options.normalizePath = true;
+ return this;
+ }
+ filter(predicate) {
+ this.options.filters.push(predicate);
+ return this;
+ }
+ onlyDirs() {
+ this.options.excludeFiles = true;
+ this.options.includeDirs = true;
+ return this;
+ }
+ exclude(predicate) {
+ this.options.exclude = predicate;
+ return this;
+ }
+ onlyCounts() {
+ this.options.onlyCounts = true;
+ return this;
+ }
+ crawl(root) {
+ return new APIBuilder(root || ".", this.options);
+ }
+ withGlobFunction(fn) {
+ this.globFunction = fn;
+ return this;
+ }
+ /**
+ * @deprecated Pass options using the constructor instead:
+ * ```ts
+ * new fdir(options).crawl("/path/to/root");
+ * ```
+ * This method will be removed in v7.0
+ */
+ /* c8 ignore next 4 */
+ crawlWithOptions(root, options) {
+ this.options = {
+ ...this.options,
+ ...options
+ };
+ return new APIBuilder(root || ".", this.options);
+ }
+ glob(...patterns) {
+ if (this.globFunction) return this.globWithOptions(patterns);
+ return this.globWithOptions(patterns, ...[{ dot: true }]);
+ }
+ globWithOptions(patterns, ...options) {
+ const globFn = this.globFunction || pm;
+ /* c8 ignore next 5 */
+ if (!globFn) throw new Error("Please specify a glob function to use glob matching.");
+ var isMatch = this.globCache[patterns.join("\0")];
+ if (!isMatch) {
+ isMatch = globFn(patterns, ...options);
+ this.globCache[patterns.join("\0")] = isMatch;
+ }
+ this.options.filters.push((path$1) => isMatch(path$1));
+ return this;
+ }
+};
+
+//#endregion
+exports.fdir = Builder;
\ No newline at end of file
diff --git a/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/index.d.cts b/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/index.d.cts
new file mode 100644
index 00000000000000..8eb36bc363449a
--- /dev/null
+++ b/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/index.d.cts
@@ -0,0 +1,134 @@
+///
+import picomatch from "picomatch";
+
+//#region src/api/queue.d.ts
+type OnQueueEmptyCallback = (error: Error | null, output: WalkerState) => void;
+/**
+ * This is a custom stateless queue to track concurrent async fs calls.
+ * It increments a counter whenever a call is queued and decrements it
+ * as soon as it completes. When the counter hits 0, it calls onQueueEmpty.
+ */
+declare class Queue {
+ private onQueueEmpty?;
+ count: number;
+ constructor(onQueueEmpty?: OnQueueEmptyCallback | undefined);
+ enqueue(): number;
+ dequeue(error: Error | null, output: WalkerState): void;
+}
+//#endregion
+//#region src/types.d.ts
+type Counts = {
+ files: number;
+ directories: number;
+ /**
+ * @deprecated use `directories` instead. Will be removed in v7.0.
+ */
+ dirs: number;
+};
+type Group = {
+ directory: string;
+ files: string[];
+ /**
+ * @deprecated use `directory` instead. Will be removed in v7.0.
+ */
+ dir: string;
+};
+type GroupOutput = Group[];
+type OnlyCountsOutput = Counts;
+type PathsOutput = string[];
+type Output = OnlyCountsOutput | PathsOutput | GroupOutput;
+type WalkerState = {
+ root: string;
+ paths: string[];
+ groups: Group[];
+ counts: Counts;
+ options: Options;
+ queue: Queue;
+ controller: AbortController;
+ symlinks: Map<string, string>;
+ visited: string[];
+};
+type ResultCallback<TOutput extends Output> = (error: Error | null, output: TOutput) => void;
+type FilterPredicate = (path: string, isDirectory: boolean) => boolean;
+type ExcludePredicate = (dirName: string, dirPath: string) => boolean;
+type PathSeparator = "/" | "\\";
+type Options<TGlobFunction = unknown> = {
+ includeBasePath?: boolean;
+ includeDirs?: boolean;
+ normalizePath?: boolean;
+ maxDepth: number;
+ maxFiles?: number;
+ resolvePaths?: boolean;
+ suppressErrors: boolean;
+ group?: boolean;
+ onlyCounts?: boolean;
+ filters: FilterPredicate[];
+ resolveSymlinks?: boolean;
+ useRealPaths?: boolean;
+ excludeFiles?: boolean;
+ excludeSymlinks?: boolean;
+ exclude?: ExcludePredicate;
+ relativePaths?: boolean;
+ pathSeparator: PathSeparator;
+ signal?: AbortSignal;
+ globFunction?: TGlobFunction;
+};
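+// maxDepth, suppressErrors, filters and pathSeparator are non-optional here; the
+// Builder seeds them with defaults (Infinity, true, [], path.sep) before a crawl.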
+type GlobMatcher = (test: string) => boolean;
+type GlobFunction = (glob: string | string[], ...params: unknown[]) => GlobMatcher;
+type GlobParams<T> = T extends ((globs: string | string[], ...params: infer TParams extends unknown[]) => GlobMatcher) ? TParams : [];
+//#endregion
+//#region src/builder/api-builder.d.ts
+declare class APIBuilder<TReturnType extends Output> {
+ private readonly root;
+ private readonly options;
+ constructor(root: string, options: Options);
+ withPromise(): Promise<TReturnType>;
+ withCallback(cb: ResultCallback<TReturnType>): void;
+ sync(): TReturnType;
+}
+//#endregion
+//#region src/builder/index.d.ts
+declare class Builder<TReturnType extends Output = PathsOutput, TGlobFunction = GlobFunction> {
+ private readonly globCache;
+ private options;
+ private globFunction?;
+ constructor(options?: Partial