diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index da85ed78dd6f79..5ef23759b3e18e 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -18,6 +18,7 @@ /doc/contributing/**/* @nodejs/tsc /GOVERNANCE.md @nodejs/tsc /SECURITY.md @nodejs/tsc +/BUILDING.md @nodejs/build @nodejs/tsc /LICENSE @nodejs/tsc /onboarding.md @nodejs/tsc @@ -223,3 +224,8 @@ /lib/internal/inspector/* @nodejs/inspector /lib/internal/inspector_* @nodejs/inspector /lib/inspector.js @nodejs/inspector + +# path +/lib/path.js @nodejs/path +/lib/path/* @nodejs/path +/test/parallel/test-path-* @nodejs/path diff --git a/.github/workflows/coverage-linux-without-intl.yml b/.github/workflows/coverage-linux-without-intl.yml index 956058c6889e4c..c8e0cafa0f7098 100644 --- a/.github/workflows/coverage-linux-without-intl.yml +++ b/.github/workflows/coverage-linux-without-intl.yml @@ -80,6 +80,6 @@ jobs: - name: Clean tmp run: rm -rf coverage/tmp && rm -rf out - name: Upload - uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3 + uses: codecov/codecov-action@39a2af19d997be74586469d4062e173ecae614f6 # v5.4.3+ with: directory: ./coverage diff --git a/.github/workflows/coverage-linux.yml b/.github/workflows/coverage-linux.yml index 3a7ad75d375875..4e620a7a848f24 100644 --- a/.github/workflows/coverage-linux.yml +++ b/.github/workflows/coverage-linux.yml @@ -80,6 +80,6 @@ jobs: - name: Clean tmp run: rm -rf coverage/tmp && rm -rf out - name: Upload - uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3 + uses: codecov/codecov-action@39a2af19d997be74586469d4062e173ecae614f6 # v5.4.3+ with: directory: ./coverage diff --git a/.github/workflows/coverage-windows.yml b/.github/workflows/coverage-windows.yml index 5acb24cfc707df..3b4848778bc273 100644 --- a/.github/workflows/coverage-windows.yml +++ b/.github/workflows/coverage-windows.yml @@ -71,6 +71,6 @@ jobs: - name: Clean tmp run: npx rimraf ./coverage/tmp - name: Upload - uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3 + uses: codecov/codecov-action@39a2af19d997be74586469d4062e173ecae614f6 # v5.4.3+ with: directory: ./coverage diff --git a/.github/workflows/lint-release-proposal.yml b/.github/workflows/lint-release-proposal.yml index 88bbd5d4e9b068..101fa9964f0c73 100644 --- a/.github/workflows/lint-release-proposal.yml +++ b/.github/workflows/lint-release-proposal.yml @@ -39,7 +39,7 @@ jobs: EXPECTED_TRAILER="^$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/pull/[[:digit:]]+\$" echo "Expected trailer format: $EXPECTED_TRAILER" PR_URL="$(git --no-pager log -1 --format='%(trailers:key=PR-URL,valueonly)')" - echo "Actual: $ACTUAL" + echo "Actual: $PR_URL" echo "$PR_URL" | grep -E -q "$EXPECTED_TRAILER" PR_HEAD="$(gh pr view "$PR_URL" --json headRefOid -q .headRefOid)" diff --git a/.mailmap b/.mailmap index c0aac7e9647a69..0422fba4472601 100644 --- a/.mailmap +++ b/.mailmap @@ -6,6 +6,7 @@ Abdirahim Musse <33973272+abmusse@users.noreply.github Abe Fettig Abhimanyu Vashisht Adam Langley +Aditi Singh Akhil Marsonya Akhil Marsonya <16393876+marsonya@users.noreply.github.com> Akito Ito diff --git a/BUILDING.md b/BUILDING.md index cedf2cb5e88d1e..d264bf79f3aab7 100644 --- a/BUILDING.md +++ b/BUILDING.md @@ -281,6 +281,11 @@ export CXX=g++-12 make -j4 ``` +> \[!IMPORTANT] +> If you face a compilation error during this process such as +> `error: no matching conversion for functional-style cast from 'unsigned int' to 'TypeIndex'` +> Make sure to use a `g++` or `clang` version compatible with C++20. 
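+> A quick way to check this locally (a sketch, assuming `g++` is the compiler
+> the build will pick up; substitute `clang++` if that is what you use) is to
+> ask the compiler to accept an empty C++20 program:
+>
+> ```bash
+> # Show which compiler and version the build will use.
+> g++ --version
+> # Try compiling an empty program with -std=c++20; if this prints "C++20 OK",
+> # the compiler accepts the C++20 flag required here.
+> echo 'int main() {}' | g++ -std=c++20 -x c++ - -o /dev/null && echo 'C++20 OK'
+> ```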
+ We can speed up the builds by using [Ninja](https://ninja-build.org/). For more information, see [Building Node.js with Ninja](doc/contributing/building-node-with-ninja.md). diff --git a/CHANGELOG.md b/CHANGELOG.md index 29808540ab2387..9a1e5cb6b2ee24 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -37,7 +37,8 @@ release. -22.18.0
+22.19.0
+22.18.0
22.17.1
22.17.0
22.16.0
diff --git a/LICENSE b/LICENSE index ac9e2ef9b13f5d..bed41c4c05b51a 100644 --- a/LICENSE +++ b/LICENSE @@ -2100,7 +2100,7 @@ The externally maintained libraries used by Node.js are: - inspector_protocol, located at deps/inspector_protocol, is licensed as follows: """ - // Copyright 2016 The Chromium Authors. All rights reserved. + // Copyright 2016 The Chromium Authors. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are @@ -2639,3 +2639,28 @@ The externally maintained libraries used by Node.js are: OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ + +- sonic-boom, located at lib/internal/streams/fast-utf8-stream.js, is licensed as follows: + """ + MIT License + + Copyright (c) 2017 Matteo Collina + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. + """ diff --git a/Makefile b/Makefile index 915556b3230e25..6d173a633266dc 100644 --- a/Makefile +++ b/Makefile @@ -1661,7 +1661,7 @@ HAS_DOCKER ?= $(shell command -v docker > /dev/null 2>&1; [ $$? -eq 0 ] && echo .PHONY: gen-openssl ifeq ($(HAS_DOCKER), 1) -DOCKER_COMMAND ?= docker run -it -v $(PWD):/node +DOCKER_COMMAND ?= docker run --rm -u $(shell id -u) -v $(PWD):/node IS_IN_WORKTREE = $(shell grep '^gitdir: ' $(PWD)/.git 2>/dev/null) GIT_WORKTREE_COMMON = $(shell git rev-parse --git-common-dir) DOCKER_COMMAND += $(if $(IS_IN_WORKTREE), -v $(GIT_WORKTREE_COMMON):$(GIT_WORKTREE_COMMON)) diff --git a/README.md b/README.md index 6e3cb0e3d7843a..9b728ffd86547d 100644 --- a/README.md +++ b/README.md @@ -95,37 +95,27 @@ _docs_ subdirectory. Version-specific documentation is also at ### Verifying binaries -Download directories contain a `SHASUMS256.txt` file with SHA checksums for the -files. +Download directories contain a `SHASUMS256.txt.asc` file with SHA checksums for the +files and the releaser PGP signature. -To download `SHASUMS256.txt` using `curl`: +You can get a trusted keyring from nodejs/release-keys, e.g. using `curl`: ```bash -curl -O https://nodejs.org/dist/vx.y.z/SHASUMS256.txt +curl -fsLo "/path/to/nodejs-keyring.kbx" "https://github.com/nodejs/release-keys/raw/HEAD/gpg/pubring.kbx" ``` -To check that downloaded files match the checksum, use `sha256sum`: +Alternatively, you can import the releaser keys in your default keyring, see +[Release keys](#release-keys) for commands to how to do that. 
-```bash -sha256sum -c SHASUMS256.txt --ignore-missing -``` - -For Current and LTS, the GPG detached signature of `SHASUMS256.txt` is in -`SHASUMS256.txt.sig`. You can use it with `gpg` to verify the integrity of -`SHASUMS256.txt`. You will first need to import -[the GPG keys of individuals authorized to create releases](#release-keys). - -See [Release keys](#release-keys) for commands to import active release keys. - -Next, download the `SHASUMS256.txt.sig` for the release: +Then, you can verify the files you've downloaded locally +(if you're using your default keyring, pass `--keyring="${GNUPGHOME:-~/.gnupg}/pubring.kbx"`): ```bash -curl -O https://nodejs.org/dist/vx.y.z/SHASUMS256.txt.sig +curl -fsO "https://nodejs.org/dist/${VERSION}/SHASUMS256.txt.asc" \ +&& gpgv --keyring="/path/to/nodejs-keyring.kbx" --output SHASUMS256.txt < SHASUMS256.txt.asc \ +&& shasum --check SHASUMS256.txt --ignore-missing ``` -Then use `gpg --verify SHASUMS256.txt.sig SHASUMS256.txt` to verify -the file's signature. - ## Building Node.js See [BUILDING.md](BUILDING.md) for instructions on how to build Node.js from @@ -289,6 +279,8 @@ For information about the governance of the Node.js project, see **Abdirahim Musse** <> * [addaleax](https://github.com/addaleax) - **Anna Henningsen** <> (she/her) +* [Aditi-1400](https://github.com/Aditi-1400) - + **Aditi Singh** <> (she/her) * [aduh95](https://github.com/aduh95) - **Antoine du Hamel** <> (he/him) - [Support me](https://github.com/sponsors/aduh95) * [anonrig](https://github.com/anonrig) - @@ -383,8 +375,6 @@ For information about the governance of the Node.js project, see **Chengzhong Wu** <> (he/him) * [lemire](https://github.com/lemire) - **Daniel Lemire** <> -* [Linkgoron](https://github.com/Linkgoron) - - **Nitzan Uziely** <> * [LiviaMedeiros](https://github.com/LiviaMedeiros) - **LiviaMedeiros** <> * [ljharb](https://github.com/ljharb) - @@ -424,7 +414,7 @@ For information about the governance of the Node.js project, see * [Qard](https://github.com/Qard) - **Stephen Belanger** <> (he/him) * [RafaelGSS](https://github.com/RafaelGSS) - - **Rafael Gonzaga** <> (he/him) + **Rafael Gonzaga** <> (he/him) - [Support me](https://github.com/sponsors/RafaelGSS) * [RaisinTen](https://github.com/RaisinTen) - **Darshan Sen** <> (he/him) - [Support me](https://github.com/sponsors/RaisinTen) * [richardlau](https://github.com/richardlau) - @@ -597,6 +587,8 @@ For information about the governance of the Node.js project, see **Lance Ball** <> (he/him) * [Leko](https://github.com/Leko) - **Shingo Inoue** <> (he/him) +* [Linkgoron](https://github.com/Linkgoron) - + **Nitzan Uziely** <> * [lucamaraschi](https://github.com/lucamaraschi) - **Luca Maraschi** <> (he/him) * [lundibundi](https://github.com/lundibundi) - @@ -806,8 +798,11 @@ Primary GPG keys for Node.js Releasers (some Releasers sign with subkeys): * **Ulises Gascón** <> `A363A499291CBBC940DD62E41F10027AF002F8B0` -To import the full set of trusted release keys (including subkeys possibly used -to sign releases): +You can use the keyring the project maintains at +. +Alternatively, you can import them from a public key server. Have in mind that +the project cannot guarantee the availability of the server nor the keys on +that server. ```bash gpg --keyserver hkps://keys.openpgp.org --recv-keys 5BE8A3F6C8A5C01D106C0AD820B1A390B168D356 # Antoine du Hamel @@ -867,6 +862,9 @@ verify a downloaded file. 
* **Timothy J Fontaine** <> `7937DFD2AB06298B2293C3187D33FF9D0246406D` +The project maintains a keyring able to verify all past releases of Node.js at +. + ### Security release stewards @@ -882,6 +880,9 @@ releases on a rotation basis as outlined in the * [Datadog](https://www.datadoghq.com/) * [bengl](https://github.com/bengl) - **Bryan English** <> (he/him) +* [HeroDevs](https://www.herodevs.com/) + * [marco-ippolito](https://github.com/marco-ippolito) - + **Marco Ippolito** <> (he/him) * [NodeSource](https://nodesource.com/) * [juanarbol](https://github.com/juanarbol) - **Juan José Arboleda** <> (he/him) diff --git a/SECURITY.md b/SECURITY.md index 9650e812914f81..d5cc79095371e3 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -109,6 +109,22 @@ does not trust is considered a vulnerability: the correct use of Node.js APIs. * The unavailability of the runtime, including the unbounded degradation of its performance. +* Memory leaks qualify as vulnerabilities when all of the following criteria are met: + * The API is being correctly used. + * The API doesn't have a warning against its usage in a production environment. + * The API is public and documented. + * The API is on stable (2.0) status. + * The memory leak is significant enough to cause a denial of service quickly + or in a context not controlled by the user (for example, HTTP parsing). + * The memory leak is directly exploitable by an untrusted source without requiring application mistakes. + * The leak cannot be reasonably mitigated through standard operational practices (like process recycling). + * The leak occurs deterministically under normal usage patterns rather than edge cases. + * The leak occurs at a rate that would cause practical resource exhaustion within a practical timeframe under + typical workloads. + * The attack demonstrates [asymmetric resource consumption](https://cwe.mitre.org/data/definitions/405.html), + where the attacker expends significantly fewer resources than what's required by the server to process the + attack. Attacks requiring comparable resources on the attacker's side (which can be mitigated through common + practices like rate limiting) may not qualify. If Node.js loads configuration files or runs code by default (without a specific request from the user), and this is not documented, it is considered a @@ -125,7 +141,7 @@ Vulnerabilities related to this case may be fixed by a documentation update. * The data received from the remote end of outbound network connections that are created through the use of Node.js APIs and which is transformed/validated by Node.js before being passed - to the application EXCEPT with respect to payload length. Node.js trusts + to the application **except** with respect to payload length. Node.js trusts that applications make connections/requests which will avoid payload sizes that will result in a Denial of Service. * HTTP APIs (all flavors) client APIs. @@ -146,9 +162,9 @@ then untrusted input must not lead to arbitrary JavaScript code execution. **Node.js trusts everything else**. Examples include: -* The developers and infrastructure that runs it. +* The developers and infrastructure that run it. * The operating system that Node.js is running under and its configuration, - along with anything under control of the operating system. + along with anything under the control of the operating system. * The code it is asked to run, including JavaScript, WASM and native code, even if said code is dynamically loaded, e.g., all dependencies installed from the npm registry. 
@@ -163,6 +179,11 @@ then untrusted input must not lead to arbitrary JavaScript code execution. See . * The `node:wasi` module does not currently provide the comprehensive file system security properties provided by some WASI runtimes. +* The execution path is trusted. Additionally, Node.js path manipulation functions + such as `path.join()` and `path.normalize()` trust their input. Reports about issues + related to these functions that rely on unsanitized input are not considered vulnerabilities + requiring CVEs, as it's the user's responsibility to sanitize path inputs according to + their security requirements. Any unexpected behavior from the data manipulation from Node.js Internal functions may be considered a vulnerability if they are exploitable via @@ -184,12 +205,12 @@ the community they pose. * Node.js provides APIs to validate handling of Subject Alternative Names (SANs) in certificates used to connect to a TLS/SSL endpoint. If certificates can be - crafted which result in incorrect validation by the Node.js APIs that is + crafted that result in incorrect validation by the Node.js APIs that is considered a vulnerability. #### Inconsistent Interpretation of HTTP Requests (CWE-444) -* Node.js provides APIs to accept http connections. Those APIs parse the +* Node.js provides APIs to accept HTTP connections. Those APIs parse the headers received for a connection and pass them on to the application. Bugs in parsing those headers which can result in request smuggling are considered vulnerabilities. @@ -202,9 +223,9 @@ the community they pose. #### External Control of System or Configuration Setting (CWE-15) -* If Node.js automatically loads a configuration file which is not documented +* If Node.js automatically loads a configuration file that is not documented and modification of that configuration can affect the confidentiality of - data protected using the Node.js APIs this is considered a vulnerability. + data protected using the Node.js APIs, then this is considered a vulnerability. ### Examples of non-vulnerabilities @@ -227,7 +248,7 @@ the community they pose. #### External Control of System or Configuration Setting (CWE-15) -* If Node.js automatically loads a configuration file which is documented +* If Node.js automatically loads a configuration file that is documented, no scenario that requires modification of that configuration file is considered a vulnerability. @@ -247,9 +268,9 @@ the community they pose. ## Assessing experimental features reports -Experimental features are eligible to reports as any other stable feature of -Node.js. They will also be susceptible to receiving the same severity score -as any other stable feature. +Experimental features are eligible for security reports just like any other +stable feature of Node.js. They may also receive the same severity score that a +stable feature would. ## Receiving security updates diff --git a/benchmark/_cli.js b/benchmark/_cli.js index 583da8f6b20622..990e37b67ce1ab 100644 --- a/benchmark/_cli.js +++ b/benchmark/_cli.js @@ -140,8 +140,8 @@ CLI.prototype.getCpuCoreSetting = function() { const isValid = /^(\d+(-\d+)?)(,\d+(-\d+)?)*$/.test(value); if (!isValid) { throw new Error(` - Invalid CPUSET format: "${value}". Please use a single core number (e.g., "0"), - a range of cores (e.g., "0-3"), or a list of cores/ranges + Invalid CPUSET format: "${value}". 
Please use a single core number (e.g., "0"), + a range of cores (e.g., "0-3"), or a list of cores/ranges (e.g., "0,2,4" or "0-2,4").\n\n${this.usage} `); } diff --git a/benchmark/calibrate-n.js b/benchmark/calibrate-n.js new file mode 100644 index 00000000000000..16bb512cc50a8c --- /dev/null +++ b/benchmark/calibrate-n.js @@ -0,0 +1,292 @@ +'use strict'; + +const path = require('node:path'); +const { fork } = require('node:child_process'); +const fs = require('node:fs'); +const { styleText } = require('node:util'); + +const DEFAULT_RUNS = 30; // Number of runs for each n value +const CV_THRESHOLD = 0.05; // 5% coefficient of variation threshold +const MAX_N_INCREASE = 6; // Maximum number of times to increase n (10**6) +const INCREASE_FACTOR = 10; // Factor by which to increase n + +const args = process.argv.slice(2); +if (args.length === 0) { + console.log(` +Usage: node calibrate-n.js [options] + +Options: + --runs=N Number of runs for each n value (default: ${DEFAULT_RUNS}) + --cv-threshold=N Target coefficient of variation threshold (default: ${CV_THRESHOLD}) + --max-increases=N Maximum number of n increases to try (default: ${MAX_N_INCREASE}) + --start-n=N Initial n value to start with (default: autodetect) + --increase=N Factor by which to increase n (default: ${INCREASE_FACTOR}) + +Example: + node calibrate-n.js buffers/buffer-compare.js + node calibrate-n.js --runs=10 --cv-threshold=0.02 buffers/buffer-compare.js + `); + process.exit(1); +} + +// Extract options +let benchmarkPath; +let runs = DEFAULT_RUNS; +let cvThreshold = CV_THRESHOLD; +let maxIncreases = MAX_N_INCREASE; +let startN = 10; +let increaseFactor = INCREASE_FACTOR; + +for (const arg of args) { + if (arg.startsWith('--runs=')) { + runs = parseInt(arg.substring(7), 10); + } else if (arg.startsWith('--cv-threshold=')) { + cvThreshold = parseFloat(arg.substring(14)); + } else if (arg.startsWith('--max-increases=')) { + maxIncreases = parseInt(arg.substring(15), 10); + if (isNaN(maxIncreases)) { + console.error(`Error: Invalid value for --max-increases. Using default: ${MAX_N_INCREASE}`); + maxIncreases = MAX_N_INCREASE; + } + } else if (arg.startsWith('--start-n=')) { + startN = parseInt(arg.substring(10), 10); + if (isNaN(startN)) { + console.error(`Error: Invalid value for --start-n. Using default: 10`); + startN = 10; + } + } else if (arg.startsWith('--increase=')) { + increaseFactor = parseInt(arg.substring(11), 10); + if (isNaN(increaseFactor)) { + console.error(`Error: Invalid value for --increase. 
Using default: ${INCREASE_FACTOR}`); + increaseFactor = INCREASE_FACTOR; + } + } else { + benchmarkPath = arg; + } +} + +if (!benchmarkPath) { + console.error('Error: No benchmark path specified'); + process.exit(1); +} + +const fullBenchmarkPath = path.resolve(benchmarkPath); +if (!fs.existsSync(fullBenchmarkPath)) { + console.error(`Error: Benchmark file not found: ${fullBenchmarkPath}`); + process.exit(1); +} + +function calculateStats(values) { + const mean = values.reduce((sum, val) => sum + val, 0) / values.length; + + const squaredDiffs = values.map((val) => { + const diff = val - mean; + const squared = diff ** 2; + return squared; + }); + + const variance = squaredDiffs.reduce((sum, val) => sum + val, 0) / values.length; + const stdDev = Math.sqrt(variance); + const cv = stdDev / mean; + + return { mean, stdDev, cv, variance }; +} + +function runBenchmark(n) { + return new Promise((resolve, reject) => { + const child = fork( + fullBenchmarkPath, + [`n=${n}`], + { stdio: ['inherit', 'pipe', 'inherit', 'ipc'] }, + ); + + const results = []; + child.on('message', (data) => { + if (data.type === 'report' && data.rate && data.conf) { + results.push({ + rate: data.rate, + conf: data.conf, + }); + } + }); + + child.on('close', (code) => { + if (code !== 0) { + reject(new Error(`Benchmark exited with code ${code}`)); + } else { + resolve(results); + } + }); + }); +} + +async function main(n = startN) { + let increaseCount = 0; + let bestN = n; + let bestCV = Infinity; + let bestGroupStats = null; + + console.log(` +-------------------------------------------------------- +Benchmark: ${benchmarkPath} +-------------------------------------------------------- +What we are trying to find: The optimal number of iterations (n) +that produces consistent benchmark results without wasting time. + +How it works: +1. Run the benchmark multiple times with a specific n value +2. Group results by configuration +3. If overall CV is above 5% or any configuration has CV above 10%, increase n and try again + +Configuration: +- Starting n: ${n.toLocaleString()} iterations +- Runs per n value: ${runs} +- Target CV threshold: ${cvThreshold * 100}% (lower CV = more stable results) +- Max increases: ${maxIncreases} +- Increase factor: ${increaseFactor}x`); + + while (increaseCount < maxIncreases) { + console.log(`\nTesting with n=${n}:`); + + const resultsData = []; + for (let i = 0; i < runs; i++) { + const results = await runBenchmark(n); + // Each run might return multiple results (one per configuration) + if (Array.isArray(results) && results.length > 0) { + resultsData.push(...results); + } else if (results) { + resultsData.push(results); + } + process.stdout.write('.'); + } + process.stdout.write('\n'); + + const groupedResults = {}; + resultsData.forEach((result) => { + if (!result || !result.conf) return; + + const confKey = JSON.stringify(result.conf); + groupedResults[confKey] ||= { + conf: result.conf, + rates: [], + }; + + groupedResults[confKey].rates.push(result.rate); + }); + + const groupStats = []; + for (const [confKey, group] of Object.entries(groupedResults)) { + console.log(`\nConfiguration: ${JSON.stringify(group.conf)}`); + + const stats = calculateStats(group.rates); + console.log(` CV: ${(stats.cv * 100).toFixed(2)}% (lower values mean more stable results)`); + + const isStable = stats.cv <= cvThreshold; + console.log(` Stability: ${isStable ? 
+ styleText(['bold', 'green'], '✓ Stable') : + styleText(['bold', 'red'], '✗ Unstable')}`); + + groupStats.push({ + confKey, + stats, + isStable, + }); + } + + if (groupStats.length > 0) { + // Check if any configuration has CV > 10% (too unstable) + const tooUnstableConfigs = groupStats.filter((g) => g.stats.cv > 0.10); + + const avgCV = groupStats.reduce((sum, g) => sum + g.stats.cv, 0) / groupStats.length; + console.log(`\nOverall average CV: ${(avgCV * 100).toFixed(2)}%`); + + const isOverallStable = avgCV < CV_THRESHOLD; + const hasVeryUnstableConfigs = tooUnstableConfigs.length > 0; + + // Check if overall CV is below CV_THRESHOLD and no configuration has CV > 10% + if (isOverallStable && !hasVeryUnstableConfigs) { + console.log(styleText(['bold', 'green'], ` ✓ Overall CV is below 5% and no configuration has CV above 10%`)); + } else { + if (!isOverallStable) { + console.log(styleText(['bold', 'red'], ` ✗ Overall CV (${(avgCV * 100).toFixed(2)}%) is above 5%`)); + } + if (hasVeryUnstableConfigs) { + console.log(styleText(['bold', 'red'], ` ✗ ${tooUnstableConfigs.length} configuration(s) have CV above 10%`)); + } + } + + if (avgCV < bestCV || !bestGroupStats) { + bestN = n; + bestCV = avgCV; + + bestGroupStats = []; + for (const group of Object.values(groupedResults)) { + if (group.rates.length >= 3) { + const stats = calculateStats(group.rates); + bestGroupStats.push({ + conf: group.conf, + stats: stats, + isStable: stats.cv <= 0.10, + }); + } + } + console.log(` → New best n: ${n} with average CV: ${(avgCV * 100).toFixed(2)}%`); + } else { + console.log(` → Current best n remains: ${bestN} with average CV: ${(bestCV * 100).toFixed(2)}%`); + } + } + + // Check if we've reached acceptable stability based on new criteria + // 1. Overall CV should be below CV_THRESHOLD + // 2. No configuration should have a CV greater than 10% + const avgCV = groupStats.length > 0 ? + groupStats.reduce((sum, g) => sum + g.stats.cv, 0) / groupStats.length : Infinity; + const hasUnstableConfig = groupStats.some((g) => g.stats.cv > 0.10); + const isOverallStable = avgCV < CV_THRESHOLD; + + if (isOverallStable && !hasUnstableConfig) { + console.log(`\n✓ Found optimal n=${n} (Overall CV=${(avgCV * 100).toFixed(2)}% < 5% and no configuration has CV > 10%)`); + console.log('\nFinal CV for each configuration:'); + groupStats.forEach((g) => { + console.log(` ${JSON.stringify(groupedResults[g.confKey].conf)}: ${(g.stats.cv * 100).toFixed(2)}%`); + }); + + return n; + } + + increaseCount++; + n *= increaseFactor; + } + + if (increaseCount >= maxIncreases) { + const finalAvgCV = bestGroupStats && bestGroupStats.length > 0 ? 
+ bestGroupStats.reduce((sum, g) => sum + g.stats.cv, 0) / bestGroupStats.length : Infinity; + + console.log(`Maximum number of increases (${maxIncreases}) reached without achieving target stability`); + console.log(`Best n found: ${bestN} with average CV=${(finalAvgCV * 100).toFixed(2)}%`); + console.log(`\nCV by configuration at best n:`); + + if (bestGroupStats) { + bestGroupStats.forEach((g) => { + if (g.conf) { + console.log(` ${JSON.stringify(g.conf)}: ${(g.stats.cv * 100).toFixed(2)}%`); + if (g.stats.cv > cvThreshold) { + console.log(` ⚠️ This configuration is above the target threshold of ${cvThreshold * 100}%`); + } + } + }); + } + } + + console.log(` +Recommendation: You might want to try increasing --max-increases to +continue testing with larger n values, or adjust --cv-threshold to +accept the current best result, or investigate if specific configurations +are contributing to instability.`); + return bestN; +} + +main().catch((err) => { + console.error('Error:', err); + process.exit(1); +}); diff --git a/benchmark/diagnostics_channel/subscribe.js b/benchmark/diagnostics_channel/subscribe.js index 1415054588c4b1..c7cffac5edb7e1 100644 --- a/benchmark/diagnostics_channel/subscribe.js +++ b/benchmark/diagnostics_channel/subscribe.js @@ -3,17 +3,16 @@ const common = require('../common.js'); const dc = require('diagnostics_channel'); const bench = common.createBenchmark(main, { - n: [1e8], + n: [1e5], }); -function noop() {} +function noop() { } function main({ n }) { - const channel = dc.channel('channel.0'); bench.start(); for (let i = 0; i < n; i++) { - channel.subscribe(noop); + dc.subscribe('channel.0', noop); } bench.end(n); } diff --git a/benchmark/es/spread-assign.js b/benchmark/es/spread-assign.js deleted file mode 100644 index f0dcd56bb606b1..00000000000000 --- a/benchmark/es/spread-assign.js +++ /dev/null @@ -1,42 +0,0 @@ -'use strict'; - -const common = require('../common.js'); -const util = require('util'); - -const bench = common.createBenchmark(main, { - method: ['spread', 'assign', '_extend'], - count: [5, 10, 20], - n: [1e6], -}); - -function main({ n, context, count, rest, method }) { - - const src = {}; - for (let n = 0; n < count; n++) - src[`p${n}`] = n; - - let obj; - - switch (method) { - case '_extend': - bench.start(); - for (let i = 0; i < n; i++) - obj = util._extend({}, src); - bench.end(n); - break; - case 'assign': - bench.start(); - for (let i = 0; i < n; i++) - obj = Object.assign({}, src); - bench.end(n); - break; - case 'spread': - bench.start(); - for (let i = 0; i < n; i++) - obj = { ...src }; // eslint-disable-line no-unused-vars - bench.end(n); - break; - default: - throw new Error('Unexpected method'); - } -} diff --git a/benchmark/fs/writefile-promises.js b/benchmark/fs/writefile-promises.js index 41c029051bc04d..f271d837115406 100644 --- a/benchmark/fs/writefile-promises.js +++ b/benchmark/fs/writefile-promises.js @@ -39,8 +39,17 @@ function main({ encodingType, duration, concurrent, size }) { let writes = 0; let waitConcurrent = 0; - const startedAt = Date.now(); - const endAt = startedAt + (duration * 1000); + let startedAt = Date.now(); + let endAt = startedAt + duration * 1000; + + // fs warmup + for (let i = 0; i < concurrent; i++) write(); + + writes = 0; + waitConcurrent = 0; + + startedAt = Date.now(); + endAt = startedAt + duration * 1000; bench.start(); @@ -59,7 +68,8 @@ function main({ encodingType, duration, concurrent, size }) { } function write() { - fs.promises.writeFile(`${filename}-${filesWritten++}`, chunk, encoding) + 
fs.promises + .writeFile(`${filename}-${filesWritten++}`, chunk, encoding) .then(() => afterWrite()) .catch((err) => afterWrite(err)); } @@ -72,7 +82,7 @@ function main({ encodingType, duration, concurrent, size }) { writes++; const benchEnded = Date.now() >= endAt; - if (benchEnded && (++waitConcurrent) === concurrent) { + if (benchEnded && ++waitConcurrent === concurrent) { stop(); } else if (!benchEnded) { write(); diff --git a/benchmark/misc/punycode.js b/benchmark/misc/punycode.js deleted file mode 100644 index a4fda4168da47c..00000000000000 --- a/benchmark/misc/punycode.js +++ /dev/null @@ -1,79 +0,0 @@ -'use strict'; - -const common = require('../common.js'); -let icu; -try { - icu = common.binding('icu'); -} catch { - // Continue regardless of error. -} -const punycode = require('punycode'); - -const bench = common.createBenchmark(main, { - method: ['punycode'].concat(icu !== undefined ? ['icu'] : []), - n: [1024], - val: [ - 'افغانستا.icom.museum', - 'الجزائر.icom.museum', - 'österreich.icom.museum', - 'বাংলাদেশ.icom.museum', - 'беларусь.icom.museum', - 'belgië.icom.museum', - 'българия.icom.museum', - 'تشادر.icom.museum', - '中国.icom.museum', - 'القمر.icom.museum', - 'κυπρος.icom.museum', - 'českárepublika.icom.museum', - 'مصر.icom.museum', - 'ελλάδα.icom.museum', - 'magyarország.icom.museum', - 'ísland.icom.museum', - 'भारत.icom.museum', - 'ايران.icom.museum', - 'éire.icom.museum', - 'איקו״ם.ישראל.museum', - '日本.icom.museum', - 'الأردن.icom.museum', - ], -}); - -function usingPunycode(val) { - punycode.toUnicode(punycode.toASCII(val)); -} - -function usingICU(val) { - icu.toUnicode(icu.toASCII(val)); -} - -function runPunycode(n, val) { - for (let i = 0; i < n; i++) - usingPunycode(val); - bench.start(); - for (let i = 0; i < n; i++) - usingPunycode(val); - bench.end(n); -} - -function runICU(n, val) { - bench.start(); - for (let i = 0; i < n; i++) - usingICU(val); - bench.end(n); -} - -function main({ n, val, method }) { - switch (method) { - case 'punycode': - runPunycode(n, val); - break; - case 'icu': - if (icu !== undefined) { - runICU(n, val); - break; - } - // fallthrough - default: - throw new Error(`Unexpected method "${method}"`); - } -} diff --git a/benchmark/run.js b/benchmark/run.js index ea0dc415e91ec6..4948292437fdde 100644 --- a/benchmark/run.js +++ b/benchmark/run.js @@ -3,6 +3,7 @@ const path = require('path'); const { spawn, fork } = require('node:child_process'); const CLI = require('./_cli.js'); +const { styleText } = require('node:util'); const cli = new CLI(`usage: ./node run.js [options] [--] ... Run each benchmark in the directory a single time, more than one @@ -16,6 +17,7 @@ const cli = new CLI(`usage: ./node run.js [options] [--] ... Default: 1 --set variable=value set benchmark variable (can be repeated) --format [simple|csv] optional value that specifies the output format + --track Display the time elapsed to run each benchmark file. test only run a single configuration from the options matrix all each benchmark category is run one after the other @@ -25,7 +27,7 @@ const cli = new CLI(`usage: ./node run.js [options] [--] ... --set CPUSET=0-2 Specifies that benchmarks should run on CPU cores 0 to 2. Note: The CPUSET format should match the specifications of the 'taskset' command on your system. 
-`, { arrayArgs: ['set', 'filter', 'exclude'] }); +`, { arrayArgs: ['set', 'filter', 'exclude'], boolArgs: ['track'] }); const benchmarks = cli.benchmarks(); @@ -107,7 +109,12 @@ async function run() { } while (runs-- > 0) { + const start = performance.now(); await runBenchmark(filename); + if (format !== 'csv' && cli.optional.track) { + const ms = styleText(['bold', 'yellow'], `${Math.round(performance.now() - start)}ms`); + console.log(`[${ms}] ${filename}`); + } } } } diff --git a/benchmark/sqlite/sqlite-is-transaction.js b/benchmark/sqlite/sqlite-is-transaction.js index e3325ccd3d10bf..3bfc896cf91cf3 100644 --- a/benchmark/sqlite/sqlite-is-transaction.js +++ b/benchmark/sqlite/sqlite-is-transaction.js @@ -16,7 +16,7 @@ function main(conf) { } let i; - let deadCodeElimination; + let deadCodeElimination = true; bench.start(); for (i = 0; i < conf.n; i += 1) diff --git a/benchmark/string_decoder/string-decoder.js b/benchmark/string_decoder/string-decoder.js index 8acb9c15bfd6f3..bcb4eace71b3ae 100644 --- a/benchmark/string_decoder/string-decoder.js +++ b/benchmark/string_decoder/string-decoder.js @@ -1,11 +1,12 @@ 'use strict'; const common = require('../common.js'); const StringDecoder = require('string_decoder').StringDecoder; +const assert = require('node:assert'); const bench = common.createBenchmark(main, { encoding: ['ascii', 'utf8', 'base64-utf8', 'base64-ascii', 'utf16le'], - inLen: [32, 128, 1024, 4096], - chunkLen: [16, 64, 256, 1024], + inLen: [32, 128, 1024], + chunkLen: [16, 256, 1024], n: [25e5], }); @@ -75,10 +76,13 @@ function main({ encoding, inLen, chunkLen, n }) { const nChunks = chunks.length; + let avoidDeadCode; bench.start(); for (let i = 0; i < n; ++i) { + avoidDeadCode = ''; for (let j = 0; j < nChunks; ++j) - sd.write(chunks[j]); + avoidDeadCode += sd.write(chunks[j]); } bench.end(n); + assert.ok(avoidDeadCode); } diff --git a/benchmark/util/deprecate.js b/benchmark/util/deprecate.js new file mode 100644 index 00000000000000..a94a7606321003 --- /dev/null +++ b/benchmark/util/deprecate.js @@ -0,0 +1,36 @@ +'use strict'; + +const common = require('../common'); +const assert = require('assert'); + +const bench = common.createBenchmark(main, { + n: [1e5], + modifyPrototype: [1, 0], + emitWarningSync: [1, 0], +}, { + flags: ['--expose-internals'], +}); + +function simpleFunction(x) { + return x * 2 + (new Array(1000)).fill(0).map((_, i) => i).reduce((a, b) => a + b, 0); +} + +function main({ n, modifyPrototype, emitWarningSync }) { + const { deprecate } = require('internal/util'); + + const fn = deprecate( + simpleFunction, + 'This function is deprecated', + 'DEP0000', + emitWarningSync, + !!modifyPrototype, + ); + + let sum = 0; + bench.start(); + for (let i = 0; i < n; ++i) { + sum += fn(i); + } + bench.end(n); + assert.ok(sum); +} diff --git a/common.gypi b/common.gypi index 20135003dd040e..7780ae106b479c 100644 --- a/common.gypi +++ b/common.gypi @@ -38,7 +38,7 @@ # Reset this number to 0 on major V8 upgrades. # Increment by one for each non-official patch applied to deps/v8. 
- 'v8_embedder_string': '-node.27', + 'v8_embedder_string': '-node.29', ##### V8 defaults for Node.js ##### diff --git a/deps/corepack/CHANGELOG.md b/deps/corepack/CHANGELOG.md index e4f0b185a73616..fdf937785ca5d4 100644 --- a/deps/corepack/CHANGELOG.md +++ b/deps/corepack/CHANGELOG.md @@ -1,5 +1,21 @@ # Changelog +## [0.34.0](https://github.com/nodejs/corepack/compare/v0.33.0...v0.34.0) (2025-07-19) + + +### ⚠ BREAKING CHANGES + +* drop Node.js 18.x and 23.x support + +### Features + +* update package manager versions ([#719](https://github.com/nodejs/corepack/issues/719)) ([7707ea7](https://github.com/nodejs/corepack/commit/7707ea7350c129ad3aae8ca08e9e80fcf164dcb6)) + + +### Miscellaneous Chores + +* remove Node.js 18.x and 23.x usage, add 24.x ([#718](https://github.com/nodejs/corepack/issues/718)) ([783a42f](https://github.com/nodejs/corepack/commit/783a42fbe35371964e9dde75e2263b179f53bc0c)) + ## [0.33.0](https://github.com/nodejs/corepack/compare/v0.32.0...v0.33.0) (2025-06-02) diff --git a/deps/corepack/README.md b/deps/corepack/README.md index 079746ee796366..7ddf1de40022cb 100644 --- a/deps/corepack/README.md +++ b/deps/corepack/README.md @@ -11,7 +11,7 @@ and pnpm without having to install them**. ### Default Installs -Corepack is [distributed by default with all recent Node.js versions](https://nodejs.org/api/corepack.html). +Corepack is distributed with Node.js from version 14.19.0 up to (but not including) 25.0.0. Run `corepack enable` to install the required Yarn and pnpm binaries on your path. ### Manual Installs diff --git a/deps/corepack/dist/lib/corepack.cjs b/deps/corepack/dist/lib/corepack.cjs index c6854077d0fa6b..78fb4372832e3b 100644 --- a/deps/corepack/dist/lib/corepack.cjs +++ b/deps/corepack/dist/lib/corepack.cjs @@ -21683,7 +21683,7 @@ function String2(descriptor, ...args) { } // package.json -var version = "0.33.0"; +var version = "0.34.0"; // sources/Engine.ts var import_fs9 = __toESM(require("fs")); @@ -21697,7 +21697,7 @@ var import_valid4 = __toESM(require_valid2()); var config_default = { definitions: { npm: { - default: "11.4.1+sha1.80350af543069991de20657ebcd07d9624cfad06", + default: "11.4.2+sha1.6f1519a03f7e04023a957a22b812832d0c4a4b33", fetchLatestFrom: { type: "npm", package: "npm" @@ -21734,7 +21734,7 @@ var config_default = { } }, pnpm: { - default: "10.11.0+sha1.4048eeefd564ff1ab248fac3e2854d38245fe2f1", + default: "10.13.1+sha1.aa8c167c4509c97519542ef77a09e4b8ab59fb6a", fetchLatestFrom: { type: "npm", package: "pnpm" @@ -21798,7 +21798,7 @@ var config_default = { package: "yarn" }, transparent: { - default: "4.9.1+sha224.4285002185abb91fe2b781f27fd1e078086c37a7b095f6ea4ee25971", + default: "4.9.2+sha224.b8e0b161ae590950fbda696e6f3ca071362768e5280c5fbfdadf064b", commands: [ [ "yarn", diff --git a/deps/corepack/package.json b/deps/corepack/package.json index 7bf3d9e6604a50..baa2a678c8b092 100644 --- a/deps/corepack/package.json +++ b/deps/corepack/package.json @@ -1,6 +1,6 @@ { "name": "corepack", - "version": "0.33.0", + "version": "0.34.0", "homepage": "https://github.com/nodejs/corepack#readme", "bugs": { "url": "https://github.com/nodejs/corepack/issues" @@ -10,7 +10,7 @@ "url": "https://github.com/nodejs/corepack.git" }, "engines": { - "node": "^18.17.1 || ^20.10.0 || >=22.11.0" + "node": "^20.10.0 || ^22.11.0 || >=24.0.0" }, "exports": { "./package.json": "./package.json" diff --git a/deps/googletest/src/gtest.cc b/deps/googletest/src/gtest.cc index 9300e5c0945dd0..cd218c9b0b516b 100644 --- a/deps/googletest/src/gtest.cc +++ 
b/deps/googletest/src/gtest.cc @@ -713,7 +713,7 @@ std::string UnitTestOptions::GetAbsolutePathToOutputFile() { const char* const gtest_output_flag = s.c_str(); std::string format = GetOutputFormat(); - if (format.empty()) format = std::string(kDefaultOutputFormat); + if (format.empty()) format = kDefaultOutputFormat; const char* const colon = strchr(gtest_output_flag, ':'); if (colon == nullptr) @@ -3305,6 +3305,7 @@ bool ShouldUseColor(bool stdout_is_tty) { const bool term_supports_color = term != nullptr && (String::CStringEquals(term, "xterm") || String::CStringEquals(term, "xterm-color") || + String::CStringEquals(term, "xterm-ghostty") || String::CStringEquals(term, "xterm-kitty") || String::CStringEquals(term, "alacritty") || String::CStringEquals(term, "screen") || @@ -4354,8 +4355,8 @@ void XmlUnitTestResultPrinter::OutputXmlTestResult(::std::ostream* stream, internal::FormatCompilerIndependentFileLocation(part.file_name(), part.line_number()); const std::string summary = location + "\n" + part.summary(); - *stream << " "; + *stream << " "; const std::string detail = location + "\n" + part.message(); OutputXmlCDataSection(stream, RemoveInvalidXmlCharacters(detail).c_str()); *stream << "\n"; @@ -6091,7 +6092,11 @@ bool UnitTestImpl::RunAllTests() { constexpr char kNoTestsSelectedMessage[] = "No tests were selected to run. Please make sure at least one test " "exists and is not disabled! If the test is sharded, you may have " - "defined more shards than test cases, which is wasteful."; + "defined more shards than test cases, which is wasteful. If you also " + "defined --gtest_filter, that filter is taken into account, so " + "shards with no matching test cases will hit this error. Either " + "disable sharding, set --gtest_fail_if_no_test_selected=false, or " + "remove the filter to resolve this error."; ColoredPrintf(GTestColor::kRed, "%s\n", kNoTestsSelectedMessage); return false; } diff --git a/deps/openssl/config/Makefile b/deps/openssl/config/Makefile index 48d2af80019150..8fbc201ec44001 100644 --- a/deps/openssl/config/Makefile +++ b/deps/openssl/config/Makefile @@ -37,9 +37,11 @@ OPSSL_SRC = ../openssl # Header files generated with Configure #INT_CFGS = bn_conf.h dso_conf.h INT_CFG_DIR = $(OPSSL_SRC)/include/crypto -GEN_HEADERS = asn1 asn1t bio cmp cms configuration conf crmf crypto ct err \ - ess fipskey lhash ocsp opensslv pkcs12 pkcs7 safestack srp ssl \ - ui x509 x509v3 x509_vfy conf +GEN_HEADERS = asn1 asn1t bio comp cmp cms conf configuration core_names crmf \ + crypto ct err ess fipskey lhash ocsp opensslv pkcs12 pkcs7 \ + safestack srp ssl ui x509_acert x509 x509v3 x509_vfy + +INTERNAL_GEN_HEADERS = param_names CRYPTO_GEN_HEADERS = bn_conf dso_conf @@ -52,24 +54,24 @@ all: $(ASM_ARCHS) $(NO_ASM_ARCHS) generate_headers # Configure and generate openssl asm files for each archs $(ASM_ARCHS): cd $(OPSSL_SRC); $(NO_WARN_ENV) CC=$(CC) $(PERL) $(CONFIGURE) $(COPTS) $@; - $(PERL) -w -I$(OPSSL_SRC) $(GENERATE) asm $@ "${GEN_HEADERS}" "${CRYPTO_GEN_HEADERS}" + $(PERL) -w -I$(OPSSL_SRC) $(GENERATE) asm $@ "${GEN_HEADERS}" "${CRYPTO_GEN_HEADERS}" "${INTERNAL_GEN_HEADERS}" # Confgure asm_avx2 and generate upto avx2 support cd $(OPSSL_SRC); $(NO_WARN_ENV) CC=$(FAKE_GCC) $(PERL) $(CONFIGURE) \ $(COPTS) $@; - $(PERL) -w -I$(OPSSL_SRC) $(GENERATE) asm_avx2 $@ "${GEN_HEADERS}" "${CRYTO_GEN_HEADERS}" + $(PERL) -w -I$(OPSSL_SRC) $(GENERATE) asm_avx2 $@ "${GEN_HEADERS}" "${CRYTO_GEN_HEADERS}" "${INTERNAL_GEN_HEADERS}" # Configure no-asm and generate no-asm sources cd $(OPSSL_SRC); 
$(NO_WARN_ENV) $(PERL) $(CONFIGURE) $(COPTS) \ no-asm $@; - $(PERL) -w -I$(OPSSL_SRC) $(GENERATE) no-asm $@ "${GEN_HEADERS}" "${CRYPTO_GEN_HEADERS}" + $(PERL) -w -I$(OPSSL_SRC) $(GENERATE) no-asm $@ "${GEN_HEADERS}" "${CRYPTO_GEN_HEADERS}" "${INTERNAL_GEN_HEADERS}" $(NO_ASM_ARCHS): # Configure no-asm and generate no-asm sources cd $(OPSSL_SRC); $(NO_WARN_ENV) $(PERL) $(CONFIGURE) $(COPTS) \ no-asm $@; - $(PERL) -w -I$(OPSSL_SRC) $(GENERATE) no-asm $@ "${GEN_HEADERS}" "${CRYPTO_GEN_HEADERS}" + $(PERL) -w -I$(OPSSL_SRC) $(GENERATE) no-asm $@ "${GEN_HEADERS}" "${CRYPTO_GEN_HEADERS}" "${INTERNAL_GEN_HEADERS}" generate_headers: - @$(PERL) -w -I$(OPSSL_SRC) ./generate_headers.pl "${GEN_HEADERS}" "${CRYPTO_GEN_HEADERS}" + @$(PERL) -w -I$(OPSSL_SRC) ./generate_headers.pl "${GEN_HEADERS}" "${CRYPTO_GEN_HEADERS}" "${INTERNAL_GEN_HEADERS}" clean: find archs \( -name \*.S -o -name \*.s -o -name \*.asm -o \ diff --git a/deps/openssl/config/Makefile_VC-WIN32 b/deps/openssl/config/Makefile_VC-WIN32 index fdbef72361b663..bb843a0c14f0ff 100644 --- a/deps/openssl/config/Makefile_VC-WIN32 +++ b/deps/openssl/config/Makefile_VC-WIN32 @@ -1,8 +1,8 @@ BLDDIR=. PERL=perl RM= rm -f -GENERATED_MANDATORY=include/crypto/bn_conf.h include/crypto/dso_conf.h include/openssl/asn1.h include/openssl/asn1t.h include/openssl/bio.h include/openssl/cmp.h include/openssl/cms.h include/openssl/conf.h include/openssl/configuration.h include/openssl/crmf.h include/openssl/crypto.h include/openssl/ct.h include/openssl/err.h include/openssl/ess.h include/openssl/fipskey.h include/openssl/lhash.h include/openssl/ocsp.h include/openssl/opensslv.h include/openssl/pkcs12.h include/openssl/pkcs7.h include/openssl/safestack.h include/openssl/srp.h include/openssl/ssl.h include/openssl/ui.h include/openssl/x509.h include/openssl/x509_vfy.h include/openssl/x509v3.h providers/common/include/prov/der_digests.h providers/common/include/prov/der_dsa.h providers/common/include/prov/der_ec.h providers/common/include/prov/der_ecx.h providers/common/include/prov/der_rsa.h providers/common/include/prov/der_rsa.h providers/common/include/prov/der_sm2.h providers/common/include/prov/der_wrap.h -GENERATED=apps/CA.pl apps/openssl.rc apps/progs.h apps/tsget.pl crypto/aes/aes-586.asm crypto/aes/aesni-x86.asm crypto/aes/vpaes-x86.asm crypto/bf/bf-586.asm crypto/bn/bn-586.asm crypto/bn/co-586.asm crypto/bn/x86-gf2m.asm crypto/bn/x86-mont.asm crypto/buildinf.h crypto/camellia/cmll-x86.asm crypto/chacha/chacha-x86.asm crypto/des/crypt586.asm crypto/des/des-586.asm crypto/ec/ecp_nistz256-x86.asm crypto/md5/md5-586.asm crypto/modes/ghash-x86.asm crypto/poly1305/poly1305-x86.asm crypto/rc4/rc4-586.asm crypto/ripemd/rmd-586.asm crypto/sha/sha1-586.asm crypto/sha/sha256-586.asm crypto/sha/sha512-586.asm crypto/whrlpool/wp-mmx.asm crypto/x86cpuid.asm engines/capi.def engines/dasync.def engines/e_padlock-x86.asm engines/ossltest.def engines/padlock.def libcrypto.def libcrypto.rc libssl.def libssl.rc test/buildtest_aes.c test/buildtest_asn1.c test/buildtest_asn1t.c test/buildtest_async.c test/buildtest_bio.c test/buildtest_blowfish.c test/buildtest_bn.c test/buildtest_buffer.c test/buildtest_camellia.c test/buildtest_cast.c test/buildtest_cmac.c test/buildtest_cms.c test/buildtest_comp.c test/buildtest_conf.c test/buildtest_conf_api.c test/buildtest_crypto.c test/buildtest_ct.c test/buildtest_des.c test/buildtest_dh.c test/buildtest_dsa.c test/buildtest_dtls1.c test/buildtest_e_os2.c test/buildtest_ebcdic.c test/buildtest_ec.c test/buildtest_ecdh.c 
test/buildtest_ecdsa.c test/buildtest_engine.c test/buildtest_evp.c test/buildtest_hmac.c test/buildtest_idea.c test/buildtest_kdf.c test/buildtest_lhash.c test/buildtest_md4.c test/buildtest_md5.c test/buildtest_mdc2.c test/buildtest_modes.c test/buildtest_obj_mac.c test/buildtest_objects.c test/buildtest_ocsp.c test/buildtest_opensslv.c test/buildtest_ossl_typ.c test/buildtest_pem.c test/buildtest_pem2.c test/buildtest_pkcs12.c test/buildtest_pkcs7.c test/buildtest_rand.c test/buildtest_rand_drbg.c test/buildtest_rc2.c test/buildtest_rc4.c test/buildtest_ripemd.c test/buildtest_rsa.c test/buildtest_safestack.c test/buildtest_seed.c test/buildtest_sha.c test/buildtest_srp.c test/buildtest_srtp.c test/buildtest_ssl.c test/buildtest_ssl2.c test/buildtest_stack.c test/buildtest_store.c test/buildtest_symhacks.c test/buildtest_tls1.c test/buildtest_ts.c test/buildtest_txt_db.c test/buildtest_ui.c test/buildtest_whrlpool.c test/buildtest_x509.c test/buildtest_x509_vfy.c test/buildtest_x509v3.c tools/c_rehash.pl providers/common/der/der_digests_gen.c providers/common/der/der_dsa_gen.c providers/common/der/der_ec_gen.c providers/common/der/der_ecx_gen.c providers/common/der/der_rsa_gen.c providers/common/der/der_sm2_gen.c providers/common/der/der_wrap_gen.c +GENERATED_MANDATORY=crypto/params_idx.c include/crypto/bn_conf.h include/crypto/dso_conf.h include/internal/param_names.h include/openssl/asn1.h include/openssl/asn1t.h include/openssl/bio.h include/openssl/cmp.h include/openssl/comp.h include/openssl/cms.h include/openssl/conf.h include/openssl/configuration.h include/openssl/core_names.h include/openssl/crmf.h include/openssl/crypto.h include/openssl/ct.h include/openssl/err.h include/openssl/ess.h include/openssl/fipskey.h include/openssl/lhash.h include/openssl/ocsp.h include/openssl/opensslv.h include/openssl/pkcs12.h include/openssl/pkcs7.h include/openssl/safestack.h include/openssl/srp.h include/openssl/ssl.h include/openssl/ui.h include/openssl/x509.h include/openssl/x509_acert.h include/openssl/x509_vfy.h include/openssl/x509v3.h providers/common/include/prov/der_digests.h providers/common/include/prov/der_dsa.h providers/common/include/prov/der_ec.h providers/common/include/prov/der_ecx.h providers/common/include/prov/der_ml_dsa.h providers/common/include/prov/der_rsa.h providers/common/include/prov/der_slh_dsa.h providers/common/include/prov/der_rsa.h providers/common/include/prov/der_sm2.h providers/common/include/prov/der_wrap.h +GENERATED=apps/CA.pl apps/openssl.rc apps/progs.h apps/tsget.pl crypto/aes/aes-586.asm crypto/aes/aesni-x86.asm crypto/aes/vpaes-x86.asm crypto/bf/bf-586.asm crypto/bn/bn-586.asm crypto/bn/co-586.asm crypto/bn/x86-gf2m.asm crypto/bn/x86-mont.asm crypto/buildinf.h crypto/camellia/cmll-x86.asm crypto/chacha/chacha-x86.asm crypto/des/crypt586.asm crypto/des/des-586.asm crypto/ec/ecp_nistz256-x86.asm crypto/md5/md5-586.asm crypto/modes/ghash-x86.asm crypto/params_idx.c crypto/poly1305/poly1305-x86.asm crypto/rc4/rc4-586.asm crypto/ripemd/rmd-586.asm crypto/sha/sha1-586.asm crypto/sha/sha256-586.asm crypto/sha/sha512-586.asm crypto/whrlpool/wp-mmx.asm crypto/x86cpuid.asm engines/capi.def engines/dasync.def engines/e_padlock-x86.asm engines/ossltest.def engines/padlock.def libcrypto.def libcrypto.rc libssl.def libssl.rc test/buildtest_aes.c test/buildtest_asn1.c test/buildtest_asn1t.c test/buildtest_async.c test/buildtest_bio.c test/buildtest_blowfish.c test/buildtest_bn.c test/buildtest_buffer.c test/buildtest_camellia.c test/buildtest_cast.c 
test/buildtest_cmac.c test/buildtest_cms.c test/buildtest_comp.c test/buildtest_conf.c test/buildtest_conf_api.c test/buildtest_crypto.c test/buildtest_ct.c test/buildtest_des.c test/buildtest_dh.c test/buildtest_dsa.c test/buildtest_dtls1.c test/buildtest_e_os2.c test/buildtest_ebcdic.c test/buildtest_ec.c test/buildtest_ecdh.c test/buildtest_ecdsa.c test/buildtest_engine.c test/buildtest_evp.c test/buildtest_hmac.c test/buildtest_idea.c test/buildtest_kdf.c test/buildtest_lhash.c test/buildtest_md4.c test/buildtest_md5.c test/buildtest_mdc2.c test/buildtest_modes.c test/buildtest_obj_mac.c test/buildtest_objects.c test/buildtest_ocsp.c test/buildtest_opensslv.c test/buildtest_ossl_typ.c test/buildtest_pem.c test/buildtest_pem2.c test/buildtest_pkcs12.c test/buildtest_pkcs7.c test/buildtest_rand.c test/buildtest_rand_drbg.c test/buildtest_rc2.c test/buildtest_rc4.c test/buildtest_ripemd.c test/buildtest_rsa.c test/buildtest_safestack.c test/buildtest_seed.c test/buildtest_sha.c test/buildtest_srp.c test/buildtest_srtp.c test/buildtest_ssl.c test/buildtest_ssl2.c test/buildtest_stack.c test/buildtest_store.c test/buildtest_symhacks.c test/buildtest_tls1.c test/buildtest_ts.c test/buildtest_txt_db.c test/buildtest_ui.c test/buildtest_whrlpool.c test/buildtest_x509.c test/buildtest_x509_vfy.c test/buildtest_x509v3.c tools/c_rehash.pl providers/common/der/der_digests_gen.c providers/common/der/der_dsa_gen.c providers/common/der/der_ec_gen.c providers/common/der/der_ecx_gen.c providers/common/der/der_ml_dsa_gen.c providers/common/der/der_rsa_gen.c providers/common/der/der_slh_dsa_gen.c providers/common/der/der_sm2_gen.c providers/common/der/der_wrap_gen.c # Variables starting with LIB_ are used to build library object files # and shared libraries. @@ -42,6 +42,10 @@ include/crypto/dso_conf.h: include/crypto/dso_conf.h.in configdata.pm $(PERL) -I$(BLDDIR) -Mconfigdata util/dofile.pl \ -omakefile include/crypto/dso_conf.h.in > $@ +include/internal/param_names.h: include/internal/param_names.h.in configdata.pm + $(PERL) -I$(BLDDIR) -Mconfigdata util/dofile.pl \ + -omakefile include/internal/param_names.h.in > $@ + include/openssl/asn1.h: include/openssl/asn1.h.in configdata.pm "$(PERL)" "-I$(BLDDIR)" -Mconfigdata "util/dofile.pl" \ "-omakefile" "include/openssl/asn1.h.in" > $@ @@ -54,9 +58,15 @@ include/openssl/bio.h: include/openssl/bio.h.in configdata.pm include/openssl/cmp.h: include/openssl/cmp.h.in configdata.pm "$(PERL)" "-I$(BLDDIR)" -Mconfigdata "util/dofile.pl" \ "-omakefile" "include/openssl/cmp.h.in" > $@ +include/openssl/comp.h: include/openssl/comp.h.in configdata.pm + "$(PERL)" "-I$(BLDDIR)" -Mconfigdata "util/dofile.pl" \ + "-omakefile" "include/openssl/comp.h.in" > $@ include/openssl/cms.h: include/openssl/cms.h.in configdata.pm "$(PERL)" "-I$(BLDDIR)" -Mconfigdata "util/dofile.pl" \ "-omakefile" "include/openssl/cms.h.in" > $@ +include/openssl/comp.h: include/openssl/comp.h.in configdata.pm + "$(PERL)" "-I$(BLDDIR)" -Mconfigdata "util/dofile.pl" \ + "-omakefile" "include/openssl/comp.h.in" > $@ include/openssl/conf.h: include/openssl/conf.h.in configdata.pm "$(PERL)" "-I$(BLDDIR)" -Mconfigdata "util/dofile.pl" \ "-omakefile" "include/openssl/conf.h.in" > $@ @@ -66,6 +76,9 @@ include/openssl/conf.h: include/openssl/conf.h.in configdata.pm include/openssl/configuration.h: include/openssl/configuration.h.in configdata.pm "$(PERL)" "-I$(BLDDIR)" -Mconfigdata "util/dofile.pl" \ "-omakefile" "include/openssl/configuration.h.in" > $@ +include/openssl/core_names.h: 
include/openssl/core_names.h.in configdata.pm + "$(PERL)" "-I$(BLDDIR)" -Mconfigdata "util/dofile.pl" \ + "-omakefile" "include/openssl/core_names.h.in" > $@ include/openssl/crmf.h: include/openssl/crmf.h.in configdata.pm "$(PERL)" "-I$(BLDDIR)" -Mconfigdata "util/dofile.pl" \ "-omakefile" "include/openssl/crmf.h.in" > $@ @@ -117,6 +130,9 @@ include/openssl/ui.h: include/openssl/ui.h.in configdata.pm include/openssl/x509.h: include/openssl/x509.h.in configdata.pm $(PERL) -I$(BLDDIR) -Mconfigdata util/dofile.pl \ "-omakefile" "include/openssl/x509.h.in" > $@ +include/openssl/x509_acert.h: include/openssl/x509_acert.h.in configdata.pm + $(PERL) -I$(BLDDIR) -Mconfigdata util/dofile.pl \ + "-omakefile" "include/openssl/x509_acert.h.in" > $@ include/openssl/x509_vfy.h: include/openssl/x509_vfy.h.in configdata.pm $(PERL) -I$(BLDDIR) -Mconfigdata util/dofile.pl \ "-omakefile" "include/openssl/x509_vfy.h.in" > $@ @@ -134,18 +150,24 @@ providers/common/include/prov/der_wrap.h: providers/common/include/prov/der_wrap providers/common/include/prov/der_rsa.h: providers/common/include/prov/der_wrap.h.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm $(PERL) "-I." "-Iproviders/common/der" -Mconfigdata -Moids_to_c "util/dofile.pl" "-oMakefile" providers/common/include/prov/der_rsa.h.in > $@ -providers/common/include/prov/der_ecx.h: providers/common/include/prov/der_wrap.h.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm +providers/common/include/prov/der_ecx.h: providers/common/include/prov/der_ecx.h.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm $(PERL) "-I." "-Iproviders/common/der" -Mconfigdata -Moids_to_c "util/dofile.pl" "-oMakefile" providers/common/include/prov/der_ecx.h.in > $@ -providers/common/include/prov/der_sm2.h: providers/common/include/prov/der_wrap.h.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm +providers/common/include/prov/der_sm2.h: providers/common/include/prov/der_sm2.h.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm $(PERL) "-I." "-Iproviders/common/der" -Mconfigdata -Moids_to_c "util/dofile.pl" "-oMakefile" providers/common/include/prov/der_sm2.h.in > $@ -providers/common/include/prov/der_ec.h: providers/common/include/prov/der_wrap.h.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm +providers/common/include/prov/der_ec.h: providers/common/include/prov/der_ec.h.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm $(PERL) "-I." "-Iproviders/common/der" -Mconfigdata -Moids_to_c "util/dofile.pl" "-oMakefile" providers/common/include/prov/der_ec.h.in > $@ -providers/common/include/prov/der_digests.h: providers/common/include/prov/der_wrap.h.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm +providers/common/include/prov/der_digests.h: providers/common/include/prov/der_digests.h.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm $(PERL) "-I." "-Iproviders/common/der" -Mconfigdata -Moids_to_c "util/dofile.pl" "-oMakefile" providers/common/include/prov/der_digests.h.in > $@ +providers/common/include/prov/der_ml_dsa.h: providers/common/include/prov/der_ml_dsa.h.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm + $(PERL) "-I." 
"-Iproviders/common/der" -Mconfigdata -Moids_to_c "util/dofile.pl" "-oMakefile" providers/common/include/prov/der_ml_dsa.h.in > $@ + +providers/common/include/prov/der_slh_dsa.h: providers/common/include/prov/der_slh_dsa.h.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm + $(PERL) "-I." "-Iproviders/common/der" -Mconfigdata -Moids_to_c "util/dofile.pl" "-oMakefile" providers/common/include/prov/der_slh_dsa.h.in > $@ + apps/progs.h: apps/progs.pl configdata.pm "$(PERL)" "apps/progs.pl" "$(APPS_OPENSSL)" > $@ @@ -1958,6 +1980,8 @@ crypto/ocsp/libcrypto-shlib-v3_ocsp.d: "crypto/ocsp/v3_ocsp.c" $(CC) $(LIB_CFLAGS) /I "." /I "crypto/include" /I "include" $(LIB_CPPFLAGS) /Zs /showIncludes "crypto/ocsp/v3_ocsp.c" 2>&1 > crypto/ocsp/libcrypto-shlib-v3_ocsp.d crypto/ocsp/libcrypto-shlib-v3_ocsp.obj: crypto/ocsp/libcrypto-shlib-v3_ocsp.d $(CC) $(LIB_CFLAGS) /I "." /I "crypto/include" /I "include" $(LIB_CPPFLAGS) -c $(COUTFLAG)$@ "crypto/ocsp/v3_ocsp.c" +crypto/params_idx.c: crypto/params_idx.c.in configdata.pm util/perl/OpenSSL/paramnames.pm + "$(PERL)" "-I." "-Iutil/perl" "-Mconfigdata" "-MOpenSSL::paramnames" "util/dofile.pl" "-omakefile" crypto/params_idx.c.in > $@ crypto/pem/libcrypto-shlib-pem_all.d: "crypto/pem/pem_all.c" $(CC) $(LIB_CFLAGS) /I "." /I "crypto/include" /I "include" $(LIB_CPPFLAGS) /Zs /showIncludes "crypto/pem/pem_all.c" 2>&1 > crypto/pem/libcrypto-shlib-pem_all.d crypto/pem/libcrypto-shlib-pem_all.obj: crypto/pem/libcrypto-shlib-pem_all.d @@ -5797,9 +5821,15 @@ providers/common/der/der_ec_gen.c: providers/common/der/der_ec_gen.c.in provider providers/common/der/der_ecx_gen.c: providers/common/der/der_ecx_gen.c.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm $(PERL) "-I." "-Iproviders/common/der" -Mconfigdata -Moids_to_c "util/dofile.pl" "-oMakefile" providers/common/der/der_ecx_gen.c.in > $@ +providers/common/der/der_ml_dsa_gen.c: providers/common/der/der_ml_dsa_gen.c.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm + $(PERL) "-I." "-Iproviders/common/der" -Mconfigdata -Moids_to_c "util/dofile.pl" "-oMakefile" providers/common/der/der_ml_dsa_gen.c.in > $@ + providers/common/der/der_rsa_gen.c: providers/common/der/der_rsa_gen.c.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm $(PERL) "-I." "-Iproviders/common/der" -Mconfigdata -Moids_to_c "util/dofile.pl" "-oMakefile" providers/common/der/der_rsa_gen.c.in > $@ +providers/common/der/der_slh_dsa_gen.c: providers/common/der/der_slh_dsa_gen.c.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm + $(PERL) "-I." "-Iproviders/common/der" -Mconfigdata -Moids_to_c "util/dofile.pl" "-oMakefile" providers/common/der/der_slh_dsa_gen.c.in > $@ + providers/common/der/der_sm2_gen.c: providers/common/der/der_sm2_gen.c.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm $(PERL) "-I." 
"-Iproviders/common/der" -Mconfigdata -Moids_to_c "util/dofile.pl" "-oMakefile" providers/common/der/der_sm2_gen.c.in > $@ diff --git a/deps/openssl/config/Makefile_VC-WIN64-ARM b/deps/openssl/config/Makefile_VC-WIN64-ARM index 52fc9cd9cf4fe8..a0a8a6c3a3bf8b 100644 --- a/deps/openssl/config/Makefile_VC-WIN64-ARM +++ b/deps/openssl/config/Makefile_VC-WIN64-ARM @@ -16,8 +16,8 @@ MINOR=1.1 SHLIB_VERSION_NUMBER=1.1 -GENERATED_MANDATORY=include/crypto/bn_conf.h include/crypto/dso_conf.h include/openssl/asn1.h include/openssl/asn1t.h include/openssl/bio.h include/openssl/cmp.h include/openssl/cms.h include/openssl/conf.h include/openssl/configuration.h include/openssl/crmf.h include/openssl/crypto.h include/openssl/ct.h include/openssl/err.h include/openssl/ess.h include/openssl/fipskey.h include/openssl/lhash.h include/openssl/ocsp.h include/openssl/opensslv.h include/openssl/pkcs12.h include/openssl/pkcs7.h include/openssl/safestack.h include/openssl/srp.h include/openssl/ssl.h include/openssl/ui.h include/openssl/x509.h include/openssl/x509_vfy.h include/openssl/x509v3.h providers/common/include/prov/der_digests.h providers/common/include/prov/der_dsa.h providers/common/include/prov/der_ec.h providers/common/include/prov/der_ecx.h providers/common/include/prov/der_rsa.h providers/common/include/prov/der_rsa.h providers/common/include/prov/der_sm2.h providers/common/include/prov/der_wrap.h -GENERATED=crypto/buildinf.h apps/progs.h providers/common/der/der_digests_gen.c providers/common/der/der_dsa_gen.c providers/common/der/der_ec_gen.c providers/common/der/der_ecx_gen.c providers/common/der/der_rsa_gen.c providers/common/der/der_sm2_gen.c providers/common/der/der_wrap_gen.c apps/progs.c providers/common/der/der_digests_gen.c providers/common/der/der_dsa_gen.c providers/common/der/der_ec_gen.c providers/common/der/der_ecx_gen.c providers/common/der/der_rsa_gen.c providers/common/der/der_sm2_gen.c providers/common/der/der_wrap_gen.c +GENERATED_MANDATORY=crypto/params_idx.c include/crypto/bn_conf.h include/crypto/dso_conf.h include/internal/param_names.h include/openssl/asn1.h include/openssl/asn1t.h include/openssl/bio.h include/openssl/cmp.h include/openssl/comp.h include/openssl/cms.h include/openssl/conf.h include/openssl/configuration.h include/openssl/core_names.h include/openssl/crmf.h include/openssl/crypto.h include/openssl/ct.h include/openssl/err.h include/openssl/ess.h include/openssl/fipskey.h include/openssl/lhash.h include/openssl/ocsp.h include/openssl/opensslv.h include/openssl/pkcs12.h include/openssl/pkcs7.h include/openssl/safestack.h include/openssl/srp.h include/openssl/ssl.h include/openssl/ui.h include/openssl/x509.h include/openssl/x509_acert.h include/openssl/x509_vfy.h include/openssl/x509v3.h providers/common/include/prov/der_digests.h providers/common/include/prov/der_dsa.h providers/common/include/prov/der_ec.h providers/common/include/prov/der_ecx.h providers/common/include/prov/der_ml_dsa.h providers/common/include/prov/der_rsa.h providers/common/include/prov/der_slh_dsa.h providers/common/include/prov/der_sm2.h providers/common/include/prov/der_wrap.h +GENERATED=crypto/buildinf.h apps/progs.h crypto/params_idx.c providers/common/der/der_digests_gen.c providers/common/der/der_dsa_gen.c providers/common/der/der_ec_gen.c providers/common/der/der_ecx_gen.c providers/common/der/der_rsa_gen.c providers/common/der/der_sm2_gen.c providers/common/der/der_wrap_gen.c apps/progs.c providers/common/der/der_digests_gen.c providers/common/der/der_dsa_gen.c 
providers/common/der/der_ec_gen.c providers/common/der/der_ecx_gen.c providers/common/der/der_ml_dsa_gen.c providers/common/der/der_rsa_gen.c providers/common/der/der_slh_dsa_gen.c providers/common/der/der_sm2_gen.c providers/common/der/der_wrap_gen.c INSTALL_LIBS="libcrypto.lib" "libssl.lib" INSTALL_SHLIBS="libcrypto-1_1-arm64.dll" "libssl-1_1-arm64.dll" @@ -138,6 +138,9 @@ apps/progs.h: apps/progs.c apps/progs.c: "$(PERL)" "apps/progs.pl" "-C" $(APPS_OPENSSL) > $@ +crypto/params_idx.c: crypto/params_idx.c.in configdata.pm util/perl/OpenSSL/paramnames.pm + "$(PERL)" "-I." "-Iutil/perl" "-Mconfigdata" "-MOpenSSL::paramnames" "util/dofile.pl" "-omakefile" crypto/params_idx.c.in > $@ + include/crypto/bn_conf.h: "$(PERL)" "-I$(BLDDIR)" -Mconfigdata "util/dofile.pl" \ "-omakefile" "include/crypto/bn_conf.h.in" > $@ @@ -145,6 +148,10 @@ include/crypto/dso_conf.h: "$(PERL)" "-I$(BLDDIR)" -Mconfigdata "util/dofile.pl" \ "-omakefile" "include/crypto/dso_conf.h.in" > $@ +include/internal/param_names.h: include/internal/param_names.h.in configdata.pm + "$(PERL)" "-I$(BLDDIR)" -Mconfigdata "util/dofile.pl" \ + "-omakefile" "include/internal/param_names.h.in" > $@ + include/openssl/asn1.h: include/openssl/asn1.h.in configdata.pm "$(PERL)" "-I$(BLDDIR)" -Mconfigdata "util/dofile.pl" \ "-omakefile" "include/openssl/asn1.h.in" > $@ @@ -157,9 +164,15 @@ include/openssl/bio.h: include/openssl/bio.h.in configdata.pm include/openssl/cmp.h: include/openssl/cmp.h.in configdata.pm "$(PERL)" "-I$(BLDDIR)" -Mconfigdata "util/dofile.pl" \ "-omakefile" "include/openssl/cmp.h.in" > $@ +include/openssl/comp.h: include/openssl/comp.h.in configdata.pm + "$(PERL)" "-I$(BLDDIR)" -Mconfigdata "util/dofile.pl" \ + "-omakefile" "include/openssl/comp.h.in" > $@ include/openssl/cms.h: include/openssl/cms.h.in configdata.pm "$(PERL)" "-I$(BLDDIR)" -Mconfigdata "util/dofile.pl" \ "-omakefile" "include/openssl/cms.h.in" > $@ +include/openssl/comp.h: include/openssl/comp.h.in configdata.pm + "$(PERL)" "-I$(BLDDIR)" -Mconfigdata "util/dofile.pl" \ + "-omakefile" "include/openssl/comp.h.in" > $@ include/openssl/conf.h: include/openssl/conf.h.in configdata.pm "$(PERL)" "-I$(BLDDIR)" -Mconfigdata "util/dofile.pl" \ "-omakefile" "include/openssl/conf.h.in" > $@ @@ -169,6 +182,9 @@ include/openssl/conf.h: include/openssl/conf.h.in configdata.pm include/openssl/configuration.h: include/openssl/configuration.h.in configdata.pm "$(PERL)" "-I$(BLDDIR)" -Mconfigdata "util/dofile.pl" \ "-omakefile" "include/openssl/configuration.h.in" > $@ +include/openssl/core_names.h: include/openssl/core_names.h.in configdata.pm + "$(PERL)" "-I$(BLDDIR)" -Mconfigdata "util/dofile.pl" \ + "-omakefile" "include/openssl/core_names.h.in" > $@ include/openssl/crmf.h: include/openssl/crmf.h.in configdata.pm "$(PERL)" "-I$(BLDDIR)" -Mconfigdata "util/dofile.pl" \ "-omakefile" "include/openssl/crmf.h.in" > $@ @@ -220,6 +236,9 @@ include/openssl/ui.h: include/openssl/ui.h.in configdata.pm include/openssl/x509.h: include/openssl/x509.h.in configdata.pm $(PERL) -I$(BLDDIR) -Mconfigdata util/dofile.pl \ "-omakefile" "include/openssl/x509.h.in" > $@ +include/openssl/x509_acert.h: include/openssl/x509_acert.h.in configdata.pm + $(PERL) -I$(BLDDIR) -Mconfigdata util/dofile.pl \ + "-omakefile" "include/openssl/x509_acert.h.in" > $@ include/openssl/x509_vfy.h: include/openssl/x509_vfy.h.in configdata.pm $(PERL) -I$(BLDDIR) -Mconfigdata util/dofile.pl \ "-omakefile" "include/openssl/x509_vfy.h.in" > $@ @@ -234,21 +253,27 @@ providers/common/include/prov/der_dsa.h: 
providers/common/include/prov/der_dsa.h providers/common/include/prov/der_wrap.h: providers/common/include/prov/der_wrap.h.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm $(PERL) "-I." "-Iproviders/common/der" -Mconfigdata -Moids_to_c "util/dofile.pl" "-oMakefile" providers/common/include/prov/der_wrap.h.in > $@ -providers/common/include/prov/der_rsa.h: providers/common/include/prov/der_wrap.h.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm +providers/common/include/prov/der_rsa.h: providers/common/include/prov/der_rsa.h.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm $(PERL) "-I." "-Iproviders/common/der" -Mconfigdata -Moids_to_c "util/dofile.pl" "-oMakefile" providers/common/include/prov/der_rsa.h.in > $@ -providers/common/include/prov/der_ecx.h: providers/common/include/prov/der_wrap.h.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm +providers/common/include/prov/der_ecx.h: providers/common/include/prov/der_ecx.h.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm $(PERL) "-I." "-Iproviders/common/der" -Mconfigdata -Moids_to_c "util/dofile.pl" "-oMakefile" providers/common/include/prov/der_ecx.h.in > $@ -providers/common/include/prov/der_sm2.h: providers/common/include/prov/der_wrap.h.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm +providers/common/include/prov/der_sm2.h: providers/common/include/prov/der_sm2.h.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm $(PERL) "-I." "-Iproviders/common/der" -Mconfigdata -Moids_to_c "util/dofile.pl" "-oMakefile" providers/common/include/prov/der_sm2.h.in > $@ -providers/common/include/prov/der_ec.h: providers/common/include/prov/der_wrap.h.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm +providers/common/include/prov/der_ec.h: providers/common/include/prov/der_ec.h.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm $(PERL) "-I." "-Iproviders/common/der" -Mconfigdata -Moids_to_c "util/dofile.pl" "-oMakefile" providers/common/include/prov/der_ec.h.in > $@ -providers/common/include/prov/der_digests.h: providers/common/include/prov/der_wrap.h.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm +providers/common/include/prov/der_digests.h: providers/common/include/prov/der_digests.h.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm $(PERL) "-I." "-Iproviders/common/der" -Mconfigdata -Moids_to_c "util/dofile.pl" "-oMakefile" providers/common/include/prov/der_digests.h.in > $@ +providers/common/include/prov/der_ml_dsa.h: providers/common/include/prov/der_ml_dsa.h.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm + $(PERL) "-I." "-Iproviders/common/der" -Mconfigdata -Moids_to_c "util/dofile.pl" "-oMakefile" providers/common/include/prov/der_ml_dsa.h.in > $@ + +providers/common/include/prov/der_slh_dsa.h: providers/common/include/prov/der_slh_dsa.h.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm + $(PERL) "-I." 
"-Iproviders/common/der" -Mconfigdata -Moids_to_c "util/dofile.pl" "-oMakefile" providers/common/include/prov/der_slh_dsa.h.in > $@ + providers/common/der/der_digests_gen.c: providers/common/der/der_digests_gen.c.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm $(PERL) "-I." "-Iproviders/common/der" -Mconfigdata -Moids_to_c "util/dofile.pl" "-oMakefile" providers/common/der/der_digests_gen.c.in > $@ @@ -262,9 +287,15 @@ providers/common/der/der_ec_gen.c: providers/common/der/der_ec_gen.c.in provider providers/common/der/der_ecx_gen.c: providers/common/der/der_ecx_gen.c.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm $(PERL) "-I." "-Iproviders/common/der" -Mconfigdata -Moids_to_c "util/dofile.pl" "-oMakefile" providers/common/der/der_ecx_gen.c.in > $@ +providers/common/der/der_ml_dsa_gen.c: providers/common/der/der_ml_dsa_gen.c.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm + $(PERL) "-I." "-Iproviders/common/der" -Mconfigdata -Moids_to_c "util/dofile.pl" "-oMakefile" providers/common/der/der_ml_dsa_gen.c.in > $@ + providers/common/der/der_rsa_gen.c: providers/common/der/der_rsa_gen.c.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm $(PERL) "-I." "-Iproviders/common/der" -Mconfigdata -Moids_to_c "util/dofile.pl" "-oMakefile" providers/common/der/der_rsa_gen.c.in > $@ +providers/common/der/der_slh_dsa_gen.c: providers/common/der/der_slh_dsa_gen.c.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm + $(PERL) "-I." "-Iproviders/common/der" -Mconfigdata -Moids_to_c "util/dofile.pl" "-oMakefile" providers/common/der/der_slh_dsa_gen.c.in > $@ + providers/common/der/der_sm2_gen.c: providers/common/der/der_sm2_gen.c.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm $(PERL) "-I." 
"-Iproviders/common/der" -Mconfigdata -Moids_to_c "util/dofile.pl" "-oMakefile" providers/common/der/der_sm2_gen.c.in > $@ diff --git a/deps/openssl/config/Makefile_VC-WIN64A b/deps/openssl/config/Makefile_VC-WIN64A index d8fde1e0f39652..5bc8263188833e 100644 --- a/deps/openssl/config/Makefile_VC-WIN64A +++ b/deps/openssl/config/Makefile_VC-WIN64A @@ -5,8 +5,8 @@ RM= rm -f AS=nasm ASFLAGS=-g -GENERATED_MANDATORY=include/crypto/bn_conf.h include/crypto/dso_conf.h include/openssl/asn1.h include/openssl/asn1t.h include/openssl/bio.h include/openssl/cmp.h include/openssl/cms.h include/openssl/conf.h include/openssl/configuration.h include/openssl/crmf.h include/openssl/crypto.h include/openssl/ct.h include/openssl/err.h include/openssl/ess.h include/openssl/fipskey.h include/openssl/lhash.h include/openssl/ocsp.h include/openssl/opensslv.h include/openssl/pkcs12.h include/openssl/pkcs7.h include/openssl/safestack.h include/openssl/srp.h include/openssl/ssl.h include/openssl/ui.h include/openssl/x509.h include/openssl/x509_vfy.h include/openssl/x509v3.h providers/common/include/prov/der_digests.h providers/common/include/prov/der_dsa.h providers/common/include/prov/der_ec.h providers/common/include/prov/der_ecx.h providers/common/include/prov/der_rsa.h providers/common/include/prov/der_rsa.h providers/common/include/prov/der_sm2.h providers/common/include/prov/der_wrap.h -GENERATED=apps/CA.pl apps/openssl.rc apps/progs.h apps/tsget.pl crypto/aes/aes-x86_64.asm crypto/aes/aesni-mb-x86_64.asm crypto/aes/aesni-sha1-x86_64.asm crypto/aes/aesni-sha256-x86_64.asm crypto/aes/aesni-x86_64.asm crypto/aes/bsaes-x86_64.asm crypto/aes/vpaes-x86_64.asm crypto/bn/rsaz-avx2.asm crypto/bn/rsaz-x86_64.asm crypto/bn/x86_64-gf2m.asm crypto/bn/x86_64-mont.asm crypto/bn/x86_64-mont5.asm crypto/buildinf.h crypto/camellia/cmll-x86_64.asm crypto/chacha/chacha-x86_64.asm crypto/ec/ecp_nistz256-x86_64.asm crypto/ec/x25519-x86_64.asm crypto/md5/md5-x86_64.asm crypto/modes/aesni-gcm-x86_64.asm crypto/modes/ghash-x86_64.asm crypto/poly1305/poly1305-x86_64.asm crypto/rc4/rc4-md5-x86_64.asm crypto/rc4/rc4-x86_64.asm crypto/sha/keccak1600-x86_64.asm crypto/sha/sha1-mb-x86_64.asm crypto/sha/sha1-x86_64.asm crypto/sha/sha256-mb-x86_64.asm crypto/sha/sha256-x86_64.asm crypto/sha/sha512-x86_64.asm crypto/uplink-x86_64.asm crypto/whrlpool/wp-x86_64.asm crypto/x86_64cpuid.asm engines/e_padlock-x86_64.asm libcrypto.def libcrypto.rc libssl.def libssl.rc test/buildtest_aes.c test/buildtest_asn1.c test/buildtest_asn1t.c test/buildtest_async.c test/buildtest_bio.c test/buildtest_blowfish.c test/buildtest_bn.c test/buildtest_buffer.c test/buildtest_camellia.c test/buildtest_cast.c test/buildtest_cmac.c test/buildtest_cms.c test/buildtest_comp.c test/buildtest_conf.c test/buildtest_conf_api.c test/buildtest_crypto.c test/buildtest_ct.c test/buildtest_des.c test/buildtest_dh.c test/buildtest_dsa.c test/buildtest_dtls1.c test/buildtest_e_os2.c test/buildtest_ebcdic.c test/buildtest_ec.c test/buildtest_ecdh.c test/buildtest_ecdsa.c test/buildtest_engine.c test/buildtest_evp.c test/buildtest_hmac.c test/buildtest_idea.c test/buildtest_kdf.c test/buildtest_lhash.c test/buildtest_md4.c test/buildtest_md5.c test/buildtest_mdc2.c test/buildtest_modes.c test/buildtest_obj_mac.c test/buildtest_objects.c test/buildtest_ocsp.c test/buildtest_opensslv.c test/buildtest_ossl_typ.c test/buildtest_pem.c test/buildtest_pem2.c test/buildtest_pkcs12.c test/buildtest_pkcs7.c test/buildtest_rand.c test/buildtest_rand_drbg.c test/buildtest_rc2.c 
test/buildtest_rc4.c test/buildtest_ripemd.c test/buildtest_rsa.c test/buildtest_safestack.c test/buildtest_seed.c test/buildtest_sha.c test/buildtest_srp.c test/buildtest_srtp.c test/buildtest_ssl.c test/buildtest_ssl2.c test/buildtest_stack.c test/buildtest_store.c test/buildtest_symhacks.c test/buildtest_tls1.c test/buildtest_ts.c test/buildtest_txt_db.c test/buildtest_ui.c test/buildtest_whrlpool.c test/buildtest_x509.c test/buildtest_x509_vfy.c test/buildtest_x509v3.c tools/c_rehash.pl providers/common/der/der_digests_gen.c providers/common/der/der_dsa_gen.c providers/common/der/der_ec_gen.c providers/common/der/der_ecx_gen.c providers/common/der/der_rsa_gen.c providers/common/der/der_sm2_gen.c providers/common/der/der_wrap_gen.c +GENERATED_MANDATORY=crypto/params_idx.c include/crypto/bn_conf.h include/crypto/dso_conf.h include/internal/param_names.h include/openssl/asn1.h include/openssl/asn1t.h include/openssl/bio.h include/openssl/cmp.h include/openssl/comp.h include/openssl/cms.h include/openssl/conf.h include/openssl/configuration.h include/openssl/core_names.h include/openssl/crmf.h include/openssl/crypto.h include/openssl/ct.h include/openssl/err.h include/openssl/ess.h include/openssl/fipskey.h include/openssl/lhash.h include/openssl/ocsp.h include/openssl/opensslv.h include/openssl/pkcs12.h include/openssl/pkcs7.h include/openssl/safestack.h include/openssl/srp.h include/openssl/ssl.h include/openssl/ui.h include/openssl/x509.h include/openssl/x509_acert.h include/openssl/x509_vfy.h include/openssl/x509v3.h providers/common/include/prov/der_digests.h providers/common/include/prov/der_dsa.h providers/common/include/prov/der_ec.h providers/common/include/prov/der_ecx.h providers/common/include/prov/der_ml_dsa.h providers/common/include/prov/der_rsa.h providers/common/include/prov/der_slh_dsa.h providers/common/include/prov/der_sm2.h providers/common/include/prov/der_wrap.h +GENERATED=apps/CA.pl apps/openssl.rc apps/progs.h apps/tsget.pl crypto/aes/aes-x86_64.asm crypto/aes/aesni-mb-x86_64.asm crypto/aes/aesni-sha1-x86_64.asm crypto/aes/aesni-sha256-x86_64.asm crypto/aes/aesni-x86_64.asm crypto/aesni-xts-avx512.asm crypto/aes/bsaes-x86_64.asm crypto/aes/vpaes-x86_64.asm crypto/bn/rsaz-avx2.asm crypto/bn/rsaz-2k-avx512.asm crypto/bn/rsaz-2k-avxifma.asm crypto/bn/rsaz-3k-avx512.asm crypto/bn/rsaz-3k-avxifma.asm crypto/bn/rsaz-4k-avx512.asm crypto/bn/rsaz-4k-avxifma.asm crypto/bn/rsaz-x86_64.asm crypto/bn/x86_64-gf2m.asm crypto/bn/x86_64-mont.asm crypto/bn/x86_64-mont5.asm crypto/buildinf.h crypto/camellia/cmll-x86_64.asm crypto/chacha/chacha-x86_64.asm crypto/ec/ecp_nistz256-x86_64.asm crypto/ec/x25519-x86_64.asm crypto/md5/md5-x86_64.asm crypto/modes/aes-gcm-avx512.asm crypto/modes/aesni-gcm-x86_64.asm crypto/modes/ghash-x86_64.asm crypto/params_idx.c crypto/poly1305/poly1305-x86_64.asm crypto/rc4/rc4-md5-x86_64.asm crypto/rc4/rc4-x86_64.asm crypto/sha/keccak1600-x86_64.asm crypto/sha/sha1-mb-x86_64.asm crypto/sha/sha1-x86_64.asm crypto/sha/sha256-mb-x86_64.asm crypto/sha/sha256-x86_64.asm crypto/sha/sha512-x86_64.asm crypto/uplink-x86_64.asm crypto/whrlpool/wp-x86_64.asm crypto/x86_64cpuid.asm engines/e_padlock-x86_64.asm libcrypto.def libcrypto.rc libssl.def libssl.rc test/buildtest_aes.c test/buildtest_asn1.c test/buildtest_asn1t.c test/buildtest_async.c test/buildtest_bio.c test/buildtest_blowfish.c test/buildtest_bn.c test/buildtest_buffer.c test/buildtest_camellia.c test/buildtest_cast.c test/buildtest_cmac.c test/buildtest_cms.c test/buildtest_comp.c test/buildtest_conf.c 
test/buildtest_conf_api.c test/buildtest_crypto.c test/buildtest_ct.c test/buildtest_des.c test/buildtest_dh.c test/buildtest_dsa.c test/buildtest_dtls1.c test/buildtest_e_os2.c test/buildtest_ebcdic.c test/buildtest_ec.c test/buildtest_ecdh.c test/buildtest_ecdsa.c test/buildtest_engine.c test/buildtest_evp.c test/buildtest_hmac.c test/buildtest_idea.c test/buildtest_kdf.c test/buildtest_lhash.c test/buildtest_md4.c test/buildtest_md5.c test/buildtest_mdc2.c test/buildtest_modes.c test/buildtest_obj_mac.c test/buildtest_objects.c test/buildtest_ocsp.c test/buildtest_opensslv.c test/buildtest_ossl_typ.c test/buildtest_pem.c test/buildtest_pem2.c test/buildtest_pkcs12.c test/buildtest_pkcs7.c test/buildtest_rand.c test/buildtest_rand_drbg.c test/buildtest_rc2.c test/buildtest_rc4.c test/buildtest_ripemd.c test/buildtest_rsa.c test/buildtest_safestack.c test/buildtest_seed.c test/buildtest_sha.c test/buildtest_srp.c test/buildtest_srtp.c test/buildtest_ssl.c test/buildtest_ssl2.c test/buildtest_stack.c test/buildtest_store.c test/buildtest_symhacks.c test/buildtest_tls1.c test/buildtest_ts.c test/buildtest_txt_db.c test/buildtest_ui.c test/buildtest_whrlpool.c test/buildtest_x509.c test/buildtest_x509_vfy.c test/buildtest_x509v3.c tools/c_rehash.pl providers/common/der/der_digests_gen.c providers/common/der/der_dsa_gen.c providers/common/der/der_ec_gen.c providers/common/der/der_ecx_gen.c providers/common/der/der_ml_dsa.c providers/common/der/der_rsa_gen.c providers/common/der/der_slh_dsa.c providers/common/der/der_sm2_gen.c providers/common/der/der_wrap_gen.c PERLASM_SCHEME= auto APPS_OPENSSL=apps/openssl @@ -18,6 +18,10 @@ include/crypto/dso_conf.h: include/crypto/dso_conf.h.in configdata.pm "$(PERL)" "-I$(BLDDIR)" -Mconfigdata "util/dofile.pl" \ "-omakefile" "include/crypto/dso_conf.h.in" > $@ +include/internal/param_names.h: include/internal/param_names.h.in configdata.pm + "$(PERL)" "-I$(BLDDIR)" -Mconfigdata "util/dofile.pl" \ + "-omakefile" "include/internal/param_names.h.in" > $@ + include/openssl/asn1.h: include/openssl/asn1.h.in configdata.pm "$(PERL)" "-I$(BLDDIR)" -Mconfigdata "util/dofile.pl" \ "-omakefile" "include/openssl/asn1.h.in" > $@ @@ -30,9 +34,15 @@ include/openssl/bio.h: include/openssl/bio.h.in configdata.pm include/openssl/cmp.h: include/openssl/cmp.h.in configdata.pm "$(PERL)" "-I$(BLDDIR)" -Mconfigdata "util/dofile.pl" \ "-omakefile" "include/openssl/cmp.h.in" > $@ +include/openssl/comp.h: include/openssl/comp.h.in configdata.pm + "$(PERL)" "-I$(BLDDIR)" -Mconfigdata "util/dofile.pl" \ + "-omakefile" "include/openssl/comp.h.in" > $@ include/openssl/cms.h: include/openssl/cms.h.in configdata.pm "$(PERL)" "-I$(BLDDIR)" -Mconfigdata "util/dofile.pl" \ "-omakefile" "include/openssl/cms.h.in" > $@ +include/openssl/comp.h: include/openssl/comp.h.in configdata.pm + "$(PERL)" "-I$(BLDDIR)" -Mconfigdata "util/dofile.pl" \ + "-omakefile" "include/openssl/comp.h.in" > $@ include/openssl/conf.h: include/openssl/conf.h.in configdata.pm "$(PERL)" "-I$(BLDDIR)" -Mconfigdata "util/dofile.pl" \ "-omakefile" "include/openssl/conf.h.in" > $@ @@ -42,6 +52,9 @@ include/openssl/conf.h: include/openssl/conf.h.in configdata.pm include/openssl/configuration.h: include/openssl/configuration.h.in configdata.pm "$(PERL)" "-I$(BLDDIR)" -Mconfigdata "util/dofile.pl" \ "-omakefile" "include/openssl/configuration.h.in" > $@ +include/openssl/core_names.h: include/openssl/core_names.h.in configdata.pm + "$(PERL)" "-I$(BLDDIR)" -Mconfigdata "util/dofile.pl" \ + "-omakefile" 
"include/openssl/core_names.h.in" > $@ include/openssl/crmf.h: include/openssl/crmf.h.in configdata.pm "$(PERL)" "-I$(BLDDIR)" -Mconfigdata "util/dofile.pl" \ "-omakefile" "include/openssl/crmf.h.in" > $@ @@ -93,6 +106,9 @@ include/openssl/ui.h: include/openssl/ui.h.in configdata.pm include/openssl/x509.h: include/openssl/x509.h.in configdata.pm $(PERL) -I$(BLDDIR) -Mconfigdata util/dofile.pl \ "-omakefile" "include/openssl/x509.h.in" > $@ +include/openssl/x509_acert.h: include/openssl/x509_acert.h.in configdata.pm + $(PERL) -I$(BLDDIR) -Mconfigdata util/dofile.pl \ + "-omakefile" "include/openssl/x509_acert.h.in" > $@ include/openssl/x509_vfy.h: include/openssl/x509_vfy.h.in configdata.pm $(PERL) -I$(BLDDIR) -Mconfigdata util/dofile.pl \ "-omakefile" "include/openssl/x509_vfy.h.in" > $@ @@ -106,21 +122,26 @@ providers/common/include/prov/der_dsa.h: providers/common/include/prov/der_dsa.h providers/common/include/prov/der_wrap.h: providers/common/include/prov/der_wrap.h.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm $(PERL) "-I." "-Iproviders/common/der" -Mconfigdata -Moids_to_c "util/dofile.pl" "-oMakefile" providers/common/include/prov/der_wrap.h.in > $@ -providers/common/include/prov/der_rsa.h: providers/common/include/prov/der_wrap.h.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm +providers/common/include/prov/der_rsa.h: providers/common/include/prov/der_rsa.h.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm $(PERL) "-I." "-Iproviders/common/der" -Mconfigdata -Moids_to_c "util/dofile.pl" "-oMakefile" providers/common/include/prov/der_rsa.h.in > $@ -providers/common/include/prov/der_ecx.h: providers/common/include/prov/der_wrap.h.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm +providers/common/include/prov/der_ecx.h: providers/common/include/prov/der_ecx.h.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm $(PERL) "-I." "-Iproviders/common/der" -Mconfigdata -Moids_to_c "util/dofile.pl" "-oMakefile" providers/common/include/prov/der_ecx.h.in > $@ -providers/common/include/prov/der_sm2.h: providers/common/include/prov/der_wrap.h.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm +providers/common/include/prov/der_sm2.h: providers/common/include/prov/der_sm2.h.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm $(PERL) "-I." "-Iproviders/common/der" -Mconfigdata -Moids_to_c "util/dofile.pl" "-oMakefile" providers/common/include/prov/der_sm2.h.in > $@ -providers/common/include/prov/der_ec.h: providers/common/include/prov/der_wrap.h.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm +providers/common/include/prov/der_ec.h: providers/common/include/prov/der_ec.h.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm $(PERL) "-I." "-Iproviders/common/der" -Mconfigdata -Moids_to_c "util/dofile.pl" "-oMakefile" providers/common/include/prov/der_ec.h.in > $@ -providers/common/include/prov/der_digests.h: providers/common/include/prov/der_wrap.h.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm +providers/common/include/prov/der_digests.h: providers/common/include/prov/der_digests.h.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm $(PERL) "-I." 
"-Iproviders/common/der" -Mconfigdata -Moids_to_c "util/dofile.pl" "-oMakefile" providers/common/include/prov/der_digests.h.in > $@ +providers/common/include/prov/der_ml_dsa.h: providers/common/include/prov/der_ml_dsa.h.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm + $(PERL) "-I." "-Iproviders/common/der" -Mconfigdata -Moids_to_c "util/dofile.pl" "-oMakefile" providers/common/include/prov/der_ml_dsa.h.in > $@ + +providers/common/include/prov/der_slh_dsa.h: providers/common/include/prov/der_slh_dsa.h.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm + $(PERL) "-I." "-Iproviders/common/der" -Mconfigdata -Moids_to_c "util/dofile.pl" "-oMakefile" providers/common/include/prov/der_slh_dsa.h.in > $@ apps/progs.h: apps/progs.c @@ -183,6 +204,11 @@ crypto/aes/aesni-x86_64.obj: "crypto/aes/aesni-x86_64.asm" crypto/aes/aesni-x86_64.asm: set ASM=$(AS) "$(PERL)" "crypto/aes/asm/aesni-x86_64.pl" $(PERLASM_SCHEME) $@ +crypto/aes/aesni-xts-avx512.obj: "crypto/aes/aesni-xts-avx512.asm" + $(AS) $(LIB_ASFLAGS) $(ASOUTFLAG)$@ "crypto/aes/aesni-xts-avx512.asm" +crypto/aes/aesni-xts-avx512.asm: + set ASM=$(AS) + "$(PERL)" "crypto/aes/asm/aesni-xts-avx512.pl" $(PERLASM_SCHEME) $@ crypto/aes/bsaes-x86_64.obj: "crypto/aes/bsaes-x86_64.asm" $(AS) $(LIB_ASFLAGS) $(ASOUTFLAG)$@ "crypto/aes/bsaes-x86_64.asm" crypto/aes/bsaes-x86_64.asm: @@ -734,9 +760,24 @@ crypto/bn/rsaz-avx2.obj: "crypto/bn/rsaz-avx2.asm" crypto/bn/rsaz-avx2.asm: set ASM=$(AS) "$(PERL)" "crypto/bn/asm/rsaz-avx2.pl" $(PERLASM_SCHEME) $@ -crypto/bn/rsaz-avx512.asm: +crypto/bn/rsaz-2k-avx512.asm: set ASM=$(AS) - "$(PERL)" "crypto/bn/asm/rsaz-avx512.pl" $(PERLASM_SCHEME) $@ + "$(PERL)" "crypto/bn/asm/rsaz-2k-avx512.pl" $(PERLASM_SCHEME) $@ +crypto/bn/rsaz-2k-avxifma.asm: + set ASM=$(AS) + "$(PERL)" "crypto/bn/asm/rsaz-2k-avxifma.pl" $(PERLASM_SCHEME) $@ +crypto/bn/rsaz-3k-avx512.asm: + set ASM=$(AS) + "$(PERL)" "crypto/bn/asm/rsaz-3k-avx512.pl" $(PERLASM_SCHEME) $@ +crypto/bn/rsaz-3k-avxifma.asm: + set ASM=$(AS) + "$(PERL)" "crypto/bn/asm/rsaz-3k-avxifma.pl" $(PERLASM_SCHEME) $@ +crypto/bn/rsaz-4k-avx512.asm: + set ASM=$(AS) + "$(PERL)" "crypto/bn/asm/rsaz-4k-avx512.pl" $(PERLASM_SCHEME) $@ +crypto/bn/rsaz-4k-avxifma.asm: + set ASM=$(AS) + "$(PERL)" "crypto/bn/asm/rsaz-4k-avxifma.pl" $(PERLASM_SCHEME) $@ crypto/bn/rsaz-x86_64.obj: "crypto/bn/rsaz-x86_64.asm" $(AS) $(LIB_ASFLAGS) $(ASOUTFLAG)$@ "crypto/bn/rsaz-x86_64.asm" crypto/bn/rsaz-x86_64.asm: @@ -1816,6 +1857,9 @@ crypto/mem_sec.d: "crypto/mem_sec.c" $(CC) $(LIB_CFLAGS) /I "." /I "crypto/include" /I "include" $(LIB_CPPFLAGS) /Zs /showIncludes "crypto/mem_sec.c" 2>&1 > crypto/mem_sec.d crypto/mem_sec.obj: crypto/mem_sec.d $(CC) $(LIB_CFLAGS) /I "." /I "crypto/include" /I "include" $(LIB_CPPFLAGS) -c $(COUTFLAG)$@ "crypto/mem_sec.c" +crypto/modes/aes-gcm-avx512.asm: + set ASM=$(AS) + "$(PERL)" "crypto/modes/asm/aes-gcm-avx512.pl" $(PERLASM_SCHEME) $@ crypto/modes/aesni-gcm-x86_64.obj: "crypto/modes/aesni-gcm-x86_64.asm" $(AS) $(LIB_ASFLAGS) $(ASOUTFLAG)$@ "crypto/modes/aesni-gcm-x86_64.asm" crypto/modes/aesni-gcm-x86_64.asm: @@ -1950,6 +1994,8 @@ crypto/ocsp/v3_ocsp.d: "crypto/ocsp/v3_ocsp.c" $(CC) $(LIB_CFLAGS) /I "." /I "crypto/include" /I "include" $(LIB_CPPFLAGS) /Zs /showIncludes "crypto/ocsp/v3_ocsp.c" 2>&1 > crypto/ocsp/v3_ocsp.d crypto/ocsp/v3_ocsp.obj: crypto/ocsp/v3_ocsp.d $(CC) $(LIB_CFLAGS) /I "." 
/I "crypto/include" /I "include" $(LIB_CPPFLAGS) -c $(COUTFLAG)$@ "crypto/ocsp/v3_ocsp.c" +crypto/params_idx.c: crypto/params_idx.c.in configdata.pm util/perl/OpenSSL/paramnames.pm + "$(PERL)" "-I." "-Iutil/perl" "-Mconfigdata" "-MOpenSSL::paramnames" "util/dofile.pl" "-omakefile" crypto/params_idx.c.in > $@ crypto/pem/pem_all.d: "crypto/pem/pem_all.c" $(CC) $(LIB_CFLAGS) /I "." /I "crypto/include" /I "include" $(LIB_CPPFLAGS) /Zs /showIncludes "crypto/pem/pem_all.c" 2>&1 > crypto/pem/pem_all.d crypto/pem/pem_all.obj: crypto/pem/pem_all.d @@ -3070,9 +3116,15 @@ providers/common/der/der_ec_gen.c: providers/common/der/der_ec_gen.c.in provider providers/common/der/der_ecx_gen.c: providers/common/der/der_ecx_gen.c.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm $(PERL) "-I." "-Iproviders/common/der" -Mconfigdata -Moids_to_c "util/dofile.pl" "-oMakefile" providers/common/der/der_ecx_gen.c.in > $@ +providers/common/der/der_ml_dsa_gen.c: providers/common/der/der_ml_dsa_gen.c.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm + $(PERL) "-I." "-Iproviders/common/der" -Mconfigdata -Moids_to_c "util/dofile.pl" "-oMakefile" providers/common/der/der_ml_dsa_gen.c.in > $@ + providers/common/der/der_rsa_gen.c: providers/common/der/der_rsa_gen.c.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm $(PERL) "-I." "-Iproviders/common/der" -Mconfigdata -Moids_to_c "util/dofile.pl" "-oMakefile" providers/common/der/der_rsa_gen.c.in > $@ +providers/common/der/der_slh_dsa_gen.c: providers/common/der/der_slh_dsa_gen.c.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm + $(PERL) "-I." "-Iproviders/common/der" -Mconfigdata -Moids_to_c "util/dofile.pl" "-oMakefile" providers/common/der/der_slh_dsa_gen.c.in > $@ + providers/common/der/der_sm2_gen.c: providers/common/der/der_sm2_gen.c.in providers/common/der/oids_to_c.pm configdata.pm providers/common/der/oids_to_c.pm $(PERL) "-I." 
"-Iproviders/common/der" -Mconfigdata -Moids_to_c "util/dofile.pl" "-oMakefile" providers/common/der/der_sm2_gen.c.in > $@ diff --git a/deps/openssl/config/archs/BSD-x86/asm/configdata.pm b/deps/openssl/config/archs/BSD-x86/asm/configdata.pm index 39e95e30234882..2d6abc8184cdff 100644 --- a/deps/openssl/config/archs/BSD-x86/asm/configdata.pm +++ b/deps/openssl/config/archs/BSD-x86/asm/configdata.pm @@ -156,7 +156,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -203,7 +203,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -255,11 +255,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "BSD-x86", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/BSD-x86/asm/crypto/buildinf.h b/deps/openssl/config/archs/BSD-x86/asm/crypto/buildinf.h index b746f98ac210c5..93545e7fcec163 100644 --- a/deps/openssl/config/archs/BSD-x86/asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/BSD-x86/asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: BSD-x86" -#define DATE "built on: Wed Mar 5 20:59:02 2025 UTC" +#define DATE "built on: Sun Jul 20 00:45:41 2025 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/BSD-x86/asm/include/openssl/opensslv.h b/deps/openssl/config/archs/BSD-x86/asm/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/BSD-x86/asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/BSD-x86/asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. 
*/ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/BSD-x86/asm_avx2/configdata.pm b/deps/openssl/config/archs/BSD-x86/asm_avx2/configdata.pm index 83d433e4553ac2..b1de949bf4761b 100644 --- a/deps/openssl/config/archs/BSD-x86/asm_avx2/configdata.pm +++ b/deps/openssl/config/archs/BSD-x86/asm_avx2/configdata.pm @@ -156,7 +156,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -203,7 +203,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -255,11 +255,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "BSD-x86", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/BSD-x86/asm_avx2/crypto/buildinf.h b/deps/openssl/config/archs/BSD-x86/asm_avx2/crypto/buildinf.h index 27f03cc9d7763b..f8db7c7b0372fa 100644 --- a/deps/openssl/config/archs/BSD-x86/asm_avx2/crypto/buildinf.h +++ b/deps/openssl/config/archs/BSD-x86/asm_avx2/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: BSD-x86" -#define DATE "built on: Wed Mar 5 20:59:15 2025 UTC" +#define DATE "built on: Sun Jul 20 00:45:53 2025 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/BSD-x86/asm_avx2/include/openssl/opensslv.h b/deps/openssl/config/archs/BSD-x86/asm_avx2/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/BSD-x86/asm_avx2/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/BSD-x86/asm_avx2/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. 
*/ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/BSD-x86/no-asm/configdata.pm b/deps/openssl/config/archs/BSD-x86/no-asm/configdata.pm index 770a3a3326a3fa..867ed934bdccc1 100644 --- a/deps/openssl/config/archs/BSD-x86/no-asm/configdata.pm +++ b/deps/openssl/config/archs/BSD-x86/no-asm/configdata.pm @@ -154,7 +154,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -202,7 +202,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-asm no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -255,11 +255,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "BSD-x86", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/BSD-x86/no-asm/crypto/buildinf.h b/deps/openssl/config/archs/BSD-x86/no-asm/crypto/buildinf.h index 4c40b7b89133fb..9b11cde92c0301 100644 --- a/deps/openssl/config/archs/BSD-x86/no-asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/BSD-x86/no-asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: BSD-x86" -#define DATE "built on: Wed Mar 5 20:59:28 2025 UTC" +#define DATE "built on: Sun Jul 20 00:46:06 2025 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/BSD-x86/no-asm/include/openssl/opensslv.h b/deps/openssl/config/archs/BSD-x86/no-asm/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/BSD-x86/no-asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/BSD-x86/no-asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. 
*/ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/BSD-x86_64/asm/configdata.pm b/deps/openssl/config/archs/BSD-x86_64/asm/configdata.pm index c57089f221c51a..d2b569260f5e6c 100644 --- a/deps/openssl/config/archs/BSD-x86_64/asm/configdata.pm +++ b/deps/openssl/config/archs/BSD-x86_64/asm/configdata.pm @@ -156,7 +156,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -203,7 +203,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -255,11 +255,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "BSD-x86_64", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/BSD-x86_64/asm/crypto/buildinf.h b/deps/openssl/config/archs/BSD-x86_64/asm/crypto/buildinf.h index ec4aaf9a16fe37..8b75c2e7cdad2a 100644 --- a/deps/openssl/config/archs/BSD-x86_64/asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/BSD-x86_64/asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: BSD-x86_64" -#define DATE "built on: Wed Mar 5 20:59:39 2025 UTC" +#define DATE "built on: Sun Jul 20 00:46:18 2025 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/BSD-x86_64/asm/include/openssl/opensslv.h b/deps/openssl/config/archs/BSD-x86_64/asm/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/BSD-x86_64/asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/BSD-x86_64/asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. 
*/ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/BSD-x86_64/asm_avx2/configdata.pm b/deps/openssl/config/archs/BSD-x86_64/asm_avx2/configdata.pm index e918957e5c4e57..ca218619df1f17 100644 --- a/deps/openssl/config/archs/BSD-x86_64/asm_avx2/configdata.pm +++ b/deps/openssl/config/archs/BSD-x86_64/asm_avx2/configdata.pm @@ -156,7 +156,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -203,7 +203,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -255,11 +255,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "BSD-x86_64", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/BSD-x86_64/asm_avx2/crypto/buildinf.h b/deps/openssl/config/archs/BSD-x86_64/asm_avx2/crypto/buildinf.h index 04524de22c1978..a2efc4655115f8 100644 --- a/deps/openssl/config/archs/BSD-x86_64/asm_avx2/crypto/buildinf.h +++ b/deps/openssl/config/archs/BSD-x86_64/asm_avx2/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: BSD-x86_64" -#define DATE "built on: Wed Mar 5 20:59:54 2025 UTC" +#define DATE "built on: Sun Jul 20 00:46:33 2025 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/BSD-x86_64/asm_avx2/include/openssl/opensslv.h b/deps/openssl/config/archs/BSD-x86_64/asm_avx2/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/BSD-x86_64/asm_avx2/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/BSD-x86_64/asm_avx2/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. 
*/ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/BSD-x86_64/no-asm/configdata.pm b/deps/openssl/config/archs/BSD-x86_64/no-asm/configdata.pm index 559f9276be9920..11790411015e3e 100644 --- a/deps/openssl/config/archs/BSD-x86_64/no-asm/configdata.pm +++ b/deps/openssl/config/archs/BSD-x86_64/no-asm/configdata.pm @@ -154,7 +154,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -202,7 +202,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-asm no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -255,11 +255,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "BSD-x86_64", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/BSD-x86_64/no-asm/crypto/buildinf.h b/deps/openssl/config/archs/BSD-x86_64/no-asm/crypto/buildinf.h index be71bd3c561a72..e2ff6c7729ef02 100644 --- a/deps/openssl/config/archs/BSD-x86_64/no-asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/BSD-x86_64/no-asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: BSD-x86_64" -#define DATE "built on: Wed Mar 5 21:00:10 2025 UTC" +#define DATE "built on: Sun Jul 20 00:46:48 2025 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/BSD-x86_64/no-asm/include/openssl/opensslv.h b/deps/openssl/config/archs/BSD-x86_64/no-asm/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/BSD-x86_64/no-asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/BSD-x86_64/no-asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. 
*/ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/VC-WIN32/asm/configdata.pm b/deps/openssl/config/archs/VC-WIN32/asm/configdata.pm index ed4f789fd2db28..2f73525b1e423a 100644 --- a/deps/openssl/config/archs/VC-WIN32/asm/configdata.pm +++ b/deps/openssl/config/archs/VC-WIN32/asm/configdata.pm @@ -165,7 +165,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -216,7 +216,7 @@ our %config = ( ], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -268,11 +268,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "VC-WIN32", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "lib", @@ -287,7 +287,7 @@ our %target = ( "LDFLAGS" => "/nologo /debug", "MT" => "mt", "MTFLAGS" => "-nologo", - "RANLIB" => "CODE(0x5565b669dcb0)", + "RANLIB" => "CODE(0x55b1c3240528)", "RC" => "rc", "_conf_fname_int" => [ "Configurations/00-base-templates.conf", diff --git a/deps/openssl/config/archs/VC-WIN32/asm/crypto/buildinf.h b/deps/openssl/config/archs/VC-WIN32/asm/crypto/buildinf.h index 48ff5ac1c5a899..0aa05955cb7ded 100644 --- a/deps/openssl/config/archs/VC-WIN32/asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/VC-WIN32/asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: " -#define DATE "built on: Wed Mar 5 21:09:10 2025 UTC" +#define DATE "built on: Sun Jul 20 00:55:52 2025 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/VC-WIN32/asm/include/openssl/opensslv.h b/deps/openssl/config/archs/VC-WIN32/asm/include/openssl/opensslv.h index c0590cffa999a6..94f44daf376f8e 100644 --- a/deps/openssl/config/archs/VC-WIN32/asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/VC-WIN32/asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. 
*/ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/VC-WIN32/asm_avx2/configdata.pm b/deps/openssl/config/archs/VC-WIN32/asm_avx2/configdata.pm index 815f2a5d15e483..e9d7ebaf5ec8e4 100644 --- a/deps/openssl/config/archs/VC-WIN32/asm_avx2/configdata.pm +++ b/deps/openssl/config/archs/VC-WIN32/asm_avx2/configdata.pm @@ -165,7 +165,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -216,7 +216,7 @@ our %config = ( ], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -268,11 +268,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "VC-WIN32", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "lib", @@ -287,7 +287,7 @@ our %target = ( "LDFLAGS" => "/nologo /debug", "MT" => "mt", "MTFLAGS" => "-nologo", - "RANLIB" => "CODE(0x55faeca92fa0)", + "RANLIB" => "CODE(0x55c13a704328)", "RC" => "rc", "_conf_fname_int" => [ "Configurations/00-base-templates.conf", diff --git a/deps/openssl/config/archs/VC-WIN32/asm_avx2/crypto/buildinf.h b/deps/openssl/config/archs/VC-WIN32/asm_avx2/crypto/buildinf.h index 4072d827907f5c..f90d9391b1b15c 100644 --- a/deps/openssl/config/archs/VC-WIN32/asm_avx2/crypto/buildinf.h +++ b/deps/openssl/config/archs/VC-WIN32/asm_avx2/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: " -#define DATE "built on: Wed Mar 5 21:09:22 2025 UTC" +#define DATE "built on: Sun Jul 20 00:56:04 2025 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/VC-WIN32/asm_avx2/include/openssl/opensslv.h b/deps/openssl/config/archs/VC-WIN32/asm_avx2/include/openssl/opensslv.h index c0590cffa999a6..94f44daf376f8e 100644 --- a/deps/openssl/config/archs/VC-WIN32/asm_avx2/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/VC-WIN32/asm_avx2/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. 
*/ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/VC-WIN32/no-asm/configdata.pm b/deps/openssl/config/archs/VC-WIN32/no-asm/configdata.pm index 2b33a586730255..a92b4f30d07262 100644 --- a/deps/openssl/config/archs/VC-WIN32/no-asm/configdata.pm +++ b/deps/openssl/config/archs/VC-WIN32/no-asm/configdata.pm @@ -163,7 +163,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -215,7 +215,7 @@ our %config = ( ], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-asm no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -268,11 +268,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "VC-WIN32", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "lib", @@ -287,7 +287,7 @@ our %target = ( "LDFLAGS" => "/nologo /debug", "MT" => "mt", "MTFLAGS" => "-nologo", - "RANLIB" => "CODE(0x555d90669138)", + "RANLIB" => "CODE(0x556f297fb5a8)", "RC" => "rc", "_conf_fname_int" => [ "Configurations/00-base-templates.conf", diff --git a/deps/openssl/config/archs/VC-WIN32/no-asm/crypto/buildinf.h b/deps/openssl/config/archs/VC-WIN32/no-asm/crypto/buildinf.h index 602c9cefde797b..8be833a43a513d 100644 --- a/deps/openssl/config/archs/VC-WIN32/no-asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/VC-WIN32/no-asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: " -#define DATE "built on: Wed Mar 5 21:09:33 2025 UTC" +#define DATE "built on: Sun Jul 20 00:56:16 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/VC-WIN32/no-asm/include/openssl/opensslv.h b/deps/openssl/config/archs/VC-WIN32/no-asm/include/openssl/opensslv.h index c0590cffa999a6..94f44daf376f8e 100644 --- a/deps/openssl/config/archs/VC-WIN32/no-asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/VC-WIN32/no-asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/VC-WIN64-ARM/no-asm/configdata.pm b/deps/openssl/config/archs/VC-WIN64-ARM/no-asm/configdata.pm index 8a29ae7726d4fe..8947bd53f11632 100644 --- a/deps/openssl/config/archs/VC-WIN64-ARM/no-asm/configdata.pm +++ b/deps/openssl/config/archs/VC-WIN64-ARM/no-asm/configdata.pm @@ -163,7 +163,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -213,7 +213,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-asm no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -266,11 +266,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "VC-WIN64-ARM", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "lib", @@ -283,7 +283,7 @@ our %target = ( "LDFLAGS" => "/nologo /debug", "MT" => "mt", "MTFLAGS" => "-nologo", - "RANLIB" => "CODE(0x560478385a28)", + "RANLIB" => "CODE(0x55be6aa845d8)", "RC" => "rc", "_conf_fname_int" => [ "Configurations/00-base-templates.conf", diff --git a/deps/openssl/config/archs/VC-WIN64-ARM/no-asm/crypto/buildinf.h b/deps/openssl/config/archs/VC-WIN64-ARM/no-asm/crypto/buildinf.h index dfdc92098dec5e..4ee1f68c013b77 100644 --- a/deps/openssl/config/archs/VC-WIN64-ARM/no-asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/VC-WIN64-ARM/no-asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: VC-WIN64-ARM" -#define DATE "built on: Wed Mar 5 21:09:44 2025 UTC" +#define DATE "built on: 
Sun Jul 20 00:56:26 2025 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/VC-WIN64-ARM/no-asm/include/openssl/opensslv.h b/deps/openssl/config/archs/VC-WIN64-ARM/no-asm/include/openssl/opensslv.h index c0590cffa999a6..94f44daf376f8e 100644 --- a/deps/openssl/config/archs/VC-WIN64-ARM/no-asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/VC-WIN64-ARM/no-asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/VC-WIN64A/asm/configdata.pm b/deps/openssl/config/archs/VC-WIN64A/asm/configdata.pm index 61c50ea51d063d..026579ed1d6bad 100644 --- a/deps/openssl/config/archs/VC-WIN64A/asm/configdata.pm +++ b/deps/openssl/config/archs/VC-WIN64A/asm/configdata.pm @@ -168,7 +168,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -219,7 +219,7 @@ our %config = ( ], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -271,11 +271,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "VC-WIN64A", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "lib", @@ -290,7 +290,7 @@ our %target = ( "LDFLAGS" => "/nologo /debug", "MT" => "mt", "MTFLAGS" => "-nologo", - "RANLIB" => "CODE(0x55fdfa62e9a0)", + "RANLIB" => "CODE(0x55a1fa929968)", "RC" => "rc", "_conf_fname_int" => [ "Configurations/00-base-templates.conf", diff --git a/deps/openssl/config/archs/VC-WIN64A/asm/crypto/buildinf.h b/deps/openssl/config/archs/VC-WIN64A/asm/crypto/buildinf.h index 5e4f9fa85b782b..050b7f8f6a2db0 100644 --- a/deps/openssl/config/archs/VC-WIN64A/asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/VC-WIN64A/asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: " -#define DATE "built on: Wed Mar 5 21:08:30 2025 UTC" +#define 
DATE "built on: Sun Jul 20 00:55:12 2025 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/VC-WIN64A/asm/include/openssl/opensslv.h b/deps/openssl/config/archs/VC-WIN64A/asm/include/openssl/opensslv.h index c0590cffa999a6..94f44daf376f8e 100644 --- a/deps/openssl/config/archs/VC-WIN64A/asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/VC-WIN64A/asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/VC-WIN64A/asm_avx2/configdata.pm b/deps/openssl/config/archs/VC-WIN64A/asm_avx2/configdata.pm index cdd5aa9852dbb6..b774af6c5bb0b0 100644 --- a/deps/openssl/config/archs/VC-WIN64A/asm_avx2/configdata.pm +++ b/deps/openssl/config/archs/VC-WIN64A/asm_avx2/configdata.pm @@ -168,7 +168,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -219,7 +219,7 @@ our %config = ( ], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -271,11 +271,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "VC-WIN64A", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "lib", @@ -290,7 +290,7 @@ our %target = ( "LDFLAGS" => "/nologo /debug", "MT" => "mt", "MTFLAGS" => "-nologo", - "RANLIB" => "CODE(0x55884154b690)", + "RANLIB" => "CODE(0x5590ca26eb18)", "RC" => "rc", "_conf_fname_int" => [ "Configurations/00-base-templates.conf", diff --git a/deps/openssl/config/archs/VC-WIN64A/asm_avx2/crypto/buildinf.h b/deps/openssl/config/archs/VC-WIN64A/asm_avx2/crypto/buildinf.h index 9757142ff5fe68..4271571db24d0a 100644 --- a/deps/openssl/config/archs/VC-WIN64A/asm_avx2/crypto/buildinf.h +++ b/deps/openssl/config/archs/VC-WIN64A/asm_avx2/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: " -#define DATE "built on: Wed 
Mar 5 21:08:44 2025 UTC" +#define DATE "built on: Sun Jul 20 00:55:27 2025 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/VC-WIN64A/asm_avx2/include/openssl/opensslv.h b/deps/openssl/config/archs/VC-WIN64A/asm_avx2/include/openssl/opensslv.h index c0590cffa999a6..94f44daf376f8e 100644 --- a/deps/openssl/config/archs/VC-WIN64A/asm_avx2/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/VC-WIN64A/asm_avx2/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/VC-WIN64A/no-asm/configdata.pm b/deps/openssl/config/archs/VC-WIN64A/no-asm/configdata.pm index 193001b8aae923..854195aa427b28 100644 --- a/deps/openssl/config/archs/VC-WIN64A/no-asm/configdata.pm +++ b/deps/openssl/config/archs/VC-WIN64A/no-asm/configdata.pm @@ -166,7 +166,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -218,7 +218,7 @@ our %config = ( ], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-asm no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -271,11 +271,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "VC-WIN64A", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "lib", @@ -290,7 +290,7 @@ our %target = ( "LDFLAGS" => "/nologo /debug", "MT" => "mt", "MTFLAGS" => "-nologo", - "RANLIB" => "CODE(0x55b60e60bac8)", + "RANLIB" => "CODE(0x562f363ed508)", "RC" => "rc", "_conf_fname_int" => [ "Configurations/00-base-templates.conf", diff --git a/deps/openssl/config/archs/VC-WIN64A/no-asm/crypto/buildinf.h b/deps/openssl/config/archs/VC-WIN64A/no-asm/crypto/buildinf.h index f0be853b9669cc..2c32ce450f283c 100644 --- a/deps/openssl/config/archs/VC-WIN64A/no-asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/VC-WIN64A/no-asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM 
"platform: " -#define DATE "built on: Wed Mar 5 21:08:59 2025 UTC" +#define DATE "built on: Sun Jul 20 00:55:41 2025 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/VC-WIN64A/no-asm/include/openssl/opensslv.h b/deps/openssl/config/archs/VC-WIN64A/no-asm/include/openssl/opensslv.h index c0590cffa999a6..94f44daf376f8e 100644 --- a/deps/openssl/config/archs/VC-WIN64A/no-asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/VC-WIN64A/no-asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/aix64-gcc-as/asm/configdata.pm b/deps/openssl/config/archs/aix64-gcc-as/asm/configdata.pm index 3e480f50048240..400d206fd0c3f7 100644 --- a/deps/openssl/config/archs/aix64-gcc-as/asm/configdata.pm +++ b/deps/openssl/config/archs/aix64-gcc-as/asm/configdata.pm @@ -156,7 +156,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -206,7 +206,7 @@ our %config = ( ], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -258,11 +258,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "aix64-gcc-as", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar -X64", diff --git a/deps/openssl/config/archs/aix64-gcc-as/asm/crypto/buildinf.h b/deps/openssl/config/archs/aix64-gcc-as/asm/crypto/buildinf.h index 7428cb5cab8f76..a4b827fcfdb29d 100644 --- a/deps/openssl/config/archs/aix64-gcc-as/asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/aix64-gcc-as/asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: aix64-gcc-as" -#define DATE "built on: Wed Mar 5 20:58:26 2025 UTC" +#define DATE "built on: Sun Jul 20 00:45:04 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/aix64-gcc-as/asm/include/openssl/opensslv.h b/deps/openssl/config/archs/aix64-gcc-as/asm/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/aix64-gcc-as/asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/aix64-gcc-as/asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/aix64-gcc-as/asm_avx2/configdata.pm b/deps/openssl/config/archs/aix64-gcc-as/asm_avx2/configdata.pm index b98107d8f67bf9..393fad24afa5b7 100644 --- a/deps/openssl/config/archs/aix64-gcc-as/asm_avx2/configdata.pm +++ b/deps/openssl/config/archs/aix64-gcc-as/asm_avx2/configdata.pm @@ -156,7 +156,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -206,7 +206,7 @@ our %config = ( ], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -258,11 +258,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "aix64-gcc-as", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar -X64", diff --git a/deps/openssl/config/archs/aix64-gcc-as/asm_avx2/crypto/buildinf.h b/deps/openssl/config/archs/aix64-gcc-as/asm_avx2/crypto/buildinf.h index 6b0cc03405c2ad..4bf187469f24fe 100644 --- a/deps/openssl/config/archs/aix64-gcc-as/asm_avx2/crypto/buildinf.h +++ b/deps/openssl/config/archs/aix64-gcc-as/asm_avx2/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: aix64-gcc-as" -#define DATE "built on: Wed Mar 5 20:58:38 2025 UTC" +#define DATE "built on: Sun Jul 20 00:45:17 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/aix64-gcc-as/asm_avx2/include/openssl/opensslv.h b/deps/openssl/config/archs/aix64-gcc-as/asm_avx2/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/aix64-gcc-as/asm_avx2/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/aix64-gcc-as/asm_avx2/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/aix64-gcc-as/no-asm/configdata.pm b/deps/openssl/config/archs/aix64-gcc-as/no-asm/configdata.pm index 2e8a240a9868c1..c291a8d8d7550a 100644 --- a/deps/openssl/config/archs/aix64-gcc-as/no-asm/configdata.pm +++ b/deps/openssl/config/archs/aix64-gcc-as/no-asm/configdata.pm @@ -154,7 +154,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -205,7 +205,7 @@ our %config = ( ], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-asm no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -258,11 +258,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "aix64-gcc-as", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar -X64", diff --git a/deps/openssl/config/archs/aix64-gcc-as/no-asm/crypto/buildinf.h b/deps/openssl/config/archs/aix64-gcc-as/no-asm/crypto/buildinf.h index d00971063b8b48..28b322a172b0d4 100644 --- a/deps/openssl/config/archs/aix64-gcc-as/no-asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/aix64-gcc-as/no-asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: aix64-gcc-as" -#define DATE "built on: Wed Mar 5 20:58:51 2025 UTC" +#define DATE "built on: Sun Jul 20 00:45:29 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/aix64-gcc-as/no-asm/include/openssl/opensslv.h b/deps/openssl/config/archs/aix64-gcc-as/no-asm/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/aix64-gcc-as/no-asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/aix64-gcc-as/no-asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/darwin-i386-cc/asm/configdata.pm b/deps/openssl/config/archs/darwin-i386-cc/asm/configdata.pm index fa10bed6fd6378..6fe0a303ac84ac 100644 --- a/deps/openssl/config/archs/darwin-i386-cc/asm/configdata.pm +++ b/deps/openssl/config/archs/darwin-i386-cc/asm/configdata.pm @@ -156,7 +156,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -206,7 +206,7 @@ our %config = ( ], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -258,11 +258,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "darwin-i386-cc", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/darwin-i386-cc/asm/crypto/buildinf.h b/deps/openssl/config/archs/darwin-i386-cc/asm/crypto/buildinf.h index 810053f7be5645..efc6054598df82 100644 --- a/deps/openssl/config/archs/darwin-i386-cc/asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/darwin-i386-cc/asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: darwin-i386-cc" -#define DATE "built on: Wed Mar 5 21:01:02 2025 UTC" +#define DATE "built on: Sun Jul 20 00:47:41 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/darwin-i386-cc/asm/include/openssl/opensslv.h b/deps/openssl/config/archs/darwin-i386-cc/asm/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/darwin-i386-cc/asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/darwin-i386-cc/asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/darwin-i386-cc/asm_avx2/configdata.pm b/deps/openssl/config/archs/darwin-i386-cc/asm_avx2/configdata.pm index 9bf997165c3e08..ee6e183540fbfd 100644 --- a/deps/openssl/config/archs/darwin-i386-cc/asm_avx2/configdata.pm +++ b/deps/openssl/config/archs/darwin-i386-cc/asm_avx2/configdata.pm @@ -156,7 +156,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -206,7 +206,7 @@ our %config = ( ], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -258,11 +258,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "darwin-i386-cc", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/darwin-i386-cc/asm_avx2/crypto/buildinf.h b/deps/openssl/config/archs/darwin-i386-cc/asm_avx2/crypto/buildinf.h index dfe6f4feb0011e..07eee0541a9085 100644 --- a/deps/openssl/config/archs/darwin-i386-cc/asm_avx2/crypto/buildinf.h +++ b/deps/openssl/config/archs/darwin-i386-cc/asm_avx2/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: darwin-i386-cc" -#define DATE "built on: Wed Mar 5 21:01:15 2025 UTC" +#define DATE "built on: Sun Jul 20 00:47:54 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/darwin-i386-cc/asm_avx2/include/openssl/opensslv.h b/deps/openssl/config/archs/darwin-i386-cc/asm_avx2/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/darwin-i386-cc/asm_avx2/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/darwin-i386-cc/asm_avx2/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/darwin-i386-cc/no-asm/configdata.pm b/deps/openssl/config/archs/darwin-i386-cc/no-asm/configdata.pm index 639b55a717b993..1ae9b1f168281e 100644 --- a/deps/openssl/config/archs/darwin-i386-cc/no-asm/configdata.pm +++ b/deps/openssl/config/archs/darwin-i386-cc/no-asm/configdata.pm @@ -154,7 +154,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -205,7 +205,7 @@ our %config = ( ], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-asm no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -258,11 +258,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "darwin-i386-cc", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/darwin-i386-cc/no-asm/crypto/buildinf.h b/deps/openssl/config/archs/darwin-i386-cc/no-asm/crypto/buildinf.h index b7162945595dfd..3cd3f564e0d1eb 100644 --- a/deps/openssl/config/archs/darwin-i386-cc/no-asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/darwin-i386-cc/no-asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: darwin-i386-cc" -#define DATE "built on: Wed Mar 5 21:01:27 2025 UTC" +#define DATE "built on: Sun Jul 20 00:48:07 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/darwin-i386-cc/no-asm/include/openssl/opensslv.h b/deps/openssl/config/archs/darwin-i386-cc/no-asm/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/darwin-i386-cc/no-asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/darwin-i386-cc/no-asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/darwin64-arm64-cc/asm/configdata.pm b/deps/openssl/config/archs/darwin64-arm64-cc/asm/configdata.pm index ae8c62971c1a23..b5d5870d3043ac 100644 --- a/deps/openssl/config/archs/darwin64-arm64-cc/asm/configdata.pm +++ b/deps/openssl/config/archs/darwin64-arm64-cc/asm/configdata.pm @@ -156,7 +156,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -206,7 +206,7 @@ our %config = ( ], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -258,11 +258,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "darwin64-arm64-cc", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/darwin64-arm64-cc/asm/crypto/buildinf.h b/deps/openssl/config/archs/darwin64-arm64-cc/asm/crypto/buildinf.h index 95c41f05765710..26de87668b8f01 100644 --- a/deps/openssl/config/archs/darwin64-arm64-cc/asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/darwin64-arm64-cc/asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: darwin64-arm64-cc" -#define DATE "built on: Wed Mar 5 21:01:39 2025 UTC" +#define DATE "built on: Sun Jul 20 00:48:18 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/darwin64-arm64-cc/asm/include/openssl/opensslv.h b/deps/openssl/config/archs/darwin64-arm64-cc/asm/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/darwin64-arm64-cc/asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/darwin64-arm64-cc/asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/darwin64-arm64-cc/asm_avx2/configdata.pm b/deps/openssl/config/archs/darwin64-arm64-cc/asm_avx2/configdata.pm index 125bb77e2f8c72..a623e9af23019a 100644 --- a/deps/openssl/config/archs/darwin64-arm64-cc/asm_avx2/configdata.pm +++ b/deps/openssl/config/archs/darwin64-arm64-cc/asm_avx2/configdata.pm @@ -156,7 +156,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -206,7 +206,7 @@ our %config = ( ], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -258,11 +258,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "darwin64-arm64-cc", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/darwin64-arm64-cc/asm_avx2/crypto/buildinf.h b/deps/openssl/config/archs/darwin64-arm64-cc/asm_avx2/crypto/buildinf.h index 8aa804c510ffe5..ae034bdf04e5bb 100644 --- a/deps/openssl/config/archs/darwin64-arm64-cc/asm_avx2/crypto/buildinf.h +++ b/deps/openssl/config/archs/darwin64-arm64-cc/asm_avx2/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: darwin64-arm64-cc" -#define DATE "built on: Wed Mar 5 21:01:51 2025 UTC" +#define DATE "built on: Sun Jul 20 00:48:30 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/darwin64-arm64-cc/asm_avx2/include/openssl/opensslv.h b/deps/openssl/config/archs/darwin64-arm64-cc/asm_avx2/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/darwin64-arm64-cc/asm_avx2/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/darwin64-arm64-cc/asm_avx2/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/darwin64-arm64-cc/no-asm/configdata.pm b/deps/openssl/config/archs/darwin64-arm64-cc/no-asm/configdata.pm index aeb47e095d8bf6..a1637a4150cbdb 100644 --- a/deps/openssl/config/archs/darwin64-arm64-cc/no-asm/configdata.pm +++ b/deps/openssl/config/archs/darwin64-arm64-cc/no-asm/configdata.pm @@ -154,7 +154,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -205,7 +205,7 @@ our %config = ( ], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-asm no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -258,11 +258,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "darwin64-arm64-cc", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/darwin64-arm64-cc/no-asm/crypto/buildinf.h b/deps/openssl/config/archs/darwin64-arm64-cc/no-asm/crypto/buildinf.h index d8139bf78a0a80..f729490565601c 100644 --- a/deps/openssl/config/archs/darwin64-arm64-cc/no-asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/darwin64-arm64-cc/no-asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: darwin64-arm64-cc" -#define DATE "built on: Wed Mar 5 21:02:03 2025 UTC" +#define DATE "built on: Sun Jul 20 00:48:42 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/darwin64-arm64-cc/no-asm/include/openssl/opensslv.h b/deps/openssl/config/archs/darwin64-arm64-cc/no-asm/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/darwin64-arm64-cc/no-asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/darwin64-arm64-cc/no-asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/darwin64-x86_64-cc/asm/configdata.pm b/deps/openssl/config/archs/darwin64-x86_64-cc/asm/configdata.pm index d30d5f5326758b..69f766218f5191 100644 --- a/deps/openssl/config/archs/darwin64-x86_64-cc/asm/configdata.pm +++ b/deps/openssl/config/archs/darwin64-x86_64-cc/asm/configdata.pm @@ -156,7 +156,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -206,7 +206,7 @@ our %config = ( ], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -258,11 +258,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "darwin64-x86_64-cc", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/darwin64-x86_64-cc/asm/crypto/buildinf.h b/deps/openssl/config/archs/darwin64-x86_64-cc/asm/crypto/buildinf.h index 99175da0e9b9fb..b56e7908a5289a 100644 --- a/deps/openssl/config/archs/darwin64-x86_64-cc/asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/darwin64-x86_64-cc/asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: darwin64-x86_64-cc" -#define DATE "built on: Wed Mar 5 21:00:21 2025 UTC" +#define DATE "built on: Sun Jul 20 00:47:00 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/darwin64-x86_64-cc/asm/include/openssl/opensslv.h b/deps/openssl/config/archs/darwin64-x86_64-cc/asm/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/darwin64-x86_64-cc/asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/darwin64-x86_64-cc/asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/darwin64-x86_64-cc/asm_avx2/configdata.pm b/deps/openssl/config/archs/darwin64-x86_64-cc/asm_avx2/configdata.pm index f504cbca94b1c6..552c8114dbd08e 100644 --- a/deps/openssl/config/archs/darwin64-x86_64-cc/asm_avx2/configdata.pm +++ b/deps/openssl/config/archs/darwin64-x86_64-cc/asm_avx2/configdata.pm @@ -156,7 +156,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -206,7 +206,7 @@ our %config = ( ], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -258,11 +258,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "darwin64-x86_64-cc", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/darwin64-x86_64-cc/asm_avx2/crypto/buildinf.h b/deps/openssl/config/archs/darwin64-x86_64-cc/asm_avx2/crypto/buildinf.h index f21ee48fcccf30..7a7d4993f210f8 100644 --- a/deps/openssl/config/archs/darwin64-x86_64-cc/asm_avx2/crypto/buildinf.h +++ b/deps/openssl/config/archs/darwin64-x86_64-cc/asm_avx2/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: darwin64-x86_64-cc" -#define DATE "built on: Wed Mar 5 21:00:36 2025 UTC" +#define DATE "built on: Sun Jul 20 00:47:15 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/darwin64-x86_64-cc/asm_avx2/include/openssl/opensslv.h b/deps/openssl/config/archs/darwin64-x86_64-cc/asm_avx2/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/darwin64-x86_64-cc/asm_avx2/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/darwin64-x86_64-cc/asm_avx2/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/darwin64-x86_64-cc/no-asm/configdata.pm b/deps/openssl/config/archs/darwin64-x86_64-cc/no-asm/configdata.pm index 853874fcaa33e7..042e010cd6665a 100644 --- a/deps/openssl/config/archs/darwin64-x86_64-cc/no-asm/configdata.pm +++ b/deps/openssl/config/archs/darwin64-x86_64-cc/no-asm/configdata.pm @@ -154,7 +154,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -205,7 +205,7 @@ our %config = ( ], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-asm no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -258,11 +258,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "darwin64-x86_64-cc", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/darwin64-x86_64-cc/no-asm/crypto/buildinf.h b/deps/openssl/config/archs/darwin64-x86_64-cc/no-asm/crypto/buildinf.h index 061a0193d55e39..ac87a9b79e256c 100644 --- a/deps/openssl/config/archs/darwin64-x86_64-cc/no-asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/darwin64-x86_64-cc/no-asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: darwin64-x86_64-cc" -#define DATE "built on: Wed Mar 5 21:00:51 2025 UTC" +#define DATE "built on: Sun Jul 20 00:47:30 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/darwin64-x86_64-cc/no-asm/include/openssl/opensslv.h b/deps/openssl/config/archs/darwin64-x86_64-cc/no-asm/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/darwin64-x86_64-cc/no-asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/darwin64-x86_64-cc/no-asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux-aarch64/asm/configdata.pm b/deps/openssl/config/archs/linux-aarch64/asm/configdata.pm index e7dd562dcf9949..929ed405307bfd 100644 --- a/deps/openssl/config/archs/linux-aarch64/asm/configdata.pm +++ b/deps/openssl/config/archs/linux-aarch64/asm/configdata.pm @@ -159,7 +159,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -207,7 +207,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "linux-aarch64", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/linux-aarch64/asm/crypto/buildinf.h b/deps/openssl/config/archs/linux-aarch64/asm/crypto/buildinf.h index 9b5fa460089703..ba449cee7440d2 100644 --- a/deps/openssl/config/archs/linux-aarch64/asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux-aarch64/asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux-aarch64" -#define DATE "built on: Wed Mar 5 21:02:14 2025 UTC" +#define DATE "built on: Sun Jul 20 00:48:54 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/linux-aarch64/asm/include/openssl/opensslv.h b/deps/openssl/config/archs/linux-aarch64/asm/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/linux-aarch64/asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux-aarch64/asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux-aarch64/asm_avx2/configdata.pm b/deps/openssl/config/archs/linux-aarch64/asm_avx2/configdata.pm index 0e7998767e7b63..a99cabfb9e138e 100644 --- a/deps/openssl/config/archs/linux-aarch64/asm_avx2/configdata.pm +++ b/deps/openssl/config/archs/linux-aarch64/asm_avx2/configdata.pm @@ -159,7 +159,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -207,7 +207,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "linux-aarch64", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/linux-aarch64/asm_avx2/crypto/buildinf.h b/deps/openssl/config/archs/linux-aarch64/asm_avx2/crypto/buildinf.h index e4f6099c6f70a8..f19b35ece0bf82 100644 --- a/deps/openssl/config/archs/linux-aarch64/asm_avx2/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux-aarch64/asm_avx2/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux-aarch64" -#define DATE "built on: Wed Mar 5 21:02:26 2025 UTC" +#define DATE "built on: Sun Jul 20 00:49:06 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/linux-aarch64/asm_avx2/include/openssl/opensslv.h b/deps/openssl/config/archs/linux-aarch64/asm_avx2/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/linux-aarch64/asm_avx2/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux-aarch64/asm_avx2/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux-aarch64/no-asm/configdata.pm b/deps/openssl/config/archs/linux-aarch64/no-asm/configdata.pm index dee85d62d789e0..ed65e22d411ba0 100644 --- a/deps/openssl/config/archs/linux-aarch64/no-asm/configdata.pm +++ b/deps/openssl/config/archs/linux-aarch64/no-asm/configdata.pm @@ -157,7 +157,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -206,7 +206,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-asm no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "linux-aarch64", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/linux-aarch64/no-asm/crypto/buildinf.h b/deps/openssl/config/archs/linux-aarch64/no-asm/crypto/buildinf.h index f011f915e4e457..367c678caab527 100644 --- a/deps/openssl/config/archs/linux-aarch64/no-asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux-aarch64/no-asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux-aarch64" -#define DATE "built on: Wed Mar 5 21:02:39 2025 UTC" +#define DATE "built on: Sun Jul 20 00:49:18 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/linux-aarch64/no-asm/include/openssl/opensslv.h b/deps/openssl/config/archs/linux-aarch64/no-asm/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/linux-aarch64/no-asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux-aarch64/no-asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux-armv4/asm/configdata.pm b/deps/openssl/config/archs/linux-armv4/asm/configdata.pm index 301b8298786081..07d8870da90f92 100644 --- a/deps/openssl/config/archs/linux-armv4/asm/configdata.pm +++ b/deps/openssl/config/archs/linux-armv4/asm/configdata.pm @@ -159,7 +159,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -207,7 +207,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "linux-armv4", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/linux-armv4/asm/crypto/buildinf.h b/deps/openssl/config/archs/linux-armv4/asm/crypto/buildinf.h index 9fbe102da05dc3..0b40c3d4700901 100644 --- a/deps/openssl/config/archs/linux-armv4/asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux-armv4/asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux-armv4" -#define DATE "built on: Wed Mar 5 21:02:50 2025 UTC" +#define DATE "built on: Sun Jul 20 00:49:30 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/linux-armv4/asm/include/openssl/opensslv.h b/deps/openssl/config/archs/linux-armv4/asm/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/linux-armv4/asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux-armv4/asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux-armv4/asm_avx2/configdata.pm b/deps/openssl/config/archs/linux-armv4/asm_avx2/configdata.pm index c6045ab640a969..52d02590cce526 100644 --- a/deps/openssl/config/archs/linux-armv4/asm_avx2/configdata.pm +++ b/deps/openssl/config/archs/linux-armv4/asm_avx2/configdata.pm @@ -159,7 +159,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -207,7 +207,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "linux-armv4", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/linux-armv4/asm_avx2/crypto/buildinf.h b/deps/openssl/config/archs/linux-armv4/asm_avx2/crypto/buildinf.h index 65593d5b1395c8..66a200afb92f0f 100644 --- a/deps/openssl/config/archs/linux-armv4/asm_avx2/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux-armv4/asm_avx2/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux-armv4" -#define DATE "built on: Wed Mar 5 21:03:03 2025 UTC" +#define DATE "built on: Sun Jul 20 00:49:42 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/linux-armv4/asm_avx2/include/openssl/opensslv.h b/deps/openssl/config/archs/linux-armv4/asm_avx2/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/linux-armv4/asm_avx2/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux-armv4/asm_avx2/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux-armv4/no-asm/configdata.pm b/deps/openssl/config/archs/linux-armv4/no-asm/configdata.pm index 3783c4b8d52ecb..6c5136f149b6b7 100644 --- a/deps/openssl/config/archs/linux-armv4/no-asm/configdata.pm +++ b/deps/openssl/config/archs/linux-armv4/no-asm/configdata.pm @@ -157,7 +157,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -206,7 +206,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-asm no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "linux-armv4", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/linux-armv4/no-asm/crypto/buildinf.h b/deps/openssl/config/archs/linux-armv4/no-asm/crypto/buildinf.h index f317b588f51938..119e00a73ff241 100644 --- a/deps/openssl/config/archs/linux-armv4/no-asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux-armv4/no-asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux-armv4" -#define DATE "built on: Wed Mar 5 21:03:15 2025 UTC" +#define DATE "built on: Sun Jul 20 00:49:55 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/linux-armv4/no-asm/include/openssl/opensslv.h b/deps/openssl/config/archs/linux-armv4/no-asm/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/linux-armv4/no-asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux-armv4/no-asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux-elf/asm/configdata.pm b/deps/openssl/config/archs/linux-elf/asm/configdata.pm index 4230adf831e541..a196f0e94c9c57 100644 --- a/deps/openssl/config/archs/linux-elf/asm/configdata.pm +++ b/deps/openssl/config/archs/linux-elf/asm/configdata.pm @@ -159,7 +159,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -207,7 +207,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "linux-elf", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/linux-elf/asm/crypto/buildinf.h b/deps/openssl/config/archs/linux-elf/asm/crypto/buildinf.h index b0d1da6fd25e1d..09d8c51fd6634b 100644 --- a/deps/openssl/config/archs/linux-elf/asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux-elf/asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux-elf" -#define DATE "built on: Wed Mar 5 21:03:26 2025 UTC" +#define DATE "built on: Sun Jul 20 00:50:06 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/linux-elf/asm/include/openssl/opensslv.h b/deps/openssl/config/archs/linux-elf/asm/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/linux-elf/asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux-elf/asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux-elf/asm_avx2/configdata.pm b/deps/openssl/config/archs/linux-elf/asm_avx2/configdata.pm index f2e251ddbc0ca5..d471b0f12c6e0c 100644 --- a/deps/openssl/config/archs/linux-elf/asm_avx2/configdata.pm +++ b/deps/openssl/config/archs/linux-elf/asm_avx2/configdata.pm @@ -159,7 +159,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -207,7 +207,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "linux-elf", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/linux-elf/asm_avx2/crypto/buildinf.h b/deps/openssl/config/archs/linux-elf/asm_avx2/crypto/buildinf.h index 1113f7c9c54c73..2beeed686ee37d 100644 --- a/deps/openssl/config/archs/linux-elf/asm_avx2/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux-elf/asm_avx2/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux-elf" -#define DATE "built on: Wed Mar 5 21:03:39 2025 UTC" +#define DATE "built on: Sun Jul 20 00:50:19 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/linux-elf/asm_avx2/include/openssl/opensslv.h b/deps/openssl/config/archs/linux-elf/asm_avx2/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/linux-elf/asm_avx2/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux-elf/asm_avx2/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux-elf/no-asm/configdata.pm b/deps/openssl/config/archs/linux-elf/no-asm/configdata.pm index 33c8a7298c2885..46eb6219b0cc4e 100644 --- a/deps/openssl/config/archs/linux-elf/no-asm/configdata.pm +++ b/deps/openssl/config/archs/linux-elf/no-asm/configdata.pm @@ -157,7 +157,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -206,7 +206,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-asm no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "linux-elf", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/linux-elf/no-asm/crypto/buildinf.h b/deps/openssl/config/archs/linux-elf/no-asm/crypto/buildinf.h index 732212bb3ef9ff..33618c608ece87 100644 --- a/deps/openssl/config/archs/linux-elf/no-asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux-elf/no-asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux-elf" -#define DATE "built on: Wed Mar 5 21:03:52 2025 UTC" +#define DATE "built on: Sun Jul 20 00:50:32 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/linux-elf/no-asm/include/openssl/opensslv.h b/deps/openssl/config/archs/linux-elf/no-asm/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/linux-elf/no-asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux-elf/no-asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux-ppc64le/asm/configdata.pm b/deps/openssl/config/archs/linux-ppc64le/asm/configdata.pm index f2225dbc54eda9..3b995f67c36697 100644 --- a/deps/openssl/config/archs/linux-ppc64le/asm/configdata.pm +++ b/deps/openssl/config/archs/linux-ppc64le/asm/configdata.pm @@ -159,7 +159,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -207,7 +207,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "linux-ppc64le", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/linux-ppc64le/asm/crypto/buildinf.h b/deps/openssl/config/archs/linux-ppc64le/asm/crypto/buildinf.h index 42c879116598fb..845aed0d962ee4 100644 --- a/deps/openssl/config/archs/linux-ppc64le/asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux-ppc64le/asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux-ppc64le" -#define DATE "built on: Wed Mar 5 21:04:45 2025 UTC" +#define DATE "built on: Sun Jul 20 00:51:26 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/linux-ppc64le/asm/include/openssl/opensslv.h b/deps/openssl/config/archs/linux-ppc64le/asm/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/linux-ppc64le/asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux-ppc64le/asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux-ppc64le/asm_avx2/configdata.pm b/deps/openssl/config/archs/linux-ppc64le/asm_avx2/configdata.pm index d56fa2d54c7bda..682c5800a799e2 100644 --- a/deps/openssl/config/archs/linux-ppc64le/asm_avx2/configdata.pm +++ b/deps/openssl/config/archs/linux-ppc64le/asm_avx2/configdata.pm @@ -159,7 +159,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -207,7 +207,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "linux-ppc64le", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/linux-ppc64le/asm_avx2/crypto/buildinf.h b/deps/openssl/config/archs/linux-ppc64le/asm_avx2/crypto/buildinf.h index d689af3d0377dd..c1b7325cdac31b 100644 --- a/deps/openssl/config/archs/linux-ppc64le/asm_avx2/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux-ppc64le/asm_avx2/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux-ppc64le" -#define DATE "built on: Wed Mar 5 21:04:58 2025 UTC" +#define DATE "built on: Sun Jul 20 00:51:38 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/linux-ppc64le/asm_avx2/include/openssl/opensslv.h b/deps/openssl/config/archs/linux-ppc64le/asm_avx2/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/linux-ppc64le/asm_avx2/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux-ppc64le/asm_avx2/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux-ppc64le/no-asm/configdata.pm b/deps/openssl/config/archs/linux-ppc64le/no-asm/configdata.pm index 254be4364f3ebf..22ffb5f8361da7 100644 --- a/deps/openssl/config/archs/linux-ppc64le/no-asm/configdata.pm +++ b/deps/openssl/config/archs/linux-ppc64le/no-asm/configdata.pm @@ -157,7 +157,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -206,7 +206,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-asm no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "linux-ppc64le", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/linux-ppc64le/no-asm/crypto/buildinf.h b/deps/openssl/config/archs/linux-ppc64le/no-asm/crypto/buildinf.h index e4ba8ad06045d0..e9af08baf9e967 100644 --- a/deps/openssl/config/archs/linux-ppc64le/no-asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux-ppc64le/no-asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux-ppc64le" -#define DATE "built on: Wed Mar 5 21:05:10 2025 UTC" +#define DATE "built on: Sun Jul 20 00:51:51 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/linux-ppc64le/no-asm/include/openssl/opensslv.h b/deps/openssl/config/archs/linux-ppc64le/no-asm/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/linux-ppc64le/no-asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux-ppc64le/no-asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux-x86_64/asm/configdata.pm b/deps/openssl/config/archs/linux-x86_64/asm/configdata.pm index a84203c4b5be8d..4c8b56f03574d7 100644 --- a/deps/openssl/config/archs/linux-x86_64/asm/configdata.pm +++ b/deps/openssl/config/archs/linux-x86_64/asm/configdata.pm @@ -159,7 +159,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -207,7 +207,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "linux-x86_64", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/linux-x86_64/asm/crypto/buildinf.h b/deps/openssl/config/archs/linux-x86_64/asm/crypto/buildinf.h index 98989e9a320464..85ed63989facdb 100644 --- a/deps/openssl/config/archs/linux-x86_64/asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux-x86_64/asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux-x86_64" -#define DATE "built on: Wed Mar 5 21:04:03 2025 UTC" +#define DATE "built on: Sun Jul 20 00:50:44 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/linux-x86_64/asm/include/openssl/opensslv.h b/deps/openssl/config/archs/linux-x86_64/asm/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/linux-x86_64/asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux-x86_64/asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux-x86_64/asm_avx2/configdata.pm b/deps/openssl/config/archs/linux-x86_64/asm_avx2/configdata.pm index 9cb66bbfb1dca2..466f7bbe99a8bf 100644 --- a/deps/openssl/config/archs/linux-x86_64/asm_avx2/configdata.pm +++ b/deps/openssl/config/archs/linux-x86_64/asm_avx2/configdata.pm @@ -159,7 +159,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -207,7 +207,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "linux-x86_64", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/linux-x86_64/asm_avx2/crypto/buildinf.h b/deps/openssl/config/archs/linux-x86_64/asm_avx2/crypto/buildinf.h index 109d6e9a86b37a..92ff88da18b515 100644 --- a/deps/openssl/config/archs/linux-x86_64/asm_avx2/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux-x86_64/asm_avx2/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux-x86_64" -#define DATE "built on: Wed Mar 5 21:04:19 2025 UTC" +#define DATE "built on: Sun Jul 20 00:50:59 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/linux-x86_64/asm_avx2/include/openssl/opensslv.h b/deps/openssl/config/archs/linux-x86_64/asm_avx2/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/linux-x86_64/asm_avx2/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux-x86_64/asm_avx2/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux-x86_64/no-asm/configdata.pm b/deps/openssl/config/archs/linux-x86_64/no-asm/configdata.pm index b4bbde59a6509f..615a28e0482a88 100644 --- a/deps/openssl/config/archs/linux-x86_64/no-asm/configdata.pm +++ b/deps/openssl/config/archs/linux-x86_64/no-asm/configdata.pm @@ -157,7 +157,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -206,7 +206,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-asm no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "linux-x86_64", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/linux-x86_64/no-asm/crypto/buildinf.h b/deps/openssl/config/archs/linux-x86_64/no-asm/crypto/buildinf.h index 0c2e38659674ef..a8ba669b8ef7bc 100644 --- a/deps/openssl/config/archs/linux-x86_64/no-asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux-x86_64/no-asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux-x86_64" -#define DATE "built on: Wed Mar 5 21:04:34 2025 UTC" +#define DATE "built on: Sun Jul 20 00:51:14 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/linux-x86_64/no-asm/include/openssl/opensslv.h b/deps/openssl/config/archs/linux-x86_64/no-asm/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/linux-x86_64/no-asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux-x86_64/no-asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux32-s390x/asm/configdata.pm b/deps/openssl/config/archs/linux32-s390x/asm/configdata.pm index 2aed4d40439706..001fef332e467e 100644 --- a/deps/openssl/config/archs/linux32-s390x/asm/configdata.pm +++ b/deps/openssl/config/archs/linux32-s390x/asm/configdata.pm @@ -159,7 +159,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -207,7 +207,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "linux32-s390x", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/linux32-s390x/asm/crypto/buildinf.h b/deps/openssl/config/archs/linux32-s390x/asm/crypto/buildinf.h index 746e7a850bd8c1..ab84fe53528760 100644 --- a/deps/openssl/config/archs/linux32-s390x/asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux32-s390x/asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux32-s390x" -#define DATE "built on: Wed Mar 5 21:05:22 2025 UTC" +#define DATE "built on: Sun Jul 20 00:52:02 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/linux32-s390x/asm/include/openssl/opensslv.h b/deps/openssl/config/archs/linux32-s390x/asm/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/linux32-s390x/asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux32-s390x/asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux32-s390x/asm_avx2/configdata.pm b/deps/openssl/config/archs/linux32-s390x/asm_avx2/configdata.pm index 4ba3fdb2ce61ea..dc2245643c537a 100644 --- a/deps/openssl/config/archs/linux32-s390x/asm_avx2/configdata.pm +++ b/deps/openssl/config/archs/linux32-s390x/asm_avx2/configdata.pm @@ -159,7 +159,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -207,7 +207,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "linux32-s390x", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/linux32-s390x/asm_avx2/crypto/buildinf.h b/deps/openssl/config/archs/linux32-s390x/asm_avx2/crypto/buildinf.h index 8520b0b6871835..75f3682eb3eee6 100644 --- a/deps/openssl/config/archs/linux32-s390x/asm_avx2/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux32-s390x/asm_avx2/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux32-s390x" -#define DATE "built on: Wed Mar 5 21:05:34 2025 UTC" +#define DATE "built on: Sun Jul 20 00:52:15 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/linux32-s390x/asm_avx2/include/openssl/opensslv.h b/deps/openssl/config/archs/linux32-s390x/asm_avx2/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/linux32-s390x/asm_avx2/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux32-s390x/asm_avx2/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux32-s390x/no-asm/configdata.pm b/deps/openssl/config/archs/linux32-s390x/no-asm/configdata.pm index 3203fac4e66983..128e0919618c81 100644 --- a/deps/openssl/config/archs/linux32-s390x/no-asm/configdata.pm +++ b/deps/openssl/config/archs/linux32-s390x/no-asm/configdata.pm @@ -157,7 +157,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -206,7 +206,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-asm no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "linux32-s390x", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/linux32-s390x/no-asm/crypto/buildinf.h b/deps/openssl/config/archs/linux32-s390x/no-asm/crypto/buildinf.h index cfbf11db75cd71..4f50d8c0adced1 100644 --- a/deps/openssl/config/archs/linux32-s390x/no-asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux32-s390x/no-asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux32-s390x" -#define DATE "built on: Wed Mar 5 21:05:47 2025 UTC" +#define DATE "built on: Sun Jul 20 00:52:28 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/linux32-s390x/no-asm/include/openssl/opensslv.h b/deps/openssl/config/archs/linux32-s390x/no-asm/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/linux32-s390x/no-asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux32-s390x/no-asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux64-loongarch64/no-asm/configdata.pm b/deps/openssl/config/archs/linux64-loongarch64/no-asm/configdata.pm index 9356c6d20f9c0d..cfd5172e0f3185 100644 --- a/deps/openssl/config/archs/linux64-loongarch64/no-asm/configdata.pm +++ b/deps/openssl/config/archs/linux64-loongarch64/no-asm/configdata.pm @@ -157,7 +157,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -206,7 +206,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-asm no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "linux64-loongarch64", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/linux64-loongarch64/no-asm/crypto/buildinf.h b/deps/openssl/config/archs/linux64-loongarch64/no-asm/crypto/buildinf.h index 35f4088f697a76..0ac7eb9e2e66fb 100644 --- a/deps/openssl/config/archs/linux64-loongarch64/no-asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux64-loongarch64/no-asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux64-loongarch64" -#define DATE "built on: Wed Mar 5 21:10:06 2025 UTC" +#define DATE "built on: Sun Jul 20 00:56:48 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/linux64-loongarch64/no-asm/include/openssl/opensslv.h b/deps/openssl/config/archs/linux64-loongarch64/no-asm/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/linux64-loongarch64/no-asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux64-loongarch64/no-asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux64-mips64/asm/configdata.pm b/deps/openssl/config/archs/linux64-mips64/asm/configdata.pm index 3791b477028d96..36358953386d30 100644 --- a/deps/openssl/config/archs/linux64-mips64/asm/configdata.pm +++ b/deps/openssl/config/archs/linux64-mips64/asm/configdata.pm @@ -162,7 +162,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -210,7 +210,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -262,11 +262,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "linux64-mips64", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/linux64-mips64/asm/crypto/buildinf.h b/deps/openssl/config/archs/linux64-mips64/asm/crypto/buildinf.h index 7bbf51db1e9d5d..269346fa169e90 100644 --- a/deps/openssl/config/archs/linux64-mips64/asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux64-mips64/asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux64-mips64" -#define DATE "built on: Wed Mar 5 21:06:35 2025 UTC" +#define DATE "built on: Sun Jul 20 00:53:17 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/linux64-mips64/asm/include/openssl/opensslv.h b/deps/openssl/config/archs/linux64-mips64/asm/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/linux64-mips64/asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux64-mips64/asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux64-mips64/asm_avx2/configdata.pm b/deps/openssl/config/archs/linux64-mips64/asm_avx2/configdata.pm index 64118999c0d5ce..6780cc3de35e5e 100644 --- a/deps/openssl/config/archs/linux64-mips64/asm_avx2/configdata.pm +++ b/deps/openssl/config/archs/linux64-mips64/asm_avx2/configdata.pm @@ -162,7 +162,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -210,7 +210,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -262,11 +262,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "linux64-mips64", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/linux64-mips64/asm_avx2/crypto/buildinf.h b/deps/openssl/config/archs/linux64-mips64/asm_avx2/crypto/buildinf.h index e9c4ce02098106..7ec1fddf3971f2 100644 --- a/deps/openssl/config/archs/linux64-mips64/asm_avx2/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux64-mips64/asm_avx2/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux64-mips64" -#define DATE "built on: Wed Mar 5 21:06:47 2025 UTC" +#define DATE "built on: Sun Jul 20 00:53:28 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/linux64-mips64/asm_avx2/include/openssl/opensslv.h b/deps/openssl/config/archs/linux64-mips64/asm_avx2/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/linux64-mips64/asm_avx2/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux64-mips64/asm_avx2/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux64-mips64/no-asm/configdata.pm b/deps/openssl/config/archs/linux64-mips64/no-asm/configdata.pm index 620cce510e55ce..83be5b7f5fa2e5 100644 --- a/deps/openssl/config/archs/linux64-mips64/no-asm/configdata.pm +++ b/deps/openssl/config/archs/linux64-mips64/no-asm/configdata.pm @@ -157,7 +157,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -206,7 +206,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-asm no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "linux64-mips64", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/linux64-mips64/no-asm/crypto/buildinf.h b/deps/openssl/config/archs/linux64-mips64/no-asm/crypto/buildinf.h index 37ff606b896e2f..1698e195198832 100644 --- a/deps/openssl/config/archs/linux64-mips64/no-asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux64-mips64/no-asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux64-mips64" -#define DATE "built on: Wed Mar 5 21:06:59 2025 UTC" +#define DATE "built on: Sun Jul 20 00:53:40 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/linux64-mips64/no-asm/include/openssl/opensslv.h b/deps/openssl/config/archs/linux64-mips64/no-asm/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/linux64-mips64/no-asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux64-mips64/no-asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux64-riscv64/no-asm/configdata.pm b/deps/openssl/config/archs/linux64-riscv64/no-asm/configdata.pm index 965841bcbfc8b9..6cf1f4c00e1039 100644 --- a/deps/openssl/config/archs/linux64-riscv64/no-asm/configdata.pm +++ b/deps/openssl/config/archs/linux64-riscv64/no-asm/configdata.pm @@ -157,7 +157,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -206,7 +206,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-asm no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "linux64-riscv64", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/linux64-riscv64/no-asm/crypto/buildinf.h b/deps/openssl/config/archs/linux64-riscv64/no-asm/crypto/buildinf.h index e2572b49a0da50..ff0383477705fe 100644 --- a/deps/openssl/config/archs/linux64-riscv64/no-asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux64-riscv64/no-asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux64-riscv64" -#define DATE "built on: Wed Mar 5 21:09:54 2025 UTC" +#define DATE "built on: Sun Jul 20 00:56:37 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/linux64-riscv64/no-asm/include/openssl/opensslv.h b/deps/openssl/config/archs/linux64-riscv64/no-asm/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/linux64-riscv64/no-asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux64-riscv64/no-asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux64-s390x/asm/configdata.pm b/deps/openssl/config/archs/linux64-s390x/asm/configdata.pm index abc3528c063460..b480cb84f991d2 100644 --- a/deps/openssl/config/archs/linux64-s390x/asm/configdata.pm +++ b/deps/openssl/config/archs/linux64-s390x/asm/configdata.pm @@ -159,7 +159,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -207,7 +207,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "linux64-s390x", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/linux64-s390x/asm/crypto/buildinf.h b/deps/openssl/config/archs/linux64-s390x/asm/crypto/buildinf.h index d2d018bb826d83..a9eab5b436ed3b 100644 --- a/deps/openssl/config/archs/linux64-s390x/asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux64-s390x/asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux64-s390x" -#define DATE "built on: Wed Mar 5 21:05:58 2025 UTC" +#define DATE "built on: Sun Jul 20 00:52:39 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/linux64-s390x/asm/include/openssl/opensslv.h b/deps/openssl/config/archs/linux64-s390x/asm/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/linux64-s390x/asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux64-s390x/asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux64-s390x/asm_avx2/configdata.pm b/deps/openssl/config/archs/linux64-s390x/asm_avx2/configdata.pm index ce5beb94026504..f144a56b1479b2 100644 --- a/deps/openssl/config/archs/linux64-s390x/asm_avx2/configdata.pm +++ b/deps/openssl/config/archs/linux64-s390x/asm_avx2/configdata.pm @@ -159,7 +159,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -207,7 +207,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "linux64-s390x", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/linux64-s390x/asm_avx2/crypto/buildinf.h b/deps/openssl/config/archs/linux64-s390x/asm_avx2/crypto/buildinf.h index a4caeb83b31bb8..57099a6798ad63 100644 --- a/deps/openssl/config/archs/linux64-s390x/asm_avx2/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux64-s390x/asm_avx2/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux64-s390x" -#define DATE "built on: Wed Mar 5 21:06:11 2025 UTC" +#define DATE "built on: Sun Jul 20 00:52:52 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/linux64-s390x/asm_avx2/include/openssl/opensslv.h b/deps/openssl/config/archs/linux64-s390x/asm_avx2/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/linux64-s390x/asm_avx2/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux64-s390x/asm_avx2/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux64-s390x/no-asm/configdata.pm b/deps/openssl/config/archs/linux64-s390x/no-asm/configdata.pm index 600b00670d49b9..d777ce0ff2729f 100644 --- a/deps/openssl/config/archs/linux64-s390x/no-asm/configdata.pm +++ b/deps/openssl/config/archs/linux64-s390x/no-asm/configdata.pm @@ -157,7 +157,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -206,7 +206,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-asm no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "linux64-s390x", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/linux64-s390x/no-asm/crypto/buildinf.h b/deps/openssl/config/archs/linux64-s390x/no-asm/crypto/buildinf.h index 1a15fe8b54d607..69cdb20b71e7b5 100644 --- a/deps/openssl/config/archs/linux64-s390x/no-asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux64-s390x/no-asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux64-s390x" -#define DATE "built on: Wed Mar 5 21:06:24 2025 UTC" +#define DATE "built on: Sun Jul 20 00:53:05 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/linux64-s390x/no-asm/include/openssl/opensslv.h b/deps/openssl/config/archs/linux64-s390x/no-asm/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/linux64-s390x/no-asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux64-s390x/no-asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/solaris-x86-gcc/asm/configdata.pm b/deps/openssl/config/archs/solaris-x86-gcc/asm/configdata.pm index 29bb1851e3fcf9..d05c08d6ad9c59 100644 --- a/deps/openssl/config/archs/solaris-x86-gcc/asm/configdata.pm +++ b/deps/openssl/config/archs/solaris-x86-gcc/asm/configdata.pm @@ -156,7 +156,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -204,7 +204,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -256,11 +256,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "solaris-x86-gcc", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/solaris-x86-gcc/asm/crypto/buildinf.h b/deps/openssl/config/archs/solaris-x86-gcc/asm/crypto/buildinf.h index a447f23eeb173b..30118ac2386260 100644 --- a/deps/openssl/config/archs/solaris-x86-gcc/asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/solaris-x86-gcc/asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: solaris-x86-gcc" -#define DATE "built on: Wed Mar 5 21:07:10 2025 UTC" +#define DATE "built on: Sun Jul 20 00:53:52 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/solaris-x86-gcc/asm/include/openssl/opensslv.h b/deps/openssl/config/archs/solaris-x86-gcc/asm/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/solaris-x86-gcc/asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/solaris-x86-gcc/asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/solaris-x86-gcc/asm_avx2/configdata.pm b/deps/openssl/config/archs/solaris-x86-gcc/asm_avx2/configdata.pm index b596850adb7464..44208d062f9938 100644 --- a/deps/openssl/config/archs/solaris-x86-gcc/asm_avx2/configdata.pm +++ b/deps/openssl/config/archs/solaris-x86-gcc/asm_avx2/configdata.pm @@ -156,7 +156,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -204,7 +204,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -256,11 +256,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "solaris-x86-gcc", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/solaris-x86-gcc/asm_avx2/crypto/buildinf.h b/deps/openssl/config/archs/solaris-x86-gcc/asm_avx2/crypto/buildinf.h index 23cce74e29506b..b72c96e93da9a9 100644 --- a/deps/openssl/config/archs/solaris-x86-gcc/asm_avx2/crypto/buildinf.h +++ b/deps/openssl/config/archs/solaris-x86-gcc/asm_avx2/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: solaris-x86-gcc" -#define DATE "built on: Wed Mar 5 21:07:23 2025 UTC" +#define DATE "built on: Sun Jul 20 00:54:04 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/solaris-x86-gcc/asm_avx2/include/openssl/opensslv.h b/deps/openssl/config/archs/solaris-x86-gcc/asm_avx2/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/solaris-x86-gcc/asm_avx2/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/solaris-x86-gcc/asm_avx2/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/solaris-x86-gcc/no-asm/configdata.pm b/deps/openssl/config/archs/solaris-x86-gcc/no-asm/configdata.pm index 3861ddf22ae7b6..9a578576e7b746 100644 --- a/deps/openssl/config/archs/solaris-x86-gcc/no-asm/configdata.pm +++ b/deps/openssl/config/archs/solaris-x86-gcc/no-asm/configdata.pm @@ -154,7 +154,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -203,7 +203,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-asm no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -256,11 +256,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "solaris-x86-gcc", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/solaris-x86-gcc/no-asm/crypto/buildinf.h b/deps/openssl/config/archs/solaris-x86-gcc/no-asm/crypto/buildinf.h index 53c0d8b1a0867c..5e2b1057a8d4cf 100644 --- a/deps/openssl/config/archs/solaris-x86-gcc/no-asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/solaris-x86-gcc/no-asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: solaris-x86-gcc" -#define DATE "built on: Wed Mar 5 21:07:35 2025 UTC" +#define DATE "built on: Sun Jul 20 00:54:17 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/solaris-x86-gcc/no-asm/include/openssl/opensslv.h b/deps/openssl/config/archs/solaris-x86-gcc/no-asm/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/solaris-x86-gcc/no-asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/solaris-x86-gcc/no-asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/configdata.pm b/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/configdata.pm index 9c48df85f8f0c9..82cf2379c2899e 100644 --- a/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/configdata.pm +++ b/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/configdata.pm @@ -156,7 +156,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -204,7 +204,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -256,11 +256,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "solaris64-x86_64-gcc", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/buildinf.h b/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/buildinf.h index 419598dbfc385b..b553c36eaffdb9 100644 --- a/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: solaris64-x86_64-gcc" -#define DATE "built on: Wed Mar 5 21:07:47 2025 UTC" +#define DATE "built on: Sun Jul 20 00:54:29 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/include/openssl/opensslv.h b/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/solaris64-x86_64-gcc/asm_avx2/configdata.pm b/deps/openssl/config/archs/solaris64-x86_64-gcc/asm_avx2/configdata.pm index 78ff7abbcfcd77..bd64192a59f549 100644 --- a/deps/openssl/config/archs/solaris64-x86_64-gcc/asm_avx2/configdata.pm +++ b/deps/openssl/config/archs/solaris64-x86_64-gcc/asm_avx2/configdata.pm @@ -156,7 +156,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -204,7 +204,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -256,11 +256,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "solaris64-x86_64-gcc", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/solaris64-x86_64-gcc/asm_avx2/crypto/buildinf.h b/deps/openssl/config/archs/solaris64-x86_64-gcc/asm_avx2/crypto/buildinf.h index d10909b5f3a37a..0b26319549d98a 100644 --- a/deps/openssl/config/archs/solaris64-x86_64-gcc/asm_avx2/crypto/buildinf.h +++ b/deps/openssl/config/archs/solaris64-x86_64-gcc/asm_avx2/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: solaris64-x86_64-gcc" -#define DATE "built on: Wed Mar 5 21:08:02 2025 UTC" +#define DATE "built on: Sun Jul 20 00:54:44 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/solaris64-x86_64-gcc/asm_avx2/include/openssl/opensslv.h b/deps/openssl/config/archs/solaris64-x86_64-gcc/asm_avx2/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/solaris64-x86_64-gcc/asm_avx2/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/solaris64-x86_64-gcc/asm_avx2/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/solaris64-x86_64-gcc/no-asm/configdata.pm b/deps/openssl/config/archs/solaris64-x86_64-gcc/no-asm/configdata.pm index 21482b0425020d..f13b28dcb87471 100644 --- a/deps/openssl/config/archs/solaris64-x86_64-gcc/no-asm/configdata.pm +++ b/deps/openssl/config/archs/solaris64-x86_64-gcc/no-asm/configdata.pm @@ -154,7 +154,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.16", + "full_version" => "3.0.17", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -203,7 +203,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-asm no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "16", + "patch" => "17", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.38.2", @@ -256,11 +256,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "11 Feb 2025", + "release_date" => "1 Jul 2025", "shlib_version" => "3", "sourcedir" => ".", "target" => "solaris64-x86_64-gcc", - "version" => "3.0.16" + "version" => "3.0.17" ); our %target = ( "AR" => "ar", diff --git a/deps/openssl/config/archs/solaris64-x86_64-gcc/no-asm/crypto/buildinf.h b/deps/openssl/config/archs/solaris64-x86_64-gcc/no-asm/crypto/buildinf.h index cb9544336c42ba..8e887b69c73e9b 100644 --- a/deps/openssl/config/archs/solaris64-x86_64-gcc/no-asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/solaris64-x86_64-gcc/no-asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: solaris64-x86_64-gcc" -#define DATE "built on: Wed Mar 5 21:08:18 2025 UTC" +#define DATE "built on: Sun Jul 20 00:55:00 2025 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a diff --git a/deps/openssl/config/archs/solaris64-x86_64-gcc/no-asm/include/openssl/opensslv.h b/deps/openssl/config/archs/solaris64-x86_64-gcc/no-asm/include/openssl/opensslv.h index 8e11963343e9fa..5fb5bc63056fe0 100644 --- a/deps/openssl/config/archs/solaris64-x86_64-gcc/no-asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/solaris64-x86_64-gcc/no-asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 16 +# define OPENSSL_VERSION_PATCH 17 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.16" -# define OPENSSL_FULL_VERSION_STR "3.0.16" +# define OPENSSL_VERSION_STR "3.0.17" +# define OPENSSL_FULL_VERSION_STR "3.0.17" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "11 Feb 2025" +# define OPENSSL_RELEASE_DATE "1 Jul 2025" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.16 11 Feb 2025" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.17 1 Jul 2025" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/generate_gypi.pl b/deps/openssl/config/generate_gypi.pl index be7d357f68cc62..95373e5bc5626d 100755 --- a/deps/openssl/config/generate_gypi.pl +++ b/deps/openssl/config/generate_gypi.pl @@ -48,15 +48,15 @@ # Generate arch dependent header files with Makefile my $buildinf = "crypto/buildinf.h"; my $progs = "apps/progs.h"; -my $prov_headers = "providers/common/include/prov/der_dsa.h providers/common/include/prov/der_wrap.h providers/common/include/prov/der_rsa.h providers/common/include/prov/der_ecx.h providers/common/include/prov/der_sm2.h providers/common/include/prov/der_ec.h providers/common/include/prov/der_digests.h"; +my $prov_headers = "providers/common/include/prov/der_dsa.h providers/common/include/prov/der_ml_dsa.h providers/common/include/prov/der_slh_dsa.h providers/common/include/prov/der_wrap.h providers/common/include/prov/der_rsa.h providers/common/include/prov/der_ecx.h providers/common/include/prov/der_sm2.h providers/common/include/prov/der_ec.h providers/common/include/prov/der_digests.h"; my $fips_ld = ($arch =~ m/linux/ ? 
"providers/fips.ld" : ""); my $cmd1 = "cd ../openssl; make -f $makefile clean build_generated $buildinf $progs $prov_headers $fips_ld;"; system($cmd1) == 0 or die "Error in system($cmd1)"; # Copy and move all arch dependent header files into config/archs make_path("$base_dir/crypto/include/internal", "$base_dir/include/openssl", - "$base_dir/include/crypto", "$base_dir/providers/common/include/prov", - "$base_dir/apps", + "$base_dir/include/crypto", "$base_dir/include/internal", + "$base_dir/providers/common/include/prov", "$base_dir/apps", { error => \my $make_path_err}); if (@$make_path_err) { @@ -73,6 +73,9 @@ my @crypto_dir_headers = shift @ARGV; copy_headers(@crypto_dir_headers, 'crypto'); +my @internal_dir_headers = shift @ARGV; +copy_headers(@internal_dir_headers, 'internal'); + move("$src_dir/include/crypto/bn_conf.h", "$base_dir/include/crypto/bn_conf.h") or die "Move failed: $!"; move("$src_dir/include/crypto/dso_conf.h", @@ -85,8 +88,15 @@ copy("$src_dir/apps/progs.c", "$base_dir/apps") or die "Copy failed: $!"; +move("$src_dir/include/internal/param_names.h", + "$base_dir/include/internal/param_names.h") or die "Move failed: $!"; + copy("$src_dir/providers/common/include/prov/der_dsa.h", "$base_dir/providers/common/include/prov/") or die "Copy failed: $!"; +copy("$src_dir/providers/common/include/prov/der_ml_dsa.h", + "$base_dir/providers/common/include/prov/") or die "Copy failed: $!"; +copy("$src_dir/providers/common/include/prov/der_slh_dsa.h", + "$base_dir/providers/common/include/prov/") or die "Copy failed: $!"; copy("$src_dir/providers/common/include/prov/der_wrap.h", "$base_dir/providers/common/include/prov/") or die "Copy failed: $!"; copy("$src_dir/providers/common/include/prov/der_rsa.h", @@ -363,7 +373,7 @@ # Clean Up my $cmd2 ="cd $src_dir; make -f $makefile clean; make -f $makefile distclean;" . - "git clean -f $src_dir/crypto"; + "git clean -f $src_dir"; system($cmd2) == 0 or die "Error in system($cmd2)"; diff --git a/deps/openssl/config/generate_headers.pl b/deps/openssl/config/generate_headers.pl index 9390ef668d6e01..55d63fb28e696e 100755 --- a/deps/openssl/config/generate_headers.pl +++ b/deps/openssl/config/generate_headers.pl @@ -9,6 +9,7 @@ my @openssl_headers = shift @ARGV; my @crypto_headers = shift @ARGV; +my @internal_headers = shift @ARGV; my $include_tmpl = Text::Template->new(TYPE => 'FILE', SOURCE => 'include.h.tmpl', @@ -25,6 +26,7 @@ gen_headers(@openssl_headers, 'openssl'); gen_headers(@crypto_headers, 'crypto'); +gen_headers(@internal_headers, 'internal'); sub gen_headers { my @headers = split / /, $_[0]; diff --git a/deps/openssl/openssl/CHANGES.md b/deps/openssl/openssl/CHANGES.md index 5b0193bc3955c6..d3958d762af527 100644 --- a/deps/openssl/openssl/CHANGES.md +++ b/deps/openssl/openssl/CHANGES.md @@ -28,6 +28,10 @@ breaking changes, and mappings for the large list of deprecated functions. [Migration guide]: https://github.com/openssl/openssl/tree/master/doc/man7/migration_guide.pod +### Changes between 3.0.16 and 3.0.17 [1 Jul 2025] + + * none yet + ### Changes between 3.0.15 and 3.0.16 [11 Feb 2025] * Fixed timing side-channel in ECDSA signature computation. @@ -3273,7 +3277,7 @@ OpenSSL 1.1.1 * Support for TLSv1.3 added. Note that users upgrading from an earlier version of OpenSSL should review their configuration settings to ensure that they are still appropriate for TLSv1.3. For further information see: - + *Matt Caswell* @@ -4561,7 +4565,7 @@ OpenSSL 1.1.0 * The GOST engine was out of date and therefore it has been removed. 
An up to date GOST engine is now being maintained in an external repository. - See: . Libssl still retains + See: . Libssl still retains support for GOST ciphersuites (these are only activated if a GOST engine is present). @@ -5340,6 +5344,11 @@ OpenSSL 1.1.0 *Rob Percival * + * SSLv3 is by default disabled at build-time. Builds that are not + configured with "enable-ssl3" will not support SSLv3. + + *Kurt Roeckx* + OpenSSL 1.0.2 ------------- diff --git a/deps/openssl/openssl/Configure b/deps/openssl/openssl/Configure index 0c60d1da1659bd..77bf0cfb96f25c 100755 --- a/deps/openssl/openssl/Configure +++ b/deps/openssl/openssl/Configure @@ -1,6 +1,6 @@ #! /usr/bin/env perl # -*- mode: perl; -*- -# Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. +# Copyright 2016-2025 The OpenSSL Project Authors. All Rights Reserved. # # Licensed under the Apache License 2.0 (the "License"). You may not use # this file except in compliance with the License. You can obtain a copy @@ -157,6 +157,7 @@ my @gcc_devteam_warn = qw( -Wextra -Wno-unused-parameter -Wno-missing-field-initializers + -Wno-unterminated-string-initialization -Wswitch -Wsign-compare -Wshadow diff --git a/deps/openssl/openssl/NEWS.md b/deps/openssl/openssl/NEWS.md index 007fc9786ef810..87fd6d8153e470 100644 --- a/deps/openssl/openssl/NEWS.md +++ b/deps/openssl/openssl/NEWS.md @@ -18,6 +18,14 @@ OpenSSL Releases OpenSSL 3.0 ----------- +### Major changes between OpenSSL 3.0.16 and OpenSSL 3.0.17 [1 Jul 2025] + +OpenSSL 3.0.17 is a bug fix release. + +This release incorporates the following bug fixes and mitigations: + + * Miscellaneous minor bug fixes. + ### Major changes between OpenSSL 3.0.15 and OpenSSL 3.0.16 [11 Feb 2025] OpenSSL 3.0.16 is a security patch release. The most severe CVE fixed in this @@ -329,7 +337,7 @@ OpenSSL 1.1.1 * Rewrite of the packet construction code for "safer" packet handling * Rewrite of the extension handling code For further important information, see the [TLS1.3 page]( - https://wiki.openssl.org/index.php/TLS1.3) in the OpenSSL Wiki. + https://github.com/openssl/openssl/wiki/TLS1.3) in the OpenSSL Wiki. * Complete rewrite of the OpenSSL random number generator to introduce the following capabilities diff --git a/deps/openssl/openssl/NOTES-WINDOWS.md b/deps/openssl/openssl/NOTES-WINDOWS.md index b1d6c4fe13bbe0..44a5f5bf153dfc 100644 --- a/deps/openssl/openssl/NOTES-WINDOWS.md +++ b/deps/openssl/openssl/NOTES-WINDOWS.md @@ -79,6 +79,11 @@ Quick start OpenSSL or - `perl Configure` to let Configure figure out the platform + a. If you don't plan to develop OpenSSL yourself and don't need to rebuild, + in other words, if you always do a new build, turning off the build + dependency feature can speed up build times by up to 50%: + `perl Configure no-makedepend` + 6. `nmake` 7. `nmake test` diff --git a/deps/openssl/openssl/README.md b/deps/openssl/openssl/README.md index 477f5cbb7d1273..c6fbca920b92ad 100644 --- a/deps/openssl/openssl/README.md +++ b/deps/openssl/openssl/README.md @@ -128,8 +128,7 @@ available online. Wiki ---- -There is a Wiki at [wiki.openssl.org] which is currently not very active. -It contains a lot of useful information, not all of which is up to date. +There is a [GitHub Wiki] which is currently not very active. License ======= @@ -178,8 +177,8 @@ All rights reserved. 
"OpenSSL GitHub Mirror" -[wiki.openssl.org]: - +[GitHub Wiki]: + "OpenSSL Wiki" [migration_guide(7ossl)]: diff --git a/deps/openssl/openssl/VERSION.dat b/deps/openssl/openssl/VERSION.dat index 4b7eb91a451a90..344a35bc5fb32c 100644 --- a/deps/openssl/openssl/VERSION.dat +++ b/deps/openssl/openssl/VERSION.dat @@ -1,7 +1,7 @@ MAJOR=3 MINOR=0 -PATCH=16 +PATCH=17 PRE_RELEASE_TAG= BUILD_METADATA= -RELEASE_DATE="11 Feb 2025" +RELEASE_DATE="1 Jul 2025" SHLIB_VERSION=3 diff --git a/deps/openssl/openssl/apps/CA.pl.in b/deps/openssl/openssl/apps/CA.pl.in index f029470005d946..0bad37d469555b 100644 --- a/deps/openssl/openssl/apps/CA.pl.in +++ b/deps/openssl/openssl/apps/CA.pl.in @@ -1,5 +1,5 @@ #!{- $config{HASHBANGPERL} -} -# Copyright 2000-2021 The OpenSSL Project Authors. All Rights Reserved. +# Copyright 2000-2025 The OpenSSL Project Authors. All Rights Reserved. # # Licensed under the Apache License 2.0 (the "License"). You may not use # this file except in compliance with the License. You can obtain a copy @@ -19,14 +19,17 @@ my @OPENSSL_CMDS = ("req", "ca", "pkcs12", "x509", "verify"); my $openssl = $ENV{'OPENSSL'} // "openssl"; $ENV{'OPENSSL'} = $openssl; +my @openssl = split_val($openssl); + my $OPENSSL_CONFIG = $ENV{"OPENSSL_CONFIG"} // ""; +my @OPENSSL_CONFIG = split_val($OPENSSL_CONFIG); # Command invocations. -my $REQ = "$openssl req $OPENSSL_CONFIG"; -my $CA = "$openssl ca $OPENSSL_CONFIG"; -my $VERIFY = "$openssl verify"; -my $X509 = "$openssl x509"; -my $PKCS12 = "$openssl pkcs12"; +my @REQ = (@openssl, "req", @OPENSSL_CONFIG); +my @CA = (@openssl, "ca", @OPENSSL_CONFIG); +my @VERIFY = (@openssl, "verify"); +my @X509 = (@openssl, "x509"); +my @PKCS12 = (@openssl, "pkcs12"); # Default values for various configuration settings. my $CATOP = "./demoCA"; @@ -34,8 +37,10 @@ my $CAKEY = "cakey.pem"; my $CAREQ = "careq.pem"; my $CACERT = "cacert.pem"; my $CACRL = "crl.pem"; -my $DAYS = "-days 365"; -my $CADAYS = "-days 1095"; # 3 years +my @DAYS = qw(-days 365); +my @CADAYS = qw(-days 1095); # 3 years +my @EXTENSIONS = qw(-extensions v3_ca); +my @POLICY = qw(-policy policy_anything); my $NEWKEY = "newkey.pem"; my $NEWREQ = "newreq.pem"; my $NEWCERT = "newcert.pem"; @@ -43,31 +48,177 @@ my $NEWP12 = "newcert.p12"; # Commandline parsing my %EXTRA; -my $WHAT = shift @ARGV || ""; +my $WHAT = shift @ARGV // ""; @ARGV = parse_extra(@ARGV); my $RET = 0; +sub split_val { + return split_val_win32(@_) if ($^O eq 'MSWin32'); + my ($val) = @_; + my (@ret, @frag); + + # Skip leading whitespace + $val =~ m{\A[ \t]*}ogc; + + # Unix shell-compatible split + # + # Handles backslash escapes outside quotes and + # in double-quoted strings. Parameter and + # command-substitution is silently ignored. + # Bare newlines outside quotes and (trailing) backslashes are disallowed. + + while (1) { + last if (pos($val) == length($val)); + + # The first char is never a SPACE or TAB. Possible matches are: + # 1. Ordinary string fragment + # 2. Single-quoted string + # 3. Double-quoted string + # 4. Backslash escape + # 5. 
Bare backlash or newline (rejected) + # + if ($val =~ m{\G([^'" \t\n\\]+)}ogc) { + # Ordinary string + push @frag, $1; + } elsif ($val =~ m{\G'([^']*)'}ogc) { + # Single-quoted string + push @frag, $1; + } elsif ($val =~ m{\G"}ogc) { + # Double-quoted string + push @frag, ""; + while (1) { + last if ($val =~ m{\G"}ogc); + if ($val =~ m{\G([^"\\]+)}ogcs) { + # literals + push @frag, $1; + } elsif ($val =~ m{\G.(["\`\$\\])}ogc) { + # backslash-escaped special + push @frag, $1; + } elsif ($val =~ m{\G.(.)}ogcs) { + # backslashed non-special + push @frag, "\\$1" unless $1 eq "\n"; + } else { + die sprintf("Malformed quoted string: %s\n", $val); + } + } + } elsif ($val =~ m{\G\\(.)}ogc) { + # Backslash is unconditional escape outside quoted strings + push @frag, $1 unless $1 eq "\n"; + } else { + die sprintf("Bare backslash or newline in: '%s'\n", $val); + } + # Done if at SPACE, TAB or end, otherwise continue current fragment + # + next unless ($val =~ m{\G(?:[ \t]+|\z)}ogcs); + push @ret, join("", splice(@frag)) if (@frag > 0); + } + # Handle final fragment + push @ret, join("", splice(@frag)) if (@frag > 0); + return @ret; +} + +sub split_val_win32 { + my ($val) = @_; + my (@ret, @frag); + + # Skip leading whitespace + $val =~ m{\A[ \t]*}ogc; + + # Windows-compatible split + # See: "Parsing C++ command-line arguments" in: + # https://learn.microsoft.com/en-us/cpp/cpp/main-function-command-line-args?view=msvc-170 + # + # Backslashes are special only when followed by a double-quote + # Pairs of double-quotes make a single double-quote. + # Closing double-quotes may be omitted. + + while (1) { + last if (pos($val) == length($val)); + + # The first char is never a SPACE or TAB. + # 1. Ordinary string fragment + # 2. Double-quoted string + # 3. Backslashes preceding a double-quote + # 4. Literal backslashes + # 5. Bare newline (rejected) + # + if ($val =~ m{\G([^" \t\n\\]+)}ogc) { + # Ordinary string + push @frag, $1; + } elsif ($val =~ m{\G"}ogc) { + # Double-quoted string + push @frag, ""; + while (1) { + if ($val =~ m{\G("+)}ogc) { + # Two double-quotes make one literal double-quote + my $l = length($1); + push @frag, q{"} x int($l/2) if ($l > 1); + next if ($l % 2 == 0); + last; + } + if ($val =~ m{\G([^"\\]+)}ogc) { + push @frag, $1; + } elsif ($val =~ m{\G((?>[\\]+))(?=")}ogc) { + # Backslashes before a double-quote are escapes + my $l = length($1); + push @frag, q{\\} x int($l / 2); + if ($l % 2 == 1) { + ++pos($val); + push @frag, q{"}; + } + } elsif ($val =~ m{\G((?:(?>[\\]+)[^"\\]+)+)}ogc) { + # Backslashes not before a double-quote are not special + push @frag, $1; + } else { + # Tolerate missing closing double-quote + last; + } + } + } elsif ($val =~ m{\G((?>[\\]+))(?=")}ogc) { + my $l = length($1); + push @frag, q{\\} x int($l / 2); + if ($l % 2 == 1) { + ++pos($val); + push @frag, q{"}; + } + } elsif ($val =~ m{\G([\\]+)}ogc) { + # Backslashes not before a double-quote are not special + push @frag, $1; + } else { + die sprintf("Bare newline in: '%s'\n", $val); + } + # Done if at SPACE, TAB or end, otherwise continue current fragment + # + next unless ($val =~ m{\G(?:[ \t]+|\z)}ogcs); + push @ret, join("", splice(@frag)) if (@frag > 0); + } + # Handle final fragment + push @ret, join("", splice(@frag)) if (@frag); + return @ret; +} + # Split out "-extra-CMD value", and return new |@ARGV|. Fill in # |EXTRA{CMD}| with list of values. 
sub parse_extra { + my @args; foreach ( @OPENSSL_CMDS ) { - $EXTRA{$_} = ''; + $EXTRA{$_} = []; } - - my @result; - while ( scalar(@_) > 0 ) { - my $arg = shift; - if ( $arg !~ m/-extra-([a-z0-9]+)/ ) { - push @result, $arg; + while (@_) { + my $arg = shift(@_); + if ( $arg !~ m{^-extra-(\w+)$} ) { + push @args, split_val($arg); next; } - $arg =~ s/-extra-//; - die("Unknown \"-${arg}-extra\" option, exiting") - unless scalar grep { $arg eq $_ } @OPENSSL_CMDS; - $EXTRA{$arg} .= " " . shift; + $arg = $1; + die "Unknown \"-extra-${arg}\" option, exiting\n" + unless grep { $arg eq $_ } @OPENSSL_CMDS; + die "Missing \"-extra-${arg}\" option value, exiting\n" + unless (@_ > 0); + push @{$EXTRA{$arg}}, split_val(shift(@_)); } - return @result; + return @args; } @@ -110,9 +261,9 @@ sub copy_pemfile # Wrapper around system; useful for debugging. Returns just the exit status sub run { - my $cmd = shift; - print "====\n$cmd\n" if $verbose; - my $status = system($cmd); + my ($cmd, @args) = @_; + print "====\n$cmd @args\n" if $verbose; + my $status = system {$cmd} $cmd, @args; print "==> $status\n====\n" if $verbose; return $status >> 8; } @@ -131,17 +282,15 @@ EOF if ($WHAT eq '-newcert' ) { # create a certificate - $RET = run("$REQ -new -x509 -keyout $NEWKEY -out $NEWCERT $DAYS" - . " $EXTRA{req}"); + $RET = run(@REQ, qw(-new -x509 -keyout), $NEWKEY, "-out", $NEWCERT, @DAYS, @{$EXTRA{req}}); print "Cert is in $NEWCERT, private key is in $NEWKEY\n" if $RET == 0; } elsif ($WHAT eq '-precert' ) { # create a pre-certificate - $RET = run("$REQ -x509 -precert -keyout $NEWKEY -out $NEWCERT $DAYS" - . " $EXTRA{req}"); + $RET = run(@REQ, qw(-x509 -precert -keyout), $NEWKEY, "-out", $NEWCERT, @DAYS, @{$EXTRA{req}}); print "Pre-cert is in $NEWCERT, private key is in $NEWKEY\n" if $RET == 0; } elsif ($WHAT =~ /^\-newreq(\-nodes)?$/ ) { # create a certificate request - $RET = run("$REQ -new $1 -keyout $NEWKEY -out $NEWREQ $DAYS $EXTRA{req}"); + $RET = run(@REQ, "-new", (defined $1 ? ($1,) : ()), "-keyout", $NEWKEY, "-out", $NEWREQ, @{$EXTRA{req}}); print "Request is in $NEWREQ, private key is in $NEWKEY\n" if $RET == 0; } elsif ($WHAT eq '-newca' ) { # create the directory hierarchy @@ -174,48 +323,45 @@ if ($WHAT eq '-newcert' ) { copy_pemfile($FILE,"${CATOP}/$CACERT", "CERTIFICATE"); } else { print "Making CA certificate ...\n"; - $RET = run("$REQ -new -keyout ${CATOP}/private/$CAKEY" - . " -out ${CATOP}/$CAREQ $EXTRA{req}"); - $RET = run("$CA -create_serial" - . " -out ${CATOP}/$CACERT $CADAYS -batch" - . " -keyfile ${CATOP}/private/$CAKEY -selfsign" - . " -extensions v3_ca" - . " -infiles ${CATOP}/$CAREQ $EXTRA{ca}") if $RET == 0; + $RET = run(@REQ, qw(-new -keyout), "${CATOP}/private/$CAKEY", + "-out", "${CATOP}/$CAREQ", @{$EXTRA{req}}); + $RET = run(@CA, qw(-create_serial -out), "${CATOP}/$CACERT", @CADAYS, + qw(-batch -keyfile), "${CATOP}/private/$CAKEY", "-selfsign", + @EXTENSIONS, "-infiles", "${CATOP}/$CAREQ", @{$EXTRA{ca}}) + if $RET == 0; print "CA certificate is in ${CATOP}/$CACERT\n" if $RET == 0; } } elsif ($WHAT eq '-pkcs12' ) { my $cname = $ARGV[0]; $cname = "My Certificate" unless defined $cname; - $RET = run("$PKCS12 -in $NEWCERT -inkey $NEWKEY" - . " -certfile ${CATOP}/$CACERT -out $NEWP12" - . 
" -export -name \"$cname\" $EXTRA{pkcs12}"); - print "PKCS #12 file is in $NEWP12\n" if $RET == 0; + $RET = run(@PKCS12, "-in", $NEWCERT, "-inkey", $NEWKEY, + "-certfile", "${CATOP}/$CACERT", "-out", $NEWP12, + qw(-export -name), $cname, @{$EXTRA{pkcs12}}); + print "PKCS#12 file is in $NEWP12\n" if $RET == 0; } elsif ($WHAT eq '-xsign' ) { - $RET = run("$CA -policy policy_anything -infiles $NEWREQ $EXTRA{ca}"); + $RET = run(@CA, @POLICY, "-infiles", $NEWREQ, @{$EXTRA{ca}}); } elsif ($WHAT eq '-sign' ) { - $RET = run("$CA -policy policy_anything -out $NEWCERT" - . " -infiles $NEWREQ $EXTRA{ca}"); + $RET = run(@CA, @POLICY, "-out", $NEWCERT, + "-infiles", $NEWREQ, @{$EXTRA{ca}}); print "Signed certificate is in $NEWCERT\n" if $RET == 0; } elsif ($WHAT eq '-signCA' ) { - $RET = run("$CA -policy policy_anything -out $NEWCERT" - . " -extensions v3_ca -infiles $NEWREQ $EXTRA{ca}"); + $RET = run(@CA, @POLICY, "-out", $NEWCERT, @EXTENSIONS, + "-infiles", $NEWREQ, @{$EXTRA{ca}}); print "Signed CA certificate is in $NEWCERT\n" if $RET == 0; } elsif ($WHAT eq '-signcert' ) { - $RET = run("$X509 -x509toreq -in $NEWREQ -signkey $NEWREQ" - . " -out tmp.pem $EXTRA{x509}"); - $RET = run("$CA -policy policy_anything -out $NEWCERT" - . "-infiles tmp.pem $EXTRA{ca}") if $RET == 0; + $RET = run(@X509, qw(-x509toreq -in), $NEWREQ, "-signkey", $NEWREQ, + qw(-out tmp.pem), @{$EXTRA{x509}}); + $RET = run(@CA, @POLICY, "-out", $NEWCERT, + qw(-infiles tmp.pem), @{$EXTRA{ca}}) if $RET == 0; print "Signed certificate is in $NEWCERT\n" if $RET == 0; } elsif ($WHAT eq '-verify' ) { my @files = @ARGV ? @ARGV : ( $NEWCERT ); foreach my $file (@files) { - # -CAfile quoted for VMS, since the C RTL downcases all unquoted - # arguments to C programs - my $status = run("$VERIFY \"-CAfile\" ${CATOP}/$CACERT $file $EXTRA{verify}"); + my $status = run(@VERIFY, "-CAfile", "${CATOP}/$CACERT", $file, @{$EXTRA{verify}}); $RET = $status if $status != 0; } } elsif ($WHAT eq '-crl' ) { - $RET = run("$CA -gencrl -out ${CATOP}/crl/$CACRL $EXTRA{ca}"); + $RET = run(@CA, qw(-gencrl -out), "${CATOP}/crl/$CACRL", @{$EXTRA{ca}}); print "Generated CRL is in ${CATOP}/crl/$CACRL\n" if $RET == 0; } elsif ($WHAT eq '-revoke' ) { my $cname = $ARGV[0]; @@ -223,10 +369,10 @@ if ($WHAT eq '-newcert' ) { print "Certificate filename is required; reason optional.\n"; exit 1; } - my $reason = $ARGV[1]; - $reason = " -crl_reason $reason" - if defined $reason && crl_reason_ok($reason); - $RET = run("$CA -revoke \"$cname\"" . $reason . $EXTRA{ca}); + my @reason; + @reason = ("-crl_reason", $ARGV[1]) + if defined $ARGV[1] && crl_reason_ok($ARGV[1]); + $RET = run(@CA, "-revoke", $cname, @reason, @{$EXTRA{ca}}); } else { print STDERR "Unknown arg \"$WHAT\"\n"; print STDERR "Use -help for help.\n"; diff --git a/deps/openssl/openssl/apps/cmp.c b/deps/openssl/openssl/apps/cmp.c index c479b15496607f..cb65277e6ad97b 100644 --- a/deps/openssl/openssl/apps/cmp.c +++ b/deps/openssl/openssl/apps/cmp.c @@ -1,5 +1,5 @@ /* - * Copyright 2007-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2007-2025 The OpenSSL Project Authors. All Rights Reserved. 
* Copyright Nokia 2007-2019 * Copyright Siemens AG 2015-2019 * @@ -878,7 +878,7 @@ static int set_name(const char *str, OSSL_CMP_CTX *ctx, const char *desc) { if (str != NULL) { - X509_NAME *n = parse_name(str, MBSTRING_ASC, 1, desc); + X509_NAME *n = parse_name(str, MBSTRING_UTF8, 1, desc); if (n == NULL) return 0; diff --git a/deps/openssl/openssl/apps/cms.c b/deps/openssl/openssl/apps/cms.c index dce227ef2db5ab..185396ca7b3809 100644 --- a/deps/openssl/openssl/apps/cms.c +++ b/deps/openssl/openssl/apps/cms.c @@ -983,7 +983,7 @@ int cms_main(int argc, char **argv) goto end; pctx = CMS_RecipientInfo_get0_pkey_ctx(ri); - if (kparam != NULL) { + if (pctx != NULL && kparam != NULL) { if (!cms_set_pkey_param(pctx, kparam->param)) goto end; } diff --git a/deps/openssl/openssl/apps/lib/apps.c b/deps/openssl/openssl/apps/lib/apps.c index a632b0cff2bf68..b4c4148c2ec950 100644 --- a/deps/openssl/openssl/apps/lib/apps.c +++ b/deps/openssl/openssl/apps/lib/apps.c @@ -1,5 +1,5 @@ /* - * Copyright 1995-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2025 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -1688,6 +1688,9 @@ CA_DB *load_index(const char *dbfile, DB_ATTR *db_attr) } retdb->dbfname = OPENSSL_strdup(dbfile); + if (retdb->dbfname == NULL) + goto err; + #ifndef OPENSSL_NO_POSIX_IO retdb->dbst = dbst; #endif diff --git a/deps/openssl/openssl/apps/lib/http_server.c b/deps/openssl/openssl/apps/lib/http_server.c index 33ae886d4a1cbe..d2bfa432d9662c 100644 --- a/deps/openssl/openssl/apps/lib/http_server.c +++ b/deps/openssl/openssl/apps/lib/http_server.c @@ -1,5 +1,5 @@ /* - * Copyright 1995-2022 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2025 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -222,7 +222,7 @@ BIO *http_server_init_bio(const char *prog, const char *port) int asock; char name[40]; - snprintf(name, sizeof(name), "[::]:%s", port); /* port may be "0" */ + BIO_snprintf(name, sizeof(name), "[::]:%s", port); /* port may be "0" */ bufbio = BIO_new(BIO_f_buffer()); if (bufbio == NULL) goto err; diff --git a/deps/openssl/openssl/apps/ocsp.c b/deps/openssl/openssl/apps/ocsp.c index fb3105da552660..26340805c2b3b3 100644 --- a/deps/openssl/openssl/apps/ocsp.c +++ b/deps/openssl/openssl/apps/ocsp.c @@ -1,5 +1,5 @@ /* - * Copyright 2001-2024 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2001-2025 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -1049,6 +1049,10 @@ static void make_ocsp_response(BIO *err, OCSP_RESPONSE **resp, OCSP_REQUEST *req } bs = OCSP_BASICRESP_new(); + if (bs == NULL) { + *resp = OCSP_response_create(OCSP_RESPONSE_STATUS_INTERNALERROR, bs); + goto end; + } thisupd = X509_gmtime_adj(NULL, 0); if (ndays != -1) nextupd = X509_time_adj_ex(NULL, ndays, nmin * 60, NULL); diff --git a/deps/openssl/openssl/apps/pkeyutl.c b/deps/openssl/openssl/apps/pkeyutl.c index 5e5047137632b6..caf3f639eae50e 100644 --- a/deps/openssl/openssl/apps/pkeyutl.c +++ b/deps/openssl/openssl/apps/pkeyutl.c @@ -1,5 +1,5 @@ /* - * Copyright 2006-2023 The OpenSSL Project Authors. 
All Rights Reserved. + * Copyright 2006-2025 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -371,6 +371,7 @@ int pkeyutl_main(int argc, char **argv) if (EVP_PKEY_CTX_ctrl_str(ctx, opt, passwd) <= 0) { BIO_printf(bio_err, "%s: Can't set parameter \"%s\":\n", prog, opt); + OPENSSL_free(passwd); goto end; } OPENSSL_free(passwd); diff --git a/deps/openssl/openssl/apps/s_time.c b/deps/openssl/openssl/apps/s_time.c index 1a58e19de53fe7..b7761915626167 100644 --- a/deps/openssl/openssl/apps/s_time.c +++ b/deps/openssl/openssl/apps/s_time.c @@ -1,5 +1,5 @@ /* - * Copyright 1995-2021 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2025 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -325,8 +325,10 @@ int s_time_main(int argc, char **argv) */ next: - if (!(perform & 2)) + if (!(perform & 2)) { + ret = 0; goto end; + } printf("\n\nNow timing with session id reuse.\n"); /* Get an SSL object so we can reuse the session id */ diff --git a/deps/openssl/openssl/apps/storeutl.c b/deps/openssl/openssl/apps/storeutl.c index 30c9915de3e83a..96b943bf6dd1a4 100644 --- a/deps/openssl/openssl/apps/storeutl.c +++ b/deps/openssl/openssl/apps/storeutl.c @@ -1,5 +1,5 @@ /* - * Copyright 2016-2021 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2016-2025 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -198,9 +198,7 @@ int storeutl_main(int argc, char *argv[]) } break; case OPT_CRITERION_FINGERPRINT: - if (criterion != 0 - || (criterion == OSSL_STORE_SEARCH_BY_KEY_FINGERPRINT - && fingerprint != NULL)) { + if (criterion != 0) { BIO_printf(bio_err, "%s: criterion already given.\n", prog); goto end; diff --git a/deps/openssl/openssl/crypto/asn1/asn_mime.c b/deps/openssl/openssl/crypto/asn1/asn_mime.c index 8bb7089292d0f3..c50665914e3c11 100644 --- a/deps/openssl/openssl/crypto/asn1/asn_mime.c +++ b/deps/openssl/openssl/crypto/asn1/asn_mime.c @@ -1,5 +1,5 @@ /* - * Copyright 2008-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2008-2025 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -96,7 +96,7 @@ int i2d_ASN1_bio_stream(BIO *out, ASN1_VALUE *val, BIO *in, int flags, * internally */ else - ASN1_item_i2d_bio(it, out, val); + rv = ASN1_item_i2d_bio(it, out, val); return rv; } diff --git a/deps/openssl/openssl/crypto/asn1/tasn_enc.c b/deps/openssl/openssl/crypto/asn1/tasn_enc.c index 3ea18b0280ddc5..fb2e040c7c56e3 100644 --- a/deps/openssl/openssl/crypto/asn1/tasn_enc.c +++ b/deps/openssl/openssl/crypto/asn1/tasn_enc.c @@ -1,5 +1,5 @@ /* - * Copyright 2000-2021 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2000-2025 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. 
You can obtain a copy @@ -571,6 +571,9 @@ static int asn1_ex_i2c(const ASN1_VALUE **pval, unsigned char *cout, int *putype return -1; break; + case V_ASN1_UNDEF: + return -2; + case V_ASN1_NULL: cont = NULL; len = 0; diff --git a/deps/openssl/openssl/crypto/bio/bio_dump.c b/deps/openssl/openssl/crypto/bio/bio_dump.c index c453da62688c6c..04191a653054ba 100644 --- a/deps/openssl/openssl/crypto/bio/bio_dump.c +++ b/deps/openssl/openssl/crypto/bio/bio_dump.c @@ -1,5 +1,5 @@ /* - * Copyright 1995-2021 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2025 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -47,6 +47,8 @@ int BIO_dump_indent_cb(int (*cb) (const void *data, size_t len, void *u), for (i = 0; i < rows; i++) { n = BIO_snprintf(buf, sizeof(buf), "%*s%04x - ", indent, "", i * dump_width); + if (n < 0) + return -1; for (j = 0; j < dump_width; j++) { if (SPACE(buf, n, 3)) { if (((i * dump_width) + j) >= len) { diff --git a/deps/openssl/openssl/crypto/bio/bio_print.c b/deps/openssl/openssl/crypto/bio/bio_print.c index 4c9c3af7cfd606..1934a6884251a2 100644 --- a/deps/openssl/openssl/crypto/bio/bio_print.c +++ b/deps/openssl/openssl/crypto/bio/bio_print.c @@ -1,5 +1,5 @@ /* - * Copyright 1995-2022 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2025 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -535,6 +535,10 @@ static LDOUBLE abs_val(LDOUBLE value) LDOUBLE result = value; if (value < 0) result = -value; + if (result > 0 && result / 2 == result) /* INF */ + result = 0; + else if (result != result) /* NAN */ + result = 0; return result; } @@ -590,6 +594,9 @@ fmtfp(char **sbuffer, signvalue = '+'; else if (flags & DP_F_SPACE) signvalue = ' '; + ufvalue = abs_val(fvalue); + if (ufvalue == 0 && fvalue != 0) /* INF or NAN? */ + signvalue = '?'; /* * G_FORMAT sometimes prints like E_FORMAT and sometimes like F_FORMAT @@ -597,12 +604,12 @@ fmtfp(char **sbuffer, * that from here on. */ if (style == G_FORMAT) { - if (fvalue == 0.0) { + if (ufvalue == 0.0) { realstyle = F_FORMAT; - } else if (fvalue < 0.0001) { + } else if (ufvalue < 0.0001) { realstyle = E_FORMAT; - } else if ((max == 0 && fvalue >= 10) - || (max > 0 && fvalue >= pow_10(max))) { + } else if ((max == 0 && ufvalue >= 10) + || (max > 0 && ufvalue >= pow_10(max))) { realstyle = E_FORMAT; } else { realstyle = F_FORMAT; @@ -612,9 +619,9 @@ fmtfp(char **sbuffer, } if (style != F_FORMAT) { - tmpvalue = fvalue; + tmpvalue = ufvalue; /* Calculate the exponent */ - if (fvalue != 0.0) { + if (ufvalue != 0.0) { while (tmpvalue < 1) { tmpvalue *= 10; exp--; @@ -651,9 +658,9 @@ fmtfp(char **sbuffer, } } if (realstyle == E_FORMAT) - fvalue = tmpvalue; + ufvalue = tmpvalue; } - ufvalue = abs_val(fvalue); + /* * By subtracting 65535 (2^16-1) we cancel the low order 15 bits * of ULONG_MAX to avoid using imprecise floating point values. 
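A brief aside on the bio_print.c change above (illustrative only, not part of the upstream patch): abs_val()/fmtfp() now classify non-finite values with two classic portable checks that need no libm helpers: a positive finite double is never equal to half of itself, so x > 0 && x / 2 == x only holds for infinity, and NaN is the only value that compares unequal to itself. A minimal standalone sketch of that idea, using a hypothetical is_inf_or_nan() helper:

    #include <math.h>
    #include <stdio.h>

    /* Returns 1 for +/-Inf or NaN, 0 for ordinary finite doubles. */
    static int is_inf_or_nan(double x)
    {
        double a = x < 0 ? -x : x;

        if (a > 0 && a / 2 == a)    /* only infinity survives halving unchanged */
            return 1;
        if (x != x)                 /* NaN never compares equal to itself */
            return 1;
        return 0;
    }

    int main(void)
    {
        printf("%d %d %d\n",
               is_inf_or_nan(INFINITY),   /* 1 */
               is_inf_or_nan(NAN),        /* 1 */
               is_inf_or_nan(2.5));       /* 0 */
        return 0;
    }

Like the patched code, this relies on strict IEEE comparison semantics, so it is not reliable under -ffast-math style optimizations.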
diff --git a/deps/openssl/openssl/crypto/bn/README.pod b/deps/openssl/openssl/crypto/bn/README.pod deleted file mode 100644 index 1286fc0d413253..00000000000000 --- a/deps/openssl/openssl/crypto/bn/README.pod +++ /dev/null @@ -1,241 +0,0 @@ -=pod - -=head1 NAME - -bn_mul_words, bn_mul_add_words, bn_sqr_words, bn_div_words, -bn_add_words, bn_sub_words, bn_mul_comba4, bn_mul_comba8, -bn_sqr_comba4, bn_sqr_comba8, bn_cmp_words, bn_mul_normal, -bn_mul_low_normal, bn_mul_recursive, bn_mul_part_recursive, -bn_mul_low_recursive, bn_sqr_normal, bn_sqr_recursive, -bn_expand, bn_wexpand, bn_expand2, bn_fix_top, bn_check_top, -bn_print, bn_dump, bn_set_max, bn_set_high, bn_set_low - BIGNUM -library internal functions - -=head1 SYNOPSIS - - #include - - BN_ULONG bn_mul_words(BN_ULONG *rp, BN_ULONG *ap, int num, BN_ULONG w); - BN_ULONG bn_mul_add_words(BN_ULONG *rp, BN_ULONG *ap, int num, - BN_ULONG w); - void bn_sqr_words(BN_ULONG *rp, BN_ULONG *ap, int num); - BN_ULONG bn_div_words(BN_ULONG h, BN_ULONG l, BN_ULONG d); - BN_ULONG bn_add_words(BN_ULONG *rp, BN_ULONG *ap, BN_ULONG *bp, - int num); - BN_ULONG bn_sub_words(BN_ULONG *rp, BN_ULONG *ap, BN_ULONG *bp, - int num); - - void bn_mul_comba4(BN_ULONG *r, BN_ULONG *a, BN_ULONG *b); - void bn_mul_comba8(BN_ULONG *r, BN_ULONG *a, BN_ULONG *b); - void bn_sqr_comba4(BN_ULONG *r, BN_ULONG *a); - void bn_sqr_comba8(BN_ULONG *r, BN_ULONG *a); - - int bn_cmp_words(BN_ULONG *a, BN_ULONG *b, int n); - - void bn_mul_normal(BN_ULONG *r, BN_ULONG *a, int na, BN_ULONG *b, - int nb); - void bn_mul_low_normal(BN_ULONG *r, BN_ULONG *a, BN_ULONG *b, int n); - void bn_mul_recursive(BN_ULONG *r, BN_ULONG *a, BN_ULONG *b, int n2, - int dna, int dnb, BN_ULONG *tmp); - void bn_mul_part_recursive(BN_ULONG *r, BN_ULONG *a, BN_ULONG *b, - int n, int tna, int tnb, BN_ULONG *tmp); - void bn_mul_low_recursive(BN_ULONG *r, BN_ULONG *a, BN_ULONG *b, - int n2, BN_ULONG *tmp); - - void bn_sqr_normal(BN_ULONG *r, BN_ULONG *a, int n, BN_ULONG *tmp); - void bn_sqr_recursive(BN_ULONG *r, BN_ULONG *a, int n2, BN_ULONG *tmp); - - void mul(BN_ULONG r, BN_ULONG a, BN_ULONG w, BN_ULONG c); - void mul_add(BN_ULONG r, BN_ULONG a, BN_ULONG w, BN_ULONG c); - void sqr(BN_ULONG r0, BN_ULONG r1, BN_ULONG a); - - BIGNUM *bn_expand(BIGNUM *a, int bits); - BIGNUM *bn_wexpand(BIGNUM *a, int n); - BIGNUM *bn_expand2(BIGNUM *a, int n); - void bn_fix_top(BIGNUM *a); - - void bn_check_top(BIGNUM *a); - void bn_print(BIGNUM *a); - void bn_dump(BN_ULONG *d, int n); - void bn_set_max(BIGNUM *a); - void bn_set_high(BIGNUM *r, BIGNUM *a, int n); - void bn_set_low(BIGNUM *r, BIGNUM *a, int n); - -=head1 DESCRIPTION - -This page documents the internal functions used by the OpenSSL -B implementation. They are described here to facilitate -debugging and extending the library. They are I to be used by -applications. - -=head2 The BIGNUM structure - - typedef struct bignum_st BIGNUM; - - struct bignum_st - { - BN_ULONG *d; /* Pointer to an array of 'BN_BITS2' bit chunks. */ - int top; /* Index of last used d +1. */ - /* The next are internal book keeping for bn_expand. */ - int dmax; /* Size of the d array. */ - int neg; /* one if the number is negative */ - int flags; - }; - - -The integer value is stored in B, a malloc()ed array of words (B), -least significant word first. A B can be either 16, 32 or 64 bits -in size, depending on the 'number of bits' (B) specified in -C. - -B is the size of the B array that has been allocated. B -is the number of words being used, so for a value of 4, bn.d[0]=4 and -bn.top=1. 
B is 1 if the number is negative. When a B is -B<0>, the B field can be B and B == B<0>. - -B is a bit field of flags which are defined in C. The -flags begin with B. The macros BN_set_flags(b, n) and -BN_get_flags(b, n) exist to enable or fetch flag(s) B from B -structure B. - -Various routines in this library require the use of temporary -B variables during their execution. Since dynamic memory -allocation to create Bs is rather expensive when used in -conjunction with repeated subroutine calls, the B structure is -used. This structure contains B Bs, see -L. - -=head2 Low-level arithmetic operations - -These functions are implemented in C and for several platforms in -assembly language: - -bn_mul_words(B, B, B, B) operates on the B word -arrays B and B. It computes B * B, places the result -in B, and returns the high word (carry). - -bn_mul_add_words(B, B, B, B) operates on the B -word arrays B and B. It computes B * B + B, places -the result in B, and returns the high word (carry). - -bn_sqr_words(B, B, B) operates on the B word array -B and the 2*B word array B. It computes B * B -word-wise, and places the low and high bytes of the result in B. - -bn_div_words(B, B, B) divides the two word number (B, B) -by B and returns the result. - -bn_add_words(B, B, B, B) operates on the B word -arrays B, B and B. It computes B + B, places the -result in B, and returns the high word (carry). - -bn_sub_words(B, B, B, B) operates on the B word -arrays B, B and B. It computes B - B, places the -result in B, and returns the carry (1 if B E B, 0 -otherwise). - -bn_mul_comba4(B, B, B) operates on the 4 word arrays B and -B and the 8 word array B. It computes B*B and places the -result in B. - -bn_mul_comba8(B, B, B) operates on the 8 word arrays B and -B and the 16 word array B. It computes B*B and places the -result in B. - -bn_sqr_comba4(B, B, B) operates on the 4 word arrays B and -B and the 8 word array B. - -bn_sqr_comba8(B, B, B) operates on the 8 word arrays B and -B and the 16 word array B. - -The following functions are implemented in C: - -bn_cmp_words(B, B, B) operates on the B word arrays B -and B. It returns 1, 0 and -1 if B is greater than, equal and -less than B. - -bn_mul_normal(B, B, B, B, B) operates on the B -word array B, the B word array B and the B+B word -array B. It computes B*B and places the result in B. - -bn_mul_low_normal(B, B, B, B) operates on the B word -arrays B, B and B. It computes the B low words of -B*B and places the result in B. - -bn_mul_recursive(B, B, B, B, B, B, B) operates -on the word arrays B and B of length B+B and B+B -(B and B are currently allowed to be 0 or negative) and the 2*B -word arrays B and B. B must be a power of 2. It computes -B*B and places the result in B. - -bn_mul_part_recursive(B, B, B, B, B, B, B) -operates on the word arrays B and B of length B+B and -B+B and the 4*B word arrays B and B. - -bn_mul_low_recursive(B, B, B, B, B) operates on the -B word arrays B and B and the B/2 word arrays B -and B. - -BN_mul() calls bn_mul_normal(), or an optimized implementation if the -factors have the same size: bn_mul_comba8() is used if they are 8 -words long, bn_mul_recursive() if they are larger than -B and the size is an exact multiple of the word -size, and bn_mul_part_recursive() for others that are larger than -B. - -bn_sqr_normal(B, B, B, B) operates on the B word array -B and the 2*B word arrays B and B. 
- -The implementations use the following macros which, depending on the -architecture, may use "long long" C operations or inline assembler. -They are defined in C. - -mul(B, B, B, B) computes B*B+B and places the -low word of the result in B and the high word in B. - -mul_add(B, B, B, B) computes B*B+B+B and -places the low word of the result in B and the high word in B. - -sqr(B, B, B) computes B*B and places the low word -of the result in B and the high word in B. - -=head2 Size changes - -bn_expand() ensures that B has enough space for a B bit -number. bn_wexpand() ensures that B has enough space for an -B word number. If the number has to be expanded, both macros -call bn_expand2(), which allocates a new B array and copies the -data. They return B on error, B otherwise. - -The bn_fix_top() macro reduces Btop> to point to the most -significant non-zero word plus one when B has shrunk. - -=head2 Debugging - -bn_check_top() verifies that C<((a)-Etop E= 0 && (a)-Etop -E= (a)-Edmax)>. A violation will cause the program to abort. - -bn_print() prints B to stderr. bn_dump() prints B words at B -(in reverse order, i.e. most significant word first) to stderr. - -bn_set_max() makes B a static number with a B of its current size. -This is used by bn_set_low() and bn_set_high() to make B a read-only -B that contains the B low or high words of B. - -If B is not defined, bn_check_top(), bn_print(), bn_dump() -and bn_set_max() are defined as empty macros. - -=head1 SEE ALSO - -L - -=head1 COPYRIGHT - -Copyright 2000-2016 The OpenSSL Project Authors. All Rights Reserved. - -Licensed under the Apache License 2.0 (the "License"). You may not use -this file except in compliance with the License. You can obtain a copy -in the file LICENSE in the source distribution or at -L. - -=cut diff --git a/deps/openssl/openssl/crypto/cmp/cmp_client.c b/deps/openssl/openssl/crypto/cmp/cmp_client.c index 4c8dbfdcd739ad..8950091b93ee04 100644 --- a/deps/openssl/openssl/crypto/cmp/cmp_client.c +++ b/deps/openssl/openssl/crypto/cmp/cmp_client.c @@ -1,5 +1,5 @@ /* - * Copyright 2007-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2007-2025 The OpenSSL Project Authors. All Rights Reserved. * Copyright Nokia 2007-2019 * Copyright Siemens AG 2015-2019 * @@ -611,8 +611,10 @@ static int cert_response(OSSL_CMP_CTX *ctx, int sleep, int rid, ERR_add_error_data(1, "; cannot extract certificate from response"); return 0; } - if (!ossl_cmp_ctx_set0_newCert(ctx, cert)) + if (!ossl_cmp_ctx_set0_newCert(ctx, cert)) { + X509_free(cert); return 0; + } /* * if the CMP server returned certificates in the caPubs field, copy them diff --git a/deps/openssl/openssl/crypto/cms/cms_pwri.c b/deps/openssl/openssl/crypto/cms/cms_pwri.c index 2373092bed550c..d5c3c8d399dfd1 100644 --- a/deps/openssl/openssl/crypto/cms/cms_pwri.c +++ b/deps/openssl/openssl/crypto/cms/cms_pwri.c @@ -1,5 +1,5 @@ /* - * Copyright 2009-2022 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2009-2025 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. 
You can obtain a copy @@ -157,7 +157,8 @@ CMS_RecipientInfo *CMS_add0_recipient_password(CMS_ContentInfo *cms, /* Setup PBE algorithm */ - pwri->keyDerivationAlgorithm = PKCS5_pbkdf2_set(iter, NULL, 0, -1, -1); + pwri->keyDerivationAlgorithm = PKCS5_pbkdf2_set_ex(iter, NULL, 0, -1, -1, + cms_ctx->libctx); if (pwri->keyDerivationAlgorithm == NULL) goto err; @@ -351,9 +352,10 @@ int ossl_cms_RecipientInfo_pwri_crypt(const CMS_ContentInfo *cms, /* Finish password based key derivation to setup key in "ctx" */ - if (EVP_PBE_CipherInit(algtmp->algorithm, - (char *)pwri->pass, pwri->passlen, - algtmp->parameter, kekctx, en_de) < 0) { + if (EVP_PBE_CipherInit_ex(algtmp->algorithm, + (char *)pwri->pass, pwri->passlen, + algtmp->parameter, kekctx, en_de, + cms_ctx->libctx, cms_ctx->propq) < 0) { ERR_raise(ERR_LIB_CMS, ERR_R_EVP_LIB); goto err; } diff --git a/deps/openssl/openssl/crypto/dh/dh_pmeth.c b/deps/openssl/openssl/crypto/dh/dh_pmeth.c index f201eede0df4f3..fd6c85230236cd 100644 --- a/deps/openssl/openssl/crypto/dh/dh_pmeth.c +++ b/deps/openssl/openssl/crypto/dh/dh_pmeth.c @@ -1,5 +1,5 @@ /* - * Copyright 2006-2022 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2006-2025 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -424,7 +424,7 @@ static int pkey_dh_derive(EVP_PKEY_CTX *ctx, unsigned char *key, ret = DH_compute_key_padded(key, dhpubbn, dh); else ret = DH_compute_key(key, dhpubbn, dh); - if (ret < 0) + if (ret <= 0) return ret; *keylen = ret; return 1; diff --git a/deps/openssl/openssl/crypto/encode_decode/encoder_pkey.c b/deps/openssl/openssl/crypto/encode_decode/encoder_pkey.c index 39266a95309dcd..b372170db2560c 100644 --- a/deps/openssl/openssl/crypto/encode_decode/encoder_pkey.c +++ b/deps/openssl/openssl/crypto/encode_decode/encoder_pkey.c @@ -1,5 +1,5 @@ /* - * Copyright 2019-2022 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2019-2025 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -207,6 +207,7 @@ encoder_construct_pkey(OSSL_ENCODER_INSTANCE *encoder_inst, void *arg) static void encoder_destruct_pkey(void *arg) { struct construct_data_st *data = arg; + int match = (data->obj == data->constructed_obj); if (data->encoder_inst != NULL) { OSSL_ENCODER *encoder = @@ -215,6 +216,8 @@ static void encoder_destruct_pkey(void *arg) encoder->free_object(data->constructed_obj); } data->constructed_obj = NULL; + if (match) + data->obj = NULL; } /* diff --git a/deps/openssl/openssl/crypto/evp/bio_enc.c b/deps/openssl/openssl/crypto/evp/bio_enc.c index 304030bcb3b553..d0cb911142787d 100644 --- a/deps/openssl/openssl/crypto/evp/bio_enc.c +++ b/deps/openssl/openssl/crypto/evp/bio_enc.c @@ -1,5 +1,5 @@ /* - * Copyright 1995-2021 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2025 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -159,6 +159,7 @@ static int enc_read(BIO *b, char *out, int outl) /* Should be continue next time we are called? 
*/ if (!BIO_should_retry(next)) { ctx->cont = i; + ctx->finished = 1; i = EVP_CipherFinal_ex(ctx->cipher, ctx->buf, &(ctx->buf_len)); ctx->ok = i; diff --git a/deps/openssl/openssl/crypto/evp/ctrl_params_translate.c b/deps/openssl/openssl/crypto/evp/ctrl_params_translate.c index 44d0895bcf1447..cbf02dc5f831d6 100644 --- a/deps/openssl/openssl/crypto/evp/ctrl_params_translate.c +++ b/deps/openssl/openssl/crypto/evp/ctrl_params_translate.c @@ -1,5 +1,5 @@ /* - * Copyright 2021-2024 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2021-2025 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -2827,11 +2827,15 @@ static int evp_pkey_ctx_setget_params_to_ctrl(EVP_PKEY_CTX *pctx, int evp_pkey_ctx_set_params_to_ctrl(EVP_PKEY_CTX *ctx, const OSSL_PARAM *params) { + if (ctx->keymgmt != NULL) + return 0; return evp_pkey_ctx_setget_params_to_ctrl(ctx, SET, (OSSL_PARAM *)params); } int evp_pkey_ctx_get_params_to_ctrl(EVP_PKEY_CTX *ctx, OSSL_PARAM *params) { + if (ctx->keymgmt != NULL) + return 0; return evp_pkey_ctx_setget_params_to_ctrl(ctx, GET, params); } diff --git a/deps/openssl/openssl/crypto/evp/evp_pbe.c b/deps/openssl/openssl/crypto/evp/evp_pbe.c index 56ee69865c9ee3..1eb03a119a64c4 100644 --- a/deps/openssl/openssl/crypto/evp/evp_pbe.c +++ b/deps/openssl/openssl/crypto/evp/evp_pbe.c @@ -1,5 +1,5 @@ /* - * Copyright 1999-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1999-2025 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -40,7 +40,8 @@ static const EVP_PBE_CTL builtin_pbe[] = { {EVP_PBE_TYPE_OUTER, NID_pbeWithSHA1AndRC2_CBC, NID_rc2_64_cbc, NID_sha1, PKCS5_PBE_keyivgen, PKCS5_PBE_keyivgen_ex}, - {EVP_PBE_TYPE_OUTER, NID_id_pbkdf2, -1, -1, PKCS5_v2_PBKDF2_keyivgen}, + {EVP_PBE_TYPE_OUTER, NID_id_pbkdf2, -1, -1, PKCS5_v2_PBKDF2_keyivgen, + PKCS5_v2_PBKDF2_keyivgen_ex}, {EVP_PBE_TYPE_OUTER, NID_pbe_WithSHA1And128BitRC4, NID_rc4, NID_sha1, PKCS12_PBE_keyivgen, &PKCS12_PBE_keyivgen_ex}, diff --git a/deps/openssl/openssl/crypto/evp/evp_rand.c b/deps/openssl/openssl/crypto/evp/evp_rand.c index c36dbdc56c7753..99c2da0b161d5e 100644 --- a/deps/openssl/openssl/crypto/evp/evp_rand.c +++ b/deps/openssl/openssl/crypto/evp/evp_rand.c @@ -1,5 +1,5 @@ /* - * Copyright 2020-2022 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2020-2025 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. 
You can obtain a copy @@ -634,10 +634,8 @@ static int evp_rand_nonce_locked(EVP_RAND_CTX *ctx, unsigned char *out, { unsigned int str = evp_rand_strength_locked(ctx); - if (ctx->meth->nonce == NULL) - return 0; - if (ctx->meth->nonce(ctx->algctx, out, str, outlen, outlen)) - return 1; + if (ctx->meth->nonce != NULL) + return ctx->meth->nonce(ctx->algctx, out, str, outlen, outlen) > 0; return evp_rand_generate_locked(ctx, out, outlen, str, 0, NULL, 0); } diff --git a/deps/openssl/openssl/crypto/evp/exchange.c b/deps/openssl/openssl/crypto/evp/exchange.c index d7a4ad142aa75c..859c77ca420e88 100644 --- a/deps/openssl/openssl/crypto/evp/exchange.c +++ b/deps/openssl/openssl/crypto/evp/exchange.c @@ -1,5 +1,5 @@ /* - * Copyright 2019-2022 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2019-2025 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -430,7 +430,13 @@ int EVP_PKEY_derive_set_peer_ex(EVP_PKEY_CTX *ctx, EVP_PKEY *peer, */ if (provkey == NULL) goto legacy; - return ctx->op.kex.exchange->set_peer(ctx->op.kex.algctx, provkey); + ret = ctx->op.kex.exchange->set_peer(ctx->op.kex.algctx, provkey); + if (ret <= 0) + return ret; + EVP_PKEY_free(ctx->peerkey); + ctx->peerkey = peer; + EVP_PKEY_up_ref(peer); + return 1; legacy: #ifdef FIPS_MODULE diff --git a/deps/openssl/openssl/crypto/evp/legacy_sha.c b/deps/openssl/openssl/crypto/evp/legacy_sha.c index ca9a3264978abe..02ce0f042c5acf 100644 --- a/deps/openssl/openssl/crypto/evp/legacy_sha.c +++ b/deps/openssl/openssl/crypto/evp/legacy_sha.c @@ -1,5 +1,5 @@ /* - * Copyright 2019-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2019-2025 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -215,7 +215,7 @@ const EVP_MD *EVP_shake##bitlen(void) \ NID_shake##bitlen, \ 0, \ bitlen / 8, \ - EVP_MD_FLAG_XOF, \ + EVP_MD_FLAG_XOF | EVP_MD_FLAG_DIGALGID_ABSENT, \ EVP_ORIG_GLOBAL, \ LEGACY_EVP_MD_METH_TABLE(shake_init, sha3_int_update, sha3_int_final, \ shake_ctrl, (KECCAK1600_WIDTH - bitlen * 2) / 8), \ diff --git a/deps/openssl/openssl/crypto/evp/pmeth_lib.c b/deps/openssl/openssl/crypto/evp/pmeth_lib.c index 5cd0c4b27f6db3..d9a22799d051e0 100644 --- a/deps/openssl/openssl/crypto/evp/pmeth_lib.c +++ b/deps/openssl/openssl/crypto/evp/pmeth_lib.c @@ -1,5 +1,5 @@ /* - * Copyright 2006-2024 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2006-2025 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. 
You can obtain a copy @@ -501,6 +501,12 @@ EVP_PKEY_CTX *EVP_PKEY_CTX_dup(const EVP_PKEY_CTX *pctx) } rctx->legacy_keytype = pctx->legacy_keytype; + if (pctx->keymgmt != NULL) { + if (!EVP_KEYMGMT_up_ref(pctx->keymgmt)) + goto err; + rctx->keymgmt = pctx->keymgmt; + } + if (EVP_PKEY_CTX_IS_DERIVE_OP(pctx)) { if (pctx->op.kex.exchange != NULL) { rctx->op.kex.exchange = pctx->op.kex.exchange; @@ -604,6 +610,9 @@ EVP_PKEY_CTX *EVP_PKEY_CTX_dup(const EVP_PKEY_CTX *pctx) EVP_KEYMGMT *tmp_keymgmt = pctx->keymgmt; void *provkey; + if (pctx->pkey == NULL) + return rctx; + provkey = evp_pkey_export_to_provider(pctx->pkey, pctx->libctx, &tmp_keymgmt, pctx->propquery); if (provkey == NULL) @@ -721,8 +730,9 @@ int EVP_PKEY_CTX_set_params(EVP_PKEY_CTX *ctx, const OSSL_PARAM *params) ctx->op.encap.kem->set_ctx_params(ctx->op.encap.algctx, params); break; -#ifndef FIPS_MODULE case EVP_PKEY_STATE_UNKNOWN: + break; +#ifndef FIPS_MODULE case EVP_PKEY_STATE_LEGACY: return evp_pkey_ctx_set_params_to_ctrl(ctx, params); #endif @@ -759,8 +769,9 @@ int EVP_PKEY_CTX_get_params(EVP_PKEY_CTX *ctx, OSSL_PARAM *params) ctx->op.encap.kem->get_ctx_params(ctx->op.encap.algctx, params); break; -#ifndef FIPS_MODULE case EVP_PKEY_STATE_UNKNOWN: + break; +#ifndef FIPS_MODULE case EVP_PKEY_STATE_LEGACY: return evp_pkey_ctx_get_params_to_ctrl(ctx, params); #endif diff --git a/deps/openssl/openssl/crypto/http/http_client.c b/deps/openssl/openssl/crypto/http/http_client.c index c86db4405b8f19..c1fe9df5d02f7f 100644 --- a/deps/openssl/openssl/crypto/http/http_client.c +++ b/deps/openssl/openssl/crypto/http/http_client.c @@ -1,5 +1,5 @@ /* - * Copyright 2001-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2001-2025 The OpenSSL Project Authors. All Rights Reserved. * Copyright Siemens AG 2018-2020 * * Licensed under the Apache License 2.0 (the "License"). You may not use @@ -1138,13 +1138,12 @@ BIO *OSSL_HTTP_get(const char *url, const char *proxy, const char *no_proxy, const char *expected_ct, int expect_asn1, size_t max_resp_len, int timeout) { - char *current_url, *redirection_url = NULL; + char *current_url; int n_redirs = 0; char *host; char *port; char *path; int use_ssl; - OSSL_HTTP_REQ_CTX *rctx = NULL; BIO *resp = NULL; time_t max_time = timeout > 0 ? 
time(NULL) + timeout : 0; @@ -1156,6 +1155,9 @@ BIO *OSSL_HTTP_get(const char *url, const char *proxy, const char *no_proxy, return NULL; for (;;) { + char *redirection_url; + OSSL_HTTP_REQ_CTX *rctx; + if (!OSSL_HTTP_parse_url(current_url, &use_ssl, NULL /* user */, &host, &port, NULL /* port_num */, &path, NULL, NULL)) break; @@ -1164,6 +1166,7 @@ BIO *OSSL_HTTP_get(const char *url, const char *proxy, const char *no_proxy, use_ssl, bio, rbio, bio_update_fn, arg, buf_size, timeout); new_rpath: + redirection_url = NULL; if (rctx != NULL) { if (!OSSL_HTTP_set1_request(rctx, path, headers, NULL /* content_type */, @@ -1173,9 +1176,9 @@ BIO *OSSL_HTTP_get(const char *url, const char *proxy, const char *no_proxy, 0 /* no keep_alive */)) { OSSL_HTTP_REQ_CTX_free(rctx); rctx = NULL; - } else { + } else { resp = OSSL_HTTP_exchange(rctx, &redirection_url); - } + } } OPENSSL_free(path); if (resp == NULL && redirection_url != NULL) { @@ -1190,7 +1193,6 @@ BIO *OSSL_HTTP_get(const char *url, const char *proxy, const char *no_proxy, OPENSSL_free(host); OPENSSL_free(port); (void)OSSL_HTTP_close(rctx, 1); - rctx = NULL; BIO_free(resp); OPENSSL_free(current_url); return NULL; @@ -1200,7 +1202,6 @@ BIO *OSSL_HTTP_get(const char *url, const char *proxy, const char *no_proxy, OPENSSL_free(host); OPENSSL_free(port); (void)OSSL_HTTP_close(rctx, 1); - rctx = NULL; continue; } /* if redirection not allowed, ignore it */ @@ -1210,7 +1211,6 @@ BIO *OSSL_HTTP_get(const char *url, const char *proxy, const char *no_proxy, OPENSSL_free(port); if (!OSSL_HTTP_close(rctx, resp != NULL)) { BIO_free(resp); - rctx = NULL; resp = NULL; } break; diff --git a/deps/openssl/openssl/crypto/params_dup.c b/deps/openssl/openssl/crypto/params_dup.c index bc1546fc53cb56..f2c9537be8b14d 100644 --- a/deps/openssl/openssl/crypto/params_dup.c +++ b/deps/openssl/openssl/crypto/params_dup.c @@ -1,5 +1,5 @@ /* - * Copyright 2021-2022 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2021-2025 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -189,18 +189,18 @@ OSSL_PARAM *OSSL_PARAM_merge(const OSSL_PARAM *p1, const OSSL_PARAM *p2) while (1) { /* If list1 is finished just tack list2 onto the end */ if (*p1cur == NULL) { - do { + while (*p2cur != NULL) { *dst++ = **p2cur; p2cur++; - } while (*p2cur != NULL); + } break; } /* If list2 is finished just tack list1 onto the end */ if (*p2cur == NULL) { - do { + while (*p1cur != NULL) { *dst++ = **p1cur; p1cur++; - } while (*p1cur != NULL); + } break; } /* consume the list element with the smaller key */ diff --git a/deps/openssl/openssl/crypto/perlasm/sparcv9_modes.pl b/deps/openssl/openssl/crypto/perlasm/sparcv9_modes.pl index 76a2727aba62ab..3f24511c1a6bfe 100644 --- a/deps/openssl/openssl/crypto/perlasm/sparcv9_modes.pl +++ b/deps/openssl/openssl/crypto/perlasm/sparcv9_modes.pl @@ -1,5 +1,5 @@ #! /usr/bin/env perl -# Copyright 2012-2016 The OpenSSL Project Authors. All Rights Reserved. +# Copyright 2012-2025 The OpenSSL Project Authors. All Rights Reserved. # # Licensed under the Apache License 2.0 (the "License"). You may not use # this file except in compliance with the License. 
You can obtain a copy @@ -46,8 +46,8 @@ sub alg_cbc_encrypt_implement { .align 32 ${alg}${bits}_t4_cbc_encrypt: save %sp, -$::frame, %sp - cmp $len, 0 - be,pn $::size_t_cc, .L${bits}_cbc_enc_abort + cmp $len, 15 + bleu,pn $::size_t_cc, .L${bits}_cbc_enc_abort srln $len, 0, $len ! needed on v8+, "nop" on v9 sub $inp, $out, $blk_init ! $inp!=$out ___ @@ -264,8 +264,8 @@ sub alg_cbc_decrypt_implement { .align 32 ${alg}${bits}_t4_cbc_decrypt: save %sp, -$::frame, %sp - cmp $len, 0 - be,pn $::size_t_cc, .L${bits}_cbc_dec_abort + cmp $len, 15 + bleu,pn $::size_t_cc, .L${bits}_cbc_dec_abort srln $len, 0, $len ! needed on v8+, "nop" on v9 sub $inp, $out, $blk_init ! $inp!=$out ___ diff --git a/deps/openssl/openssl/crypto/pkcs7/pk7_smime.c b/deps/openssl/openssl/crypto/pkcs7/pk7_smime.c index 4593da8f5b4148..6812829ead301b 100644 --- a/deps/openssl/openssl/crypto/pkcs7/pk7_smime.c +++ b/deps/openssl/openssl/crypto/pkcs7/pk7_smime.c @@ -1,5 +1,5 @@ /* - * Copyright 1999-2022 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1999-2025 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -347,10 +347,8 @@ int PKCS7_verify(PKCS7 *p7, STACK_OF(X509) *certs, X509_STORE *store, if (flags & PKCS7_TEXT) { if (!SMIME_text(tmpout, out)) { ERR_raise(ERR_LIB_PKCS7, PKCS7_R_SMIME_TEXT_ERROR); - BIO_free(tmpout); goto err; } - BIO_free(tmpout); } /* Now Verify All Signatures */ @@ -368,6 +366,8 @@ int PKCS7_verify(PKCS7 *p7, STACK_OF(X509) *certs, X509_STORE *store, ret = 1; err: + if (flags & PKCS7_TEXT) + BIO_free(tmpout); X509_STORE_CTX_free(cert_ctx); OPENSSL_free(buf); if (tmpin == indata) { diff --git a/deps/openssl/openssl/crypto/property/property.c b/deps/openssl/openssl/crypto/property/property.c index 75615d39af3664..b0a24a7ccb6e86 100644 --- a/deps/openssl/openssl/crypto/property/property.c +++ b/deps/openssl/openssl/crypto/property/property.c @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2019-2025 The OpenSSL Project Authors. All Rights Reserved. * Copyright (c) 2019, Oracle and/or its affiliates. All rights reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use @@ -329,7 +329,7 @@ int ossl_method_store_add(OSSL_METHOD_STORE *store, const OSSL_PROVIDER *prov, /* Insert into the hash table if required */ if (!ossl_property_write_lock(store)) { - OPENSSL_free(impl); + impl_free(impl); return 0; } ossl_method_cache_flush(store, nid); diff --git a/deps/openssl/openssl/crypto/provider_conf.c b/deps/openssl/openssl/crypto/provider_conf.c index 9333b8777f2bf5..b42e8412bec0f4 100644 --- a/deps/openssl/openssl/crypto/provider_conf.c +++ b/deps/openssl/openssl/crypto/provider_conf.c @@ -1,5 +1,5 @@ /* - * Copyright 2019-2021 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2019-2025 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. 
You can obtain a copy @@ -331,7 +331,7 @@ static int provider_conf_load(OSSL_LIB_CTX *libctx, const char *name, ok = provider_conf_params(NULL, &entry, NULL, value, cnf); if (ok >= 1 && (entry.path != NULL || entry.parameters != NULL)) { ok = ossl_provider_info_add_to_store(libctx, &entry); - added = 1; + added = ok; } if (added == 0) ossl_provider_info_clear(&entry); diff --git a/deps/openssl/openssl/crypto/threads_pthread.c b/deps/openssl/openssl/crypto/threads_pthread.c index 801855c9306e20..303f481bef1256 100644 --- a/deps/openssl/openssl/crypto/threads_pthread.c +++ b/deps/openssl/openssl/crypto/threads_pthread.c @@ -1,5 +1,5 @@ /* - * Copyright 2016-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2016-2025 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -250,7 +250,7 @@ int CRYPTO_atomic_or(uint64_t *val, uint64_t op, uint64_t *ret, int CRYPTO_atomic_load(uint64_t *val, uint64_t *ret, CRYPTO_RWLOCK *lock) { -# if defined(__GNUC__) && defined(__ATOMIC_ACQUIRE) && !defined(BROKEN_CLANG_ATOMICS) +# if defined(__GNUC__) && defined(__ATOMIC_ACQ_REL) && !defined(BROKEN_CLANG_ATOMICS) if (__atomic_is_lock_free(sizeof(*val), val)) { __atomic_load(val, ret, __ATOMIC_ACQUIRE); return 1; diff --git a/deps/openssl/openssl/crypto/ts/ts_rsp_sign.c b/deps/openssl/openssl/crypto/ts/ts_rsp_sign.c index 8937bb2d6671e6..99de0bb1a2e5d1 100644 --- a/deps/openssl/openssl/crypto/ts/ts_rsp_sign.c +++ b/deps/openssl/openssl/crypto/ts/ts_rsp_sign.c @@ -1,5 +1,5 @@ /* - * Copyright 2006-2021 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2006-2025 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -645,8 +645,12 @@ static int ossl_ess_add1_signing_cert(PKCS7_SIGNER_INFO *si, } OPENSSL_free(pp); - return PKCS7_add_signed_attribute(si, NID_id_smime_aa_signingCertificate, - V_ASN1_SEQUENCE, seq); + if (!PKCS7_add_signed_attribute(si, NID_id_smime_aa_signingCertificate, + V_ASN1_SEQUENCE, seq)) { + ASN1_STRING_free(seq); + return 0; + } + return 1; } static int ossl_ess_add1_signing_cert_v2(PKCS7_SIGNER_INFO *si, @@ -668,8 +672,12 @@ static int ossl_ess_add1_signing_cert_v2(PKCS7_SIGNER_INFO *si, } OPENSSL_free(pp); - return PKCS7_add_signed_attribute(si, NID_id_smime_aa_signingCertificateV2, - V_ASN1_SEQUENCE, seq); + if (!PKCS7_add_signed_attribute(si, NID_id_smime_aa_signingCertificateV2, + V_ASN1_SEQUENCE, seq)) { + ASN1_STRING_free(seq); + return 0; + } + return 1; } static int ts_RESP_sign(TS_RESP_CTX *ctx) diff --git a/deps/openssl/openssl/crypto/ui/ui_lib.c b/deps/openssl/openssl/crypto/ui/ui_lib.c index 2ddf76cb5357f1..fe7393bfd6108e 100644 --- a/deps/openssl/openssl/crypto/ui/ui_lib.c +++ b/deps/openssl/openssl/crypto/ui/ui_lib.c @@ -1,5 +1,5 @@ /* - * Copyright 2001-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2001-2025 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. 
You can obtain a copy @@ -207,6 +207,7 @@ int UI_dup_input_string(UI *ui, const char *prompt, int flags, char *result_buf, int minsize, int maxsize) { char *prompt_copy = NULL; + int ret; if (prompt != NULL) { prompt_copy = OPENSSL_strdup(prompt); @@ -216,9 +217,13 @@ int UI_dup_input_string(UI *ui, const char *prompt, int flags, } } - return general_allocate_string(ui, prompt_copy, 1, - UIT_PROMPT, flags, result_buf, minsize, - maxsize, NULL); + ret = general_allocate_string(ui, prompt_copy, 1, + UIT_PROMPT, flags, result_buf, minsize, + maxsize, NULL); + if (ret <= 0) + OPENSSL_free(prompt_copy); + + return ret; } int UI_add_verify_string(UI *ui, const char *prompt, int flags, @@ -235,6 +240,7 @@ int UI_dup_verify_string(UI *ui, const char *prompt, int flags, const char *test_buf) { char *prompt_copy = NULL; + int ret; if (prompt != NULL) { prompt_copy = OPENSSL_strdup(prompt); @@ -244,9 +250,12 @@ int UI_dup_verify_string(UI *ui, const char *prompt, int flags, } } - return general_allocate_string(ui, prompt_copy, 1, - UIT_VERIFY, flags, result_buf, minsize, - maxsize, test_buf); + ret = general_allocate_string(ui, prompt_copy, 1, + UIT_VERIFY, flags, result_buf, minsize, + maxsize, test_buf); + if (ret <= 0) + OPENSSL_free(prompt_copy); + return ret; } int UI_add_input_boolean(UI *ui, const char *prompt, const char *action_desc, @@ -266,6 +275,7 @@ int UI_dup_input_boolean(UI *ui, const char *prompt, const char *action_desc, char *action_desc_copy = NULL; char *ok_chars_copy = NULL; char *cancel_chars_copy = NULL; + int ret; if (prompt != NULL) { prompt_copy = OPENSSL_strdup(prompt); @@ -299,9 +309,14 @@ int UI_dup_input_boolean(UI *ui, const char *prompt, const char *action_desc, } } - return general_allocate_boolean(ui, prompt_copy, action_desc_copy, - ok_chars_copy, cancel_chars_copy, 1, - UIT_BOOLEAN, flags, result_buf); + ret = general_allocate_boolean(ui, prompt_copy, action_desc_copy, + ok_chars_copy, cancel_chars_copy, 1, + UIT_BOOLEAN, flags, result_buf); + if (ret <= 0) + goto err; + + return ret; + err: OPENSSL_free(prompt_copy); OPENSSL_free(action_desc_copy); @@ -319,6 +334,7 @@ int UI_add_info_string(UI *ui, const char *text) int UI_dup_info_string(UI *ui, const char *text) { char *text_copy = NULL; + int ret; if (text != NULL) { text_copy = OPENSSL_strdup(text); @@ -328,8 +344,11 @@ int UI_dup_info_string(UI *ui, const char *text) } } - return general_allocate_string(ui, text_copy, 1, UIT_INFO, 0, NULL, - 0, 0, NULL); + ret = general_allocate_string(ui, text_copy, 1, UIT_INFO, 0, NULL, + 0, 0, NULL); + if (ret <= 0) + OPENSSL_free(text_copy); + return ret; } int UI_add_error_string(UI *ui, const char *text) @@ -341,6 +360,7 @@ int UI_add_error_string(UI *ui, const char *text) int UI_dup_error_string(UI *ui, const char *text) { char *text_copy = NULL; + int ret; if (text != NULL) { text_copy = OPENSSL_strdup(text); @@ -349,8 +369,12 @@ int UI_dup_error_string(UI *ui, const char *text) return -1; } } - return general_allocate_string(ui, text_copy, 1, UIT_ERROR, 0, NULL, - 0, 0, NULL); + + ret = general_allocate_string(ui, text_copy, 1, UIT_ERROR, 0, NULL, + 0, 0, NULL); + if (ret <= 0) + OPENSSL_free(text_copy); + return ret; } char *UI_construct_prompt(UI *ui, const char *phrase_desc, diff --git a/deps/openssl/openssl/crypto/x509/by_store.c b/deps/openssl/openssl/crypto/x509/by_store.c index 050735ce324734..e486fb0a9d9429 100644 --- a/deps/openssl/openssl/crypto/x509/by_store.c +++ b/deps/openssl/openssl/crypto/x509/by_store.c @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 The 
OpenSSL Project Authors. All Rights Reserved. + * Copyright 2018-2025 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -7,23 +7,34 @@ * https://www.openssl.org/source/license.html */ +#include #include #include "internal/cryptlib.h" #include "crypto/x509.h" #include "x509_local.h" +typedef struct cached_store_st { + char *uri; + OSSL_LIB_CTX *libctx; + char *propq; + OSSL_STORE_CTX *ctx; +} CACHED_STORE; + +DEFINE_STACK_OF(CACHED_STORE) + /* Generic object loader, given expected type and criterion */ -static int cache_objects(X509_LOOKUP *lctx, const char *uri, - const OSSL_STORE_SEARCH *criterion, - int depth, OSSL_LIB_CTX *libctx, const char *propq) +static int cache_objects(X509_LOOKUP *lctx, CACHED_STORE *store, + const OSSL_STORE_SEARCH *criterion, int depth) { int ok = 0; - OSSL_STORE_CTX *ctx = NULL; + OSSL_STORE_CTX *ctx = store->ctx; X509_STORE *xstore = X509_LOOKUP_get_store(lctx); - if ((ctx = OSSL_STORE_open_ex(uri, libctx, propq, NULL, NULL, NULL, - NULL, NULL)) == NULL) + if (ctx == NULL + && (ctx = OSSL_STORE_open_ex(store->uri, store->libctx, store->propq, + NULL, NULL, NULL, NULL, NULL)) == NULL) return 0; + store->ctx = ctx; /* * We try to set the criterion, but don't care if it was valid or not. @@ -62,9 +73,15 @@ static int cache_objects(X509_LOOKUP *lctx, const char *uri, * This is an entry in the "directory" represented by the current * uri. if |depth| allows, dive into it. */ - if (depth > 0) - ok = cache_objects(lctx, OSSL_STORE_INFO_get0_NAME(info), - criterion, depth - 1, libctx, propq); + if (depth > 0) { + CACHED_STORE substore; + + substore.uri = (char *)OSSL_STORE_INFO_get0_NAME(info); + substore.libctx = store->libctx; + substore.propq = store->propq; + substore.ctx = NULL; + ok = cache_objects(lctx, &substore, criterion, depth - 1); + } } else { /* * We know that X509_STORE_add_{cert|crl} increments the object's @@ -88,27 +105,38 @@ static int cache_objects(X509_LOOKUP *lctx, const char *uri, break; } OSSL_STORE_close(ctx); + store->ctx = NULL; return ok; } -/* Because OPENSSL_free is a macro and for C type match */ -static void free_uri(OPENSSL_STRING data) +static void free_store(CACHED_STORE *store) { - OPENSSL_free(data); + if (store != NULL) { + OSSL_STORE_close(store->ctx); + OPENSSL_free(store->uri); + OPENSSL_free(store->propq); + OPENSSL_free(store); + } } static void by_store_free(X509_LOOKUP *ctx) { - STACK_OF(OPENSSL_STRING) *uris = X509_LOOKUP_get_method_data(ctx); - sk_OPENSSL_STRING_pop_free(uris, free_uri); + STACK_OF(CACHED_STORE) *stores = X509_LOOKUP_get_method_data(ctx); + sk_CACHED_STORE_pop_free(stores, free_store); } static int by_store_ctrl_ex(X509_LOOKUP *ctx, int cmd, const char *argp, long argl, char **retp, OSSL_LIB_CTX *libctx, const char *propq) { + /* + * In some cases below, failing to use the defaults shouldn't result in + * an error. |use_default| is used as the return code in those cases. 
+ */ + int use_default = argp == NULL; + switch (cmd) { case X509_L_ADD_STORE: /* If no URI is given, use the default cert dir as default URI */ @@ -118,21 +146,50 @@ static int by_store_ctrl_ex(X509_LOOKUP *ctx, int cmd, const char *argp, argp = X509_get_default_cert_dir(); { - STACK_OF(OPENSSL_STRING) *uris = X509_LOOKUP_get_method_data(ctx); - char *data = OPENSSL_strdup(argp); + STACK_OF(CACHED_STORE) *stores = X509_LOOKUP_get_method_data(ctx); + CACHED_STORE *store = OPENSSL_zalloc(sizeof(*store)); - if (data == NULL) { + if (store == NULL) { return 0; } - if (uris == NULL) { - uris = sk_OPENSSL_STRING_new_null(); - X509_LOOKUP_set_method_data(ctx, uris); + + store->uri = OPENSSL_strdup(argp); + store->libctx = libctx; + if (propq != NULL) + store->propq = OPENSSL_strdup(propq); + store->ctx = OSSL_STORE_open_ex(argp, libctx, propq, NULL, NULL, + NULL, NULL, NULL); + if (store->ctx == NULL + || (propq != NULL && store->propq == NULL) + || store->uri == NULL) { + free_store(store); + return use_default; + } + + if (stores == NULL) { + stores = sk_CACHED_STORE_new_null(); + if (stores != NULL) + X509_LOOKUP_set_method_data(ctx, stores); } - return sk_OPENSSL_STRING_push(uris, data) > 0; + if (stores == NULL || sk_CACHED_STORE_push(stores, store) <= 0) { + free_store(store); + return 0; + } + return 1; } - case X509_L_LOAD_STORE: + case X509_L_LOAD_STORE: { /* This is a shortcut for quick loading of specific containers */ - return cache_objects(ctx, argp, NULL, 0, libctx, propq); + CACHED_STORE store; + + store.uri = (char *)argp; + store.libctx = libctx; + store.propq = (char *)propq; + store.ctx = NULL; + return cache_objects(ctx, &store, NULL, 0); + } + default: + /* Unsupported command */ + return 0; } return 0; @@ -145,16 +202,15 @@ static int by_store_ctrl(X509_LOOKUP *ctx, int cmd, } static int by_store(X509_LOOKUP *ctx, X509_LOOKUP_TYPE type, - const OSSL_STORE_SEARCH *criterion, X509_OBJECT *ret, - OSSL_LIB_CTX *libctx, const char *propq) + const OSSL_STORE_SEARCH *criterion, X509_OBJECT *ret) { - STACK_OF(OPENSSL_STRING) *uris = X509_LOOKUP_get_method_data(ctx); + STACK_OF(CACHED_STORE) *stores = X509_LOOKUP_get_method_data(ctx); int i; int ok = 0; - for (i = 0; i < sk_OPENSSL_STRING_num(uris); i++) { - ok = cache_objects(ctx, sk_OPENSSL_STRING_value(uris, i), criterion, - 1 /* depth */, libctx, propq); + for (i = 0; i < sk_CACHED_STORE_num(stores); i++) { + ok = cache_objects(ctx, sk_CACHED_STORE_value(stores, i), criterion, + 1 /* depth */); if (ok) break; @@ -162,13 +218,12 @@ static int by_store(X509_LOOKUP *ctx, X509_LOOKUP_TYPE type, return ok; } -static int by_store_subject_ex(X509_LOOKUP *ctx, X509_LOOKUP_TYPE type, - const X509_NAME *name, X509_OBJECT *ret, - OSSL_LIB_CTX *libctx, const char *propq) +static int by_store_subject(X509_LOOKUP *ctx, X509_LOOKUP_TYPE type, + const X509_NAME *name, X509_OBJECT *ret) { OSSL_STORE_SEARCH *criterion = OSSL_STORE_SEARCH_by_name((X509_NAME *)name); /* won't modify it */ - int ok = by_store(ctx, type, criterion, ret, libctx, propq); + int ok = by_store(ctx, type, criterion, ret); STACK_OF(X509_OBJECT) *store_objects = X509_STORE_get0_objects(X509_LOOKUP_get_store(ctx)); X509_OBJECT *tmp = NULL; @@ -216,12 +271,6 @@ static int by_store_subject_ex(X509_LOOKUP *ctx, X509_LOOKUP_TYPE type, return ok; } -static int by_store_subject(X509_LOOKUP *ctx, X509_LOOKUP_TYPE type, - const X509_NAME *name, X509_OBJECT *ret) -{ - return by_store_subject_ex(ctx, type, name, ret, NULL, NULL); -} - /* * We lack the implementations for 
get_by_issuer_serial, get_by_fingerprint * and get_by_alias. There's simply not enough support in the X509_LOOKUP @@ -239,7 +288,7 @@ static X509_LOOKUP_METHOD x509_store_lookup = { NULL, /* get_by_issuer_serial */ NULL, /* get_by_fingerprint */ NULL, /* get_by_alias */ - by_store_subject_ex, + NULL, /* get_by_subject_ex */ by_store_ctrl_ex }; diff --git a/deps/openssl/openssl/crypto/x509/v3_lib.c b/deps/openssl/openssl/crypto/x509/v3_lib.c index 5ffeb75d9f5b2a..6d91df99550f18 100644 --- a/deps/openssl/openssl/crypto/x509/v3_lib.c +++ b/deps/openssl/openssl/crypto/x509/v3_lib.c @@ -1,5 +1,5 @@ /* - * Copyright 1999-2022 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1999-2025 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -99,7 +99,11 @@ int X509V3_EXT_add_alias(int nid_to, int nid_from) *tmpext = *ext; tmpext->ext_nid = nid_to; tmpext->ext_flags |= X509V3_EXT_DYNAMIC; - return X509V3_EXT_add(tmpext); + if (!X509V3_EXT_add(tmpext)) { + OPENSSL_free(tmpext); + return 0; + } + return 1; } void X509V3_EXT_cleanup(void) diff --git a/deps/openssl/openssl/e_os.h b/deps/openssl/openssl/e_os.h index 72eab92eeb4b89..34b8a4af7dfcad 100644 --- a/deps/openssl/openssl/e_os.h +++ b/deps/openssl/openssl/e_os.h @@ -1,5 +1,5 @@ /* - * Copyright 1995-2024 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2025 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -126,17 +126,6 @@ # define EACCES 13 # endif # include -# ifdef _WIN64 -# define strlen(s) _strlen31(s) -/* cut strings to 2GB */ -static __inline unsigned int _strlen31(const char *str) -{ - unsigned int len = 0; - while (*str && len < 0x80000000U) - str++, len++; - return len & 0x7FFFFFFF; -} -# endif # include # if defined(_MSC_VER) && !defined(_WIN32_WCE) && !defined(_DLL) && defined(stdin) # if _MSC_VER>=1300 && _MSC_VER<1600 diff --git a/deps/openssl/openssl/fuzz/x509.c b/deps/openssl/openssl/fuzz/x509.c index e2d2639164c01d..b1e12cf901cab6 100644 --- a/deps/openssl/openssl/fuzz/x509.c +++ b/deps/openssl/openssl/fuzz/x509.c @@ -1,5 +1,5 @@ /* - * Copyright 2016-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2016-2025 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -78,9 +78,13 @@ int FuzzerTestOneInput(const uint8_t *buf, size_t len) resp = d2i_OCSP_RESPONSE(NULL, &p, len); store = X509_STORE_new(); + if (store == NULL) + goto err; X509_STORE_add_cert(store, x509_2); param = X509_VERIFY_PARAM_new(); + if (param == NULL) + goto err; X509_VERIFY_PARAM_set_flags(param, X509_V_FLAG_NO_CHECK_TIME); X509_VERIFY_PARAM_set_flags(param, X509_V_FLAG_X509_STRICT); X509_VERIFY_PARAM_set_flags(param, X509_V_FLAG_PARTIAL_CHAIN); diff --git a/deps/openssl/openssl/include/internal/constant_time.h b/deps/openssl/openssl/include/internal/constant_time.h index 2b49afe1ea2a5c..3583344b4d866a 100644 --- a/deps/openssl/openssl/include/internal/constant_time.h +++ b/deps/openssl/openssl/include/internal/constant_time.h @@ -1,5 +1,5 @@ /* - * Copyright 2014-2024 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2014-2025 The OpenSSL Project Authors. All Rights Reserved. 
* * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -296,6 +296,18 @@ static ossl_inline size_t value_barrier_s(size_t a) return r; } +/* Convenience method for unsigned char. */ +static ossl_inline unsigned char value_barrier_8(unsigned char a) +{ +#if !defined(OPENSSL_NO_ASM) && defined(__GNUC__) + unsigned char r; + __asm__("" : "=r"(r) : "0"(a)); +#else + volatile unsigned char r = a; +#endif + return r; +} + static ossl_inline unsigned int constant_time_select(unsigned int mask, unsigned int a, unsigned int b) @@ -356,7 +368,7 @@ static ossl_inline void constant_time_cond_swap_32(uint32_t mask, uint32_t *a, { uint32_t xor = *a ^ *b; - xor &= mask; + xor &= value_barrier_32(mask); *a ^= xor; *b ^= xor; } @@ -376,7 +388,7 @@ static ossl_inline void constant_time_cond_swap_64(uint64_t mask, uint64_t *a, { uint64_t xor = *a ^ *b; - xor &= mask; + xor &= value_barrier_64(mask); *a ^= xor; *b ^= xor; } @@ -403,7 +415,7 @@ static ossl_inline void constant_time_cond_swap_buff(unsigned char mask, for (i = 0; i < len; i++) { tmp = a[i] ^ b[i]; - tmp &= mask; + tmp &= value_barrier_8(mask); a[i] ^= tmp; b[i] ^= tmp; } diff --git a/deps/openssl/openssl/providers/fips-sources.checksums b/deps/openssl/openssl/providers/fips-sources.checksums index 1ab5d0348c6ca2..2352a671d7f58c 100644 --- a/deps/openssl/openssl/providers/fips-sources.checksums +++ b/deps/openssl/openssl/providers/fips-sources.checksums @@ -197,9 +197,9 @@ e2705097cfab64e8d7eb2feba37c3f12b18aec74b135ad0c7f073efccf336d4c crypto/ec/ecx_ 62c994fd91dc4a5a1a81dfa9391d6eadae62d3549b2e1b22acb2e7c4cd278f27 crypto/evp/evp_fetch.c ebe32b2895f7f9767710674352c8949efe93b4bbb5e7b71c27bb5d1822339b46 crypto/evp/evp_lib.c 78f07bf50b6999611a4e9414ab3a20b219b0ab29ca2bd05002d6919a3f67b8eb crypto/evp/evp_local.h -117e679d49d2ae87e49d3c942ff0ce768959e8b9713f84a99025cabba462ccd5 crypto/evp/evp_rand.c +a801c0f785d2089d69872f0874bc91c2f90939839b5a1d76d33994eb5ced4754 crypto/evp/evp_rand.c 2a128617ec0178e9eeacbe41d75a5530755f41ea524cd124607543cf73456a0c crypto/evp/evp_utils.c -ca8c6cfd30efd53f2e5d1f19bcf09a3a3d0dff6d8947c3943d07a3f4b354aa86 crypto/evp/exchange.c +cedb38e16de356c6d3fcd087801db059ab8b5a857b3687ad36ff3e75654cc142 crypto/evp/exchange.c 9e25042581b73e295c059c6217f3ecf809134d518eb79b1b67f34e3ca9145677 crypto/evp/kdf_lib.c 1d72f5506984df1df8606e8c7045f041cf517223e2e1b50c4da8ba8bf1c6c186 crypto/evp/kdf_meth.c 5179624b8e03615dc9caedc9ec16d094fa081495613dd552d71c2c39475bcd83 crypto/evp/kem.c @@ -211,7 +211,7 @@ e7e8eb5683cd3fbd409df888020dc353b65ac291361829cc4131d5bc86c9fcb3 crypto/evp/mac 546d83abecf8973e2d872102a03bde5a46552909fa9e7d1402e1464a27453801 crypto/evp/p_lib.c 3b4228b92eebd04616ecc3ee58684095313dd5ffd1b43cf698a7d6c202cb4622 crypto/evp/pmeth_check.c 1f0e9e94e9b0ad322956521b438b78d44cfcd8eb974e8921d05f9e21ba1c05cf crypto/evp/pmeth_gn.c -76511fba789089a50ef87774817a5482c33633a76a94ecf7b6e8eb915585575d crypto/evp/pmeth_lib.c +59af1ebff5870b531d1e007979ba59ced21c58a5fa481d2a8b24e9e1eb635bd5 crypto/evp/pmeth_lib.c 53058617c153a7676e7ca18c98c23df867a93087d67935907076f3c5bd65c15e crypto/evp/signature.c f2acfb82aac20251d05a9c252cc6c282bd44e43feac4ac2e0faf68b9a38aef57 crypto/ex_data.c 1c8389c5d49616d491978f0f2b2a54ba82d805ec41c8f75c67853216953cf46a crypto/ffc/ffc_backend.c @@ -254,12 +254,12 @@ abba788a11469f5c01c766fdac64eccd4fb598b2d4d9a12efb086ae87009acb8 crypto/o_str.c c698d5166d091d6bb6e9df3c211fe1cc916fd43a26ec844f28f547cd708f9c55 
crypto/param_build.c 2a0f272dd553b698e8c6fa57962694ebd6064cb03fe26a60df529205568d315d crypto/param_build_set.c 0e4a5388a92fabbe5a540176c0b4c5ce258b78dc9168ecc2e805352a06aaf0ba crypto/params.c -4fda13f6af05d80b0ab89ec4f5813c274a21a9b4565be958a02d006236cef05c crypto/params_dup.c +9121f614b65e471ddf944192015c8d19c34032629cfc284ebcc277469a9164dd crypto/params_dup.c b6cbfc8791b31587f32a3f9e4c117549793528ebddc34a361bad1ad8cf8d4c42 crypto/params_from_text.c 97cb7414dc2f165d5849ee3b46cdfff0afb067729435d9c01a747e0ca41e230c crypto/ppccap.c 3ca43596a7528dec8ff9d1a3cd0d68b62640f84b1d6a8b5e4842cfd0be1133ad crypto/ppccpuid.pl b4d34272a0bd1fbe6562022bf7ea6259b6a5a021a48222d415be47ef5ef2a905 crypto/property/defn_cache.c -c3709986fd2ab18f3c6136d8dd7705a4538986aa789ceafe770c3a376db3c569 crypto/property/property.c +9b5fbefe6b18f665b44f79d1d08a977b484064a9fba46506ed8e812e581e9d97 crypto/property/property.c 66da4f28d408133fb544b14aeb9ad4913e7c5c67e2826e53f0dc5bf4d8fada26 crypto/property/property_local.h b0b382ce829192d2537561cfb0fb5c7afb04305f321f7b3c91441b4ba99b9c92 crypto/property/property_parse.c a7cefda6a117550e2c76e0f307565ce1e11640b11ba10c80e469a837fd1212a3 crypto/property/property_query.c @@ -344,11 +344,11 @@ c50c584c55e56347bb43aca4b796b5344d70daece3061f586b79c871c21f5d1a crypto/sparse_ 8da78169fa8c09dc3c29c9bf1602b22e88c5eac4815e274ba1864c166e31584b crypto/stack/stack.c 7b4efa594d8d1f3ecbf4605cf54f72fb296a3b1d951bdc69e415aaa08f34e5c8 crypto/threads_lib.c a41ae93a755e2ec89b3cb5b4932e2b508fdda92ace2e025a2650a6da0e9e972c crypto/threads_none.c -3729e2bd36f945808b578e0d89fac0fcb3114e4fc9381614bcbd8a9869991716 crypto/threads_pthread.c +0a085bd6a70d449c79783c7b11383ae427df28a19fd4651571003306079bb72f crypto/threads_pthread.c f82715745b668297d71b66d05e6bfc3c817bf80bd967c0f33ca7ffbb6e347645 crypto/threads_win.c fd6c27cf7c6b5449b17f2b725f4203c4c10207f1973db09fd41571efe5de08fd crypto/x86_64cpuid.pl bbec287bb9bf35379885f8f8998b7fd9e8fc22efee9e1b299109af0f33a7ee16 crypto/x86cpuid.pl -acbb841170d4d3eb91d969be1c0e4973b1babfd5fcd76440b0628f509f82fd76 e_os.h +4a61cecc1d1d547cb414404c73efe71cac8ab7885a03780a55c3ff8a74b1de26 e_os.h 249a0e58e9692920eddc1ada2ac772a0cfd749cfbf618f2f5da08280df545d8f include/crypto/aes_platform.h 8c6f308c1ca774e6127e325c3b80511dbcdc99631f032694d8db53a5c02364ee include/crypto/asn1_dsa.h 3bded0eaa7ccdebd0b4217b7fdb82676d5c0762a88aca462dbceaef851fafa99 include/crypto/bn.h @@ -373,7 +373,7 @@ f326212c978576c5346c89ae0336c2428594494b54054f6045b1f1038bfbc004 include/crypto 7676b02824b2d68df6bddeb251e9b8a8fa2e35a95dad9a7ebeca53f9ab8d2dad include/crypto/sparse_array.h 7ad02c7de77304c3b298deeb038ab2550cf8b2bce03021994477c6c43dbcf86e include/crypto/types.h 782a83d4e489fd865e2768a20bfa31e78c2071fd0ceeb9eb077276ae2bcc6590 include/internal/bio.h -8e984890c7c62cdd6356963f034831831f7167c65096cb4d23bc765d84d2c598 include/internal/constant_time.h +c64d5338564a30577c86347d99763f1a3321ec12a65c7d61298ea78a3f136a83 include/internal/constant_time.h c5bb97f654984130c8b44c09a52395bce0b22985d5dbc9c4d9377d86283f11f8 include/internal/core.h 0b572801dfb8a41cc239e3439f8097a0ad11bbdf5d54811d10ceba3175cf2f17 include/internal/cryptlib.h 9571cfd3d5666749084b354a6d65adee443deeb5713a58c098c7b03bc69dbc63 include/internal/deprecated.h @@ -542,7 +542,7 @@ bb67eaa7a98494ca938726f9218213870fc97dd87b56bda950626cc794baf20b providers/impl c4b1cb143de15acc396ce2e03fdd165defd25ebc831de9cdfacf408ea883c666 providers/implementations/ciphers/ciphercommon_local.h 39b47b6ef9d71852964c26e07ef0e9b23f04c7493b1b16ba7c3dba7074b6b70d 
providers/implementations/digests/digestcommon.c 80551b53302d95faea257df3edbdbd02d48427ce42da2c4335f998456400d057 providers/implementations/digests/sha2_prov.c -de342d04be6af69037922d5c97bdc40c0c27f6740636e72786a765d0d8ad9173 providers/implementations/digests/sha3_prov.c +52608810d317b4cfe358d5a668369f834f845bc5f82e475d7ecaae5ca0144293 providers/implementations/digests/sha3_prov.c b5f94d597df72ca58486c59b2a70b4057d13f09528f861ed41a84b7125b54a82 providers/implementations/exchange/dh_exch.c 9c46dc0d859875fcc0bc3d61a7b610cd3520b1bf63718775c1124f54a1fe5f24 providers/implementations/exchange/ecdh_exch.c 9bf87b8429398a6465c7e9f749a33b84974303a458736b56f3359b30726d3969 providers/implementations/exchange/ecx_exch.c @@ -557,7 +557,7 @@ b9a61ce951c1904d8315b1bb26c0ab0aaadb47e71d4ead5df0a891608c728c4b providers/impl c95ce5498e724b9b3d58e3c2f4723e7e3e4beb07f9bea9422e43182cbadb43af providers/implementations/include/prov/macsignature.h 29d1a112b799e1f45fdf8bcee8361c2ed67428c250c1cdf408a9fbb7ebf4cce1 providers/implementations/include/prov/names.h 2187713b446d8b6d24ee986748b941ac3e24292c71e07ff9fb53a33021decdda providers/implementations/include/prov/seeding.h -6091dd22e716fbe6c7c94524cdee6ad4432a572f2d3c4d360dcafafa3902d692 providers/implementations/kdfs/hkdf.c +9d84007b7d13c70ceef8709ba8c92bfffa894aabfe1802993f33f1268c18aab0 providers/implementations/kdfs/hkdf.c a62e3af09f5af84dcf36f951ba4ac90ca1694adaf3747126186020b155f94186 providers/implementations/kdfs/kbkdf.c e0644e727aacfea4da3cf2c4d2602d7ef0626ebb760b6467432ffd54d5fbb24d providers/implementations/kdfs/pbkdf2.c c0778565abff112c0c5257329a7750ec4605e62f26cc36851fa1fbee6e03c70c providers/implementations/kdfs/pbkdf2.h @@ -571,9 +571,9 @@ abe2b0f3711eaa34846e155cffc9242e4051c45de896f747afd5ac9d87f637dc providers/impl 9316fc619e8d8a1d841aa0936fc62c28eb2b4c60cc6c9b2d64b72f8641f28abb providers/implementations/keymgmt/dsa_kmgmt.c 9bc88451d3ae110c7a108ee73d3b3b6bda801ec3494d2dfb9c9970b85c2d34fe providers/implementations/keymgmt/ec_kmgmt.c 258ae17bb2dd87ed1511a8eb3fe99eed9b77f5c2f757215ff6b3d0e8791fc251 providers/implementations/keymgmt/ec_kmgmt_imexport.inc -d0c67b7fbddd51dcfebd96bf99794ca3bc437d50974ebcd56968fb8dd3627b0f providers/implementations/keymgmt/ecx_kmgmt.c +5f76cf9d17e14f471f90ebadcd94fca654c806f4356b84d5b9363e8be4599bcb providers/implementations/keymgmt/ecx_kmgmt.c 053a2be39a87f50b877ebdbbf799cf5faf8b2de33b04311d819d212ee1ea329b providers/implementations/keymgmt/kdf_legacy_kmgmt.c -37e2f9f904eeabf94b1e4152b67ac236f872aa78dd7e47bf0de1b8f50ac19b6c providers/implementations/keymgmt/mac_legacy_kmgmt.c +e0450f253ca54624587046edd28f071f55bf3088847dc8a4de79491079ad475d providers/implementations/keymgmt/mac_legacy_kmgmt.c 19f22fc70a6321441e56d5bd4aab3d01d52d17069d4e4b5cefce0f411ecece75 providers/implementations/keymgmt/rsa_kmgmt.c 5eb96ea2df635cf79c5aeccae270fbe896b5e6384a5b3e4b187ce8c10fe8dfc7 providers/implementations/macs/cmac_prov.c e69aa06f8f3c6f5a26702b9f44a844b8589b99dc0ee590953a29e8b9ef10acbe providers/implementations/macs/gmac_prov.c @@ -581,11 +581,11 @@ e69aa06f8f3c6f5a26702b9f44a844b8589b99dc0ee590953a29e8b9ef10acbe providers/impl 8640b63fd8325aaf8f7128d6cc448d9af448a65bf51a8978075467d33a67944e providers/implementations/macs/kmac_prov.c bf30274dd6b528ae913984775bd8f29c6c48c0ef06d464d0f738217727b7aa5c providers/implementations/rands/crngt.c f9457255fc57ef5739aa2584e535195e38cc947e31fd044d28d64c28c8a946ce providers/implementations/rands/drbg.c -7e8fa6333845778474ed1313a66867512512372c9397f699a8f68fa6d5fc05fa 
providers/implementations/rands/drbg_ctr.c +42e895fe255d90f9135eada30466811e3909ea4fd07fb968435dc5feee94ebf8 providers/implementations/rands/drbg_ctr.c 8337994f4bc95e421d6d2833bb4481ad9d84deb3913d0faec6e1791ea372a793 providers/implementations/rands/drbg_hash.c 1f040090f596f88cb64d6eb89109a8b75e66caee113708fb59335ad2547027fc providers/implementations/rands/drbg_hmac.c 7a1b8516f891f25f3dc07ffe0455200f20d3a1f0345a917f00c7d9afe900bb0a providers/implementations/rands/drbg_local.h -04339b66c10017229ef368cb48077f58a252ebfda9ab12b9f919e4149b1036ed providers/implementations/rands/test_rng.c +66c0a91e23ae4275cc3f5daa8437d1c0addd10ca2e8aefab4573d606c5ba27ba providers/implementations/rands/test_rng.c cafb9e6f54ad15889fcebddac6df61336bff7d78936f7de3bb5aab8aee5728d2 providers/implementations/signature/dsa_sig.c a30dc6308de0ca33406e7ce909f3bcf7580fb84d863b0976b275839f866258df providers/implementations/signature/ecdsa_sig.c 09647b736980ac3c762f1e7c10cbfee78e2c6ab327ac62e5039968cea034ff3b providers/implementations/signature/eddsa_sig.c diff --git a/deps/openssl/openssl/providers/fips.checksum b/deps/openssl/openssl/providers/fips.checksum index 0904f6a1029ed9..9ca4c219e703cd 100644 --- a/deps/openssl/openssl/providers/fips.checksum +++ b/deps/openssl/openssl/providers/fips.checksum @@ -1 +1 @@ -01b31117f96429fe4c8efbf7f4f10ef32efa2b11c69851fd227e4194db116b6f providers/fips-sources.checksums +0cbed2adf7acee36e3ef1906e6de0946b423cc9354c878e54bcbc7a363aeec0d providers/fips-sources.checksums diff --git a/deps/openssl/openssl/providers/implementations/ciphers/cipher_aes_gcm_hw_armv8.inc b/deps/openssl/openssl/providers/implementations/ciphers/cipher_aes_gcm_hw_armv8.inc index 310f4470d6d49a..d633ebd54470b6 100644 --- a/deps/openssl/openssl/providers/implementations/ciphers/cipher_aes_gcm_hw_armv8.inc +++ b/deps/openssl/openssl/providers/implementations/ciphers/cipher_aes_gcm_hw_armv8.inc @@ -1,5 +1,5 @@ /* - * Copyright 2019-2021 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2019-2025 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -15,10 +15,8 @@ size_t armv8_aes_gcm_encrypt(const unsigned char *in, unsigned char *out, size_t len, const void *key, unsigned char ivec[16], u64 *Xi) { - size_t align_bytes = 0; - align_bytes = len - len % 16; - AES_KEY *aes_key = (AES_KEY *)key; + size_t align_bytes = len - len % 16; switch(aes_key->rounds) { case 10: @@ -37,10 +35,8 @@ size_t armv8_aes_gcm_encrypt(const unsigned char *in, unsigned char *out, size_t size_t armv8_aes_gcm_decrypt(const unsigned char *in, unsigned char *out, size_t len, const void *key, unsigned char ivec[16], u64 *Xi) { - size_t align_bytes = 0; - align_bytes = len - len % 16; - AES_KEY *aes_key = (AES_KEY *)key; + size_t align_bytes = len - len % 16; switch(aes_key->rounds) { case 10: diff --git a/deps/openssl/openssl/providers/implementations/ciphers/cipher_chacha20_poly1305.c b/deps/openssl/openssl/providers/implementations/ciphers/cipher_chacha20_poly1305.c index 28ba0fee43d3ea..7fba6ab64f39f3 100644 --- a/deps/openssl/openssl/providers/implementations/ciphers/cipher_chacha20_poly1305.c +++ b/deps/openssl/openssl/providers/implementations/ciphers/cipher_chacha20_poly1305.c @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2019-2025 The OpenSSL Project Authors. All Rights Reserved. 
* * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -32,7 +32,7 @@ static OSSL_FUNC_cipher_set_ctx_params_fn chacha20_poly1305_set_ctx_params; static OSSL_FUNC_cipher_cipher_fn chacha20_poly1305_cipher; static OSSL_FUNC_cipher_final_fn chacha20_poly1305_final; static OSSL_FUNC_cipher_gettable_ctx_params_fn chacha20_poly1305_gettable_ctx_params; -#define chacha20_poly1305_settable_ctx_params ossl_cipher_aead_settable_ctx_params +static OSSL_FUNC_cipher_settable_ctx_params_fn chacha20_poly1305_settable_ctx_params; #define chacha20_poly1305_gettable_params ossl_cipher_generic_gettable_params #define chacha20_poly1305_update chacha20_poly1305_cipher @@ -158,6 +158,21 @@ static const OSSL_PARAM *chacha20_poly1305_gettable_ctx_params return chacha20_poly1305_known_gettable_ctx_params; } +static const OSSL_PARAM chacha20_poly1305_known_settable_ctx_params[] = { + OSSL_PARAM_size_t(OSSL_CIPHER_PARAM_KEYLEN, NULL), + OSSL_PARAM_size_t(OSSL_CIPHER_PARAM_IVLEN, NULL), + OSSL_PARAM_octet_string(OSSL_CIPHER_PARAM_AEAD_TAG, NULL, 0), + OSSL_PARAM_octet_string(OSSL_CIPHER_PARAM_AEAD_TLS1_AAD, NULL, 0), + OSSL_PARAM_octet_string(OSSL_CIPHER_PARAM_AEAD_TLS1_IV_FIXED, NULL, 0), + OSSL_PARAM_END +}; +static const OSSL_PARAM *chacha20_poly1305_settable_ctx_params( + ossl_unused void *cctx, ossl_unused void *provctx + ) +{ + return chacha20_poly1305_known_settable_ctx_params; +} + static int chacha20_poly1305_set_ctx_params(void *vctx, const OSSL_PARAM params[]) { @@ -238,7 +253,6 @@ static int chacha20_poly1305_set_ctx_params(void *vctx, return 0; } } - /* ignore OSSL_CIPHER_PARAM_AEAD_MAC_KEY */ return 1; } diff --git a/deps/openssl/openssl/providers/implementations/digests/sha3_prov.c b/deps/openssl/openssl/providers/implementations/digests/sha3_prov.c index 168825d47564e9..f6358e62562e3b 100644 --- a/deps/openssl/openssl/providers/implementations/digests/sha3_prov.c +++ b/deps/openssl/openssl/providers/implementations/digests/sha3_prov.c @@ -1,5 +1,5 @@ /* - * Copyright 2019-2021 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2019-2025 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -19,7 +19,7 @@ #include "prov/implementations.h" #define SHA3_FLAGS PROV_DIGEST_FLAG_ALGID_ABSENT -#define SHAKE_FLAGS PROV_DIGEST_FLAG_XOF +#define SHAKE_FLAGS (PROV_DIGEST_FLAG_XOF | PROV_DIGEST_FLAG_ALGID_ABSENT) #define KMAC_FLAGS PROV_DIGEST_FLAG_XOF /* diff --git a/deps/openssl/openssl/providers/implementations/kdfs/hkdf.c b/deps/openssl/openssl/providers/implementations/kdfs/hkdf.c index 69ef565d04fcca..1197a678e935c9 100644 --- a/deps/openssl/openssl/providers/implementations/kdfs/hkdf.c +++ b/deps/openssl/openssl/providers/implementations/kdfs/hkdf.c @@ -1,5 +1,5 @@ /* - * Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2016-2025 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. 
You can obtain a copy @@ -233,13 +233,11 @@ static int hkdf_common_set_ctx_params(KDF_HKDF *ctx, const OSSL_PARAM params[]) } if ((p = OSSL_PARAM_locate_const(params, OSSL_KDF_PARAM_SALT)) != NULL) { - if (p->data_size != 0 && p->data != NULL) { - OPENSSL_free(ctx->salt); - ctx->salt = NULL; - if (!OSSL_PARAM_get_octet_string(p, (void **)&ctx->salt, 0, - &ctx->salt_len)) - return 0; - } + OPENSSL_free(ctx->salt); + ctx->salt = NULL; + if (!OSSL_PARAM_get_octet_string(p, (void **)&ctx->salt, 0, + &ctx->salt_len)) + return 0; } return 1; diff --git a/deps/openssl/openssl/providers/implementations/keymgmt/ecx_kmgmt.c b/deps/openssl/openssl/providers/implementations/keymgmt/ecx_kmgmt.c index 94e62f755c20de..d5dd01a314a206 100644 --- a/deps/openssl/openssl/providers/implementations/keymgmt/ecx_kmgmt.c +++ b/deps/openssl/openssl/providers/implementations/keymgmt/ecx_kmgmt.c @@ -1,5 +1,5 @@ /* - * Copyright 2020-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2020-2025 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -344,7 +344,6 @@ static const OSSL_PARAM ecx_gettable_params[] = { OSSL_PARAM_int(OSSL_PKEY_PARAM_BITS, NULL), OSSL_PARAM_int(OSSL_PKEY_PARAM_SECURITY_BITS, NULL), OSSL_PARAM_int(OSSL_PKEY_PARAM_MAX_SIZE, NULL), - OSSL_PARAM_utf8_string(OSSL_PKEY_PARAM_MANDATORY_DIGEST, NULL, 0), OSSL_PARAM_octet_string(OSSL_PKEY_PARAM_ENCODED_PUBLIC_KEY, NULL, 0), ECX_KEY_TYPES(), OSSL_PARAM_END @@ -354,6 +353,7 @@ static const OSSL_PARAM ed_gettable_params[] = { OSSL_PARAM_int(OSSL_PKEY_PARAM_BITS, NULL), OSSL_PARAM_int(OSSL_PKEY_PARAM_SECURITY_BITS, NULL), OSSL_PARAM_int(OSSL_PKEY_PARAM_MAX_SIZE, NULL), + OSSL_PARAM_utf8_string(OSSL_PKEY_PARAM_MANDATORY_DIGEST, NULL, 0), ECX_KEY_TYPES(), OSSL_PARAM_END }; @@ -485,6 +485,8 @@ static void *ecx_gen_init(void *provctx, int selection, gctx->libctx = libctx; gctx->type = type; gctx->selection = selection; + } else { + return NULL; } if (!ecx_gen_set_params(gctx, params)) { ecx_gen_cleanup(gctx); @@ -694,6 +696,9 @@ static void ecx_gen_cleanup(void *genctx) { struct ecx_gen_ctx *gctx = genctx; + if (gctx == NULL) + return; + OPENSSL_free(gctx->propq); OPENSSL_free(gctx); } diff --git a/deps/openssl/openssl/providers/implementations/keymgmt/mac_legacy_kmgmt.c b/deps/openssl/openssl/providers/implementations/keymgmt/mac_legacy_kmgmt.c index b02a0a91c6f661..0b8cf9252c4b95 100644 --- a/deps/openssl/openssl/providers/implementations/keymgmt/mac_legacy_kmgmt.c +++ b/deps/openssl/openssl/providers/implementations/keymgmt/mac_legacy_kmgmt.c @@ -527,6 +527,9 @@ static void mac_gen_cleanup(void *genctx) { struct mac_gen_ctx *gctx = genctx; + if (gctx == NULL) + return; + OPENSSL_secure_clear_free(gctx->priv_key, gctx->priv_key_len); ossl_prov_cipher_reset(&gctx->cipher); OPENSSL_free(gctx); diff --git a/deps/openssl/openssl/providers/implementations/rands/drbg_ctr.c b/deps/openssl/openssl/providers/implementations/rands/drbg_ctr.c index 21fdce640816dc..269459c1cf09e9 100644 --- a/deps/openssl/openssl/providers/implementations/rands/drbg_ctr.c +++ b/deps/openssl/openssl/providers/implementations/rands/drbg_ctr.c @@ -1,5 +1,5 @@ /* - * Copyright 2011-2024 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2011-2025 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). 
You may not use * this file except in compliance with the License. You can obtain a copy @@ -20,6 +20,7 @@ #include "prov/providercommon.h" #include "prov/provider_ctx.h" #include "drbg_local.h" +#include "internal/cryptlib.h" static OSSL_FUNC_rand_newctx_fn drbg_ctr_new_wrapper; static OSSL_FUNC_rand_freectx_fn drbg_ctr_free; @@ -80,6 +81,8 @@ static void ctr_XOR(PROV_DRBG_CTR *ctr, const unsigned char *in, size_t inlen) * are XORing. So just process however much input we have. */ n = inlen < ctr->keylen ? inlen : ctr->keylen; + if (!ossl_assert(n <= sizeof(ctr->K))) + return; for (i = 0; i < n; i++) ctr->K[i] ^= in[i]; if (inlen <= ctr->keylen) diff --git a/deps/openssl/openssl/providers/implementations/rands/test_rng.c b/deps/openssl/openssl/providers/implementations/rands/test_rng.c index 4e7fed0fc7b1f8..e3b91368e80f20 100644 --- a/deps/openssl/openssl/providers/implementations/rands/test_rng.c +++ b/deps/openssl/openssl/providers/implementations/rands/test_rng.c @@ -1,5 +1,5 @@ /* - * Copyright 2020-2021 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2020-2025 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -125,16 +125,18 @@ static int test_rng_reseed(ossl_unused void *vtest, static size_t test_rng_nonce(void *vtest, unsigned char *out, unsigned int strength, ossl_unused size_t min_noncelen, - ossl_unused size_t max_noncelen) + size_t max_noncelen) { PROV_TEST_RNG *t = (PROV_TEST_RNG *)vtest; + size_t i; if (t->nonce == NULL || strength > t->strength) return 0; + i = t->nonce_len > max_noncelen ? max_noncelen : t->nonce_len; if (out != NULL) - memcpy(out, t->nonce, t->nonce_len); - return t->nonce_len; + memcpy(out, t->nonce, i); + return i; } static int test_rng_get_ctx_params(void *vtest, OSSL_PARAM params[]) diff --git a/deps/openssl/openssl/ssl/record/rec_layer_d1.c b/deps/openssl/openssl/ssl/record/rec_layer_d1.c index 79de0219ec7bae..3e5ec6aec4dd6f 100644 --- a/deps/openssl/openssl/ssl/record/rec_layer_d1.c +++ b/deps/openssl/openssl/ssl/record/rec_layer_d1.c @@ -1,5 +1,5 @@ /* - * Copyright 2005-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2005-2025 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -606,6 +606,17 @@ int dtls1_read_bytes(SSL *s, int type, int *recvd_type, unsigned char *buf, #endif s->shutdown |= SSL_RECEIVED_SHUTDOWN; return 0; + } else if (alert_descr == SSL_AD_NO_RENEGOTIATION) { + /* + * This is a warning but we receive it if we requested + * renegotiation and the peer denied it. Terminate with a fatal + * alert because if the application tried to renegotiate it + * presumably had a good reason and expects it to succeed. In + * the future we might have a renegotiation where we don't care + * if the peer refused it where we carry on. + */ + SSLfatal(s, SSL_AD_HANDSHAKE_FAILURE, SSL_R_NO_RENEGOTIATION); + return -1; } } else if (alert_level == SSL3_AL_FATAL) { s->rwstate = SSL_NOTHING; diff --git a/deps/openssl/openssl/ssl/record/rec_layer_s3.c b/deps/openssl/openssl/ssl/record/rec_layer_s3.c index 779e998bb6ee06..89ab1f4f1a44f3 100644 --- a/deps/openssl/openssl/ssl/record/rec_layer_s3.c +++ b/deps/openssl/openssl/ssl/record/rec_layer_s3.c @@ -1,5 +1,5 @@ /* - * Copyright 1995-2024 The OpenSSL Project Authors. 
All Rights Reserved. + * Copyright 1995-2025 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -1613,10 +1613,10 @@ int ssl3_read_bytes(SSL *s, int type, int *recvd_type, unsigned char *buf, /* * This is a warning but we receive it if we requested * renegotiation and the peer denied it. Terminate with a fatal - * alert because if application tried to renegotiate it + * alert because if the application tried to renegotiate it * presumably had a good reason and expects it to succeed. In - * future we might have a renegotiation where we don't care if - * the peer refused it where we carry on. + * the future we might have a renegotiation where we don't care + * if the peer refused it where we carry on. */ SSLfatal(s, SSL_AD_HANDSHAKE_FAILURE, SSL_R_NO_RENEGOTIATION); return -1; diff --git a/deps/openssl/openssl/ssl/s3_lib.c b/deps/openssl/openssl/ssl/s3_lib.c index bcfe57b46f083b..7c76ae13db7653 100644 --- a/deps/openssl/openssl/ssl/s3_lib.c +++ b/deps/openssl/openssl/ssl/s3_lib.c @@ -1,5 +1,5 @@ /* - * Copyright 1995-2022 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2025 The OpenSSL Project Authors. All Rights Reserved. * Copyright (c) 2002, Oracle and/or its affiliates. All rights reserved * Copyright 2005 Nokia. All rights reserved. * @@ -4820,7 +4820,10 @@ int ssl_derive(SSL *s, EVP_PKEY *privkey, EVP_PKEY *pubkey, int gensecret) } if (EVP_PKEY_derive(pctx, pms, &pmslen) <= 0) { - SSLfatal(s, SSL_AD_INTERNAL_ERROR, ERR_R_INTERNAL_ERROR); + /* + * the public key was probably a weak key + */ + SSLfatal(s, SSL_AD_ILLEGAL_PARAMETER, SSL_R_BAD_KEY_SHARE); goto err; } @@ -4923,7 +4926,7 @@ int ssl_encapsulate(SSL *s, EVP_PKEY *pubkey, } if (EVP_PKEY_encapsulate(pctx, ct, &ctlen, pms, &pmslen) <= 0) { - SSLfatal(s, SSL_AD_INTERNAL_ERROR, ERR_R_INTERNAL_ERROR); + SSLfatal(s, SSL_AD_ILLEGAL_PARAMETER, SSL_R_BAD_KEY_SHARE); goto err; } diff --git a/deps/openssl/openssl/ssl/ssl_cert.c b/deps/openssl/openssl/ssl/ssl_cert.c index 2e2d09a32ee4aa..a8d6df924bce9a 100644 --- a/deps/openssl/openssl/ssl/ssl_cert.c +++ b/deps/openssl/openssl/ssl/ssl_cert.c @@ -1,5 +1,5 @@ /* - * Copyright 1995-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2025 The OpenSSL Project Authors. All Rights Reserved. * Copyright (c) 2002, Oracle and/or its affiliates. All rights reserved * * Licensed under the Apache License 2.0 (the "License"). You may not use @@ -812,16 +812,17 @@ static int add_uris_recursive(STACK_OF(X509_NAME) *stack, OSSL_STORE_CTX *ctx = NULL; X509 *x = NULL; X509_NAME *xn = NULL; + OSSL_STORE_INFO *info = NULL; if ((ctx = OSSL_STORE_open(uri, NULL, NULL, NULL, NULL)) == NULL) goto err; while (!OSSL_STORE_eof(ctx) && !OSSL_STORE_error(ctx)) { - OSSL_STORE_INFO *info = OSSL_STORE_load(ctx); - int infotype = info == 0 ? 
0 : OSSL_STORE_INFO_get_type(info); + int infotype; - if (info == NULL) + if ((info = OSSL_STORE_load(ctx)) == NULL) continue; + infotype = OSSL_STORE_INFO_get_type(info); if (infotype == OSSL_STORE_INFO_NAME) { /* @@ -846,6 +847,7 @@ static int add_uris_recursive(STACK_OF(X509_NAME) *stack, } OSSL_STORE_INFO_free(info); + info = NULL; } ERR_clear_error(); @@ -853,6 +855,7 @@ static int add_uris_recursive(STACK_OF(X509_NAME) *stack, err: ok = 0; + OSSL_STORE_INFO_free(info); done: OSSL_STORE_close(ctx); diff --git a/deps/openssl/openssl/ssl/ssl_sess.c b/deps/openssl/openssl/ssl/ssl_sess.c index ec937a321c3018..72b6dae677e30c 100644 --- a/deps/openssl/openssl/ssl/ssl_sess.c +++ b/deps/openssl/openssl/ssl/ssl_sess.c @@ -1,5 +1,5 @@ /* - * Copyright 1995-2024 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2025 The OpenSSL Project Authors. All Rights Reserved. * Copyright 2005 Nokia. All rights reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use @@ -612,6 +612,8 @@ int ssl_get_prev_session(SSL *s, CLIENTHELLO_MSG *hello) SSL_TICKET_STATUS r; if (SSL_IS_TLS13(s)) { + SSL_SESSION_free(s->session); + s->session = NULL; /* * By default we will send a new ticket. This can be overridden in the * ticket processing. @@ -624,6 +626,7 @@ int ssl_get_prev_session(SSL *s, CLIENTHELLO_MSG *hello) hello->pre_proc_exts, NULL, 0)) return -1; + /* If we resumed, s->session will now be set */ ret = s->session; } else { /* sets s->ext.ticket_expected */ diff --git a/deps/openssl/openssl/ssl/statem/extensions_srvr.c b/deps/openssl/openssl/ssl/statem/extensions_srvr.c index 72c00574be6863..b8d55e144cfc9a 100644 --- a/deps/openssl/openssl/ssl/statem/extensions_srvr.c +++ b/deps/openssl/openssl/ssl/statem/extensions_srvr.c @@ -1,5 +1,5 @@ /* - * Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2016-2025 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -275,7 +275,13 @@ int tls_parse_ctos_sig_algs_cert(SSL *s, PACKET *pkt, return 0; } - if (!s->hit && !tls1_save_sigalgs(s, &supported_sig_algs, 1)) { + /* + * We use this routine on both clients and servers, and when clients + * get asked for PHA we need to always save the sigalgs regardless + * of whether it was a resumption or not. + */ + if ((!s->server || (s->server && !s->hit)) + && !tls1_save_sigalgs(s, &supported_sig_algs, 1)) { SSLfatal(s, SSL_AD_DECODE_ERROR, SSL_R_BAD_EXTENSION); return 0; } @@ -294,7 +300,13 @@ int tls_parse_ctos_sig_algs(SSL *s, PACKET *pkt, unsigned int context, X509 *x, return 0; } - if (!s->hit && !tls1_save_sigalgs(s, &supported_sig_algs, 0)) { + /* + * We use this routine on both clients and servers, and when clients + * get asked for PHA we need to always save the sigalgs regardless + * of whether it was a resumption or not. + */ + if ((!s->server || (s->server && !s->hit)) + && !tls1_save_sigalgs(s, &supported_sig_algs, 0)) { SSLfatal(s, SSL_AD_DECODE_ERROR, SSL_R_BAD_EXTENSION); return 0; } diff --git a/deps/openssl/openssl/ssl/statem/statem_lib.c b/deps/openssl/openssl/ssl/statem/statem_lib.c index 6f0eaa5d6c0d21..422d0428d465bb 100644 --- a/deps/openssl/openssl/ssl/statem/statem_lib.c +++ b/deps/openssl/openssl/ssl/statem/statem_lib.c @@ -1,5 +1,5 @@ /* - * Copyright 1995-2024 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2025 The OpenSSL Project Authors. 
All Rights Reserved. * Copyright (c) 2002, Oracle and/or its affiliates. All rights reserved * * Licensed under the Apache License 2.0 (the "License"). You may not use @@ -1967,23 +1967,24 @@ int ssl_choose_client_version(SSL *s, int version, RAW_EXTENSION *extensions) real_max = ver_max; /* Check for downgrades */ - if (s->version == TLS1_2_VERSION && real_max > s->version) { - if (memcmp(tls12downgrade, + if (!SSL_IS_DTLS(s) && real_max > s->version) { + /* Signal applies to all versions */ + if (memcmp(tls11downgrade, s->s3.server_random + SSL3_RANDOM_SIZE - - sizeof(tls12downgrade), - sizeof(tls12downgrade)) == 0) { + - sizeof(tls11downgrade), + sizeof(tls11downgrade)) == 0) { s->version = origv; SSLfatal(s, SSL_AD_ILLEGAL_PARAMETER, SSL_R_INAPPROPRIATE_FALLBACK); return 0; } - } else if (!SSL_IS_DTLS(s) - && s->version < TLS1_2_VERSION - && real_max > s->version) { - if (memcmp(tls11downgrade, - s->s3.server_random + SSL3_RANDOM_SIZE - - sizeof(tls11downgrade), - sizeof(tls11downgrade)) == 0) { + /* Only when accepting TLS1.3 */ + if (real_max == TLS1_3_VERSION + && memcmp(tls12downgrade, + s->s3.server_random + SSL3_RANDOM_SIZE + - sizeof(tls12downgrade), + sizeof(tls12downgrade)) == 0) { + s->version = origv; SSLfatal(s, SSL_AD_ILLEGAL_PARAMETER, SSL_R_INAPPROPRIATE_FALLBACK); diff --git a/deps/openssl/openssl/tools/c_rehash.in b/deps/openssl/openssl/tools/c_rehash.in index 343cdc1e7575e9..c056001ea3783c 100644 --- a/deps/openssl/openssl/tools/c_rehash.in +++ b/deps/openssl/openssl/tools/c_rehash.in @@ -1,7 +1,7 @@ #!{- $config{HASHBANGPERL} -} {- use OpenSSL::Util; -} # {- join("\n# ", @autowarntext) -} -# Copyright 1999-2022 The OpenSSL Project Authors. All Rights Reserved. +# Copyright 1999-2025 The OpenSSL Project Authors. All Rights Reserved. # # Licensed under the Apache License 2.0 (the "License"). You may not use # this file except in compliance with the License. You can obtain a copy @@ -63,10 +63,10 @@ if (defined(&Cwd::getcwd)) { my $path_delim = ($pwd =~ /^[a-z]\:/i) ? ';' : ':'; $ENV{PATH} = "$prefix/bin" . ($ENV{PATH} ? $path_delim . $ENV{PATH} : ""); -if (! -x $openssl) { +if (!(-f $openssl && -x $openssl)) { my $found = 0; foreach (split /$path_delim/, $ENV{PATH}) { - if (-x "$_/$openssl") { + if (-f "$_/$openssl" && -x "$_/$openssl") { $found = 1; $openssl = "$_/$openssl"; last; @@ -88,7 +88,7 @@ if (@ARGV) { if (-d $dirlist[0]) { chdir $dirlist[0]; - $openssl="$pwd/$openssl" if (!-x $openssl); + $openssl="$pwd/$openssl" if (!(-f $openssl && -x $openssl)); chdir $pwd; } diff --git a/deps/openssl/openssl/util/wrap.pl.in b/deps/openssl/openssl/util/wrap.pl.in index 5126513d4c3e91..436ec12fcf06c4 100644 --- a/deps/openssl/openssl/util/wrap.pl.in +++ b/deps/openssl/openssl/util/wrap.pl.in @@ -18,6 +18,38 @@ BEGIN { OpenSSL::Util->import(); } +sub quote_cmd_win32 { + my $cmd = ""; + + foreach my $arg (@_) { + if ($arg =~ m{\A[\w,-./@]+\z}) { + $cmd .= $arg . q{ };; + } else { + $cmd .= q{"} . quote_arg_win32($arg) . 
q{" }; + } + } + return substr($cmd, 0, -1); +} + +sub quote_arg_win32 { + my ($arg) = @_; + my $val = ""; + + pos($arg) = 0; + while (1) { + return $val if (pos($arg) == length($arg)); + if ($arg =~ m{\G((?:(?>[\\]*)[^"\\]+)+)}ogc) { + $val .= $1; + } elsif ($arg =~ m{\G"}ogc) { + $val .= qq{\\"}; + } elsif ($arg =~ m{\G((?>[\\]+)(?="|\z))}ogc) { + $val .= qq{\\} x (2 * length($1)); + } else { + die sprintf("Internal error quoting: '%s'\n", $arg); + } + } +} + my $there = canonpath(catdir(dirname($0), updir())); my $std_engines = catdir($there, 'engines'); my $std_providers = catdir($there, 'providers'); @@ -60,7 +92,12 @@ if ($^O eq 'VMS') { # The exec() statement on MSWin32 doesn't seem to give back the exit code # from the call, so we resort to using system() instead. -my $waitcode = system @cmd; +my $waitcode; +if ($^O eq 'MSWin32') { + $waitcode = system(quote_cmd_win32(@cmd)); +} else { + $waitcode = system @cmd; +} # According to documentation, -1 means that system() couldn't run the command, # otherwise, the value is similar to the Unix wait() status value diff --git a/deps/openssl/openssl_common.gypi b/deps/openssl/openssl_common.gypi index 4312359e947643..0650cdc8a03b02 100644 --- a/deps/openssl/openssl_common.gypi +++ b/deps/openssl/openssl_common.gypi @@ -8,6 +8,7 @@ 'openssl/crypto/ec/curve448', 'openssl/crypto/ec/curve448/arch_32', 'openssl/providers/common/include', + 'openssl/providers/fips/include', 'openssl/providers/implementations/include', 'config/', ], diff --git a/deps/sqlite/sqlite3.c b/deps/sqlite/sqlite3.c index 0b071b2b6cc259..26a7a43d8658be 100644 --- a/deps/sqlite/sqlite3.c +++ b/deps/sqlite/sqlite3.c @@ -1,6 +1,6 @@ /****************************************************************************** ** This file is an amalgamation of many separate C source files from SQLite -** version 3.50.2. By combining all the individual C code files into this +** version 3.50.4. By combining all the individual C code files into this ** single large file, the entire code can be compiled as a single translation ** unit. This allows many compilers to do optimizations that would not be ** possible if the files were compiled separately. Performance improvements @@ -18,7 +18,7 @@ ** separate file. This file contains only code for the core SQLite library. ** ** The content in this amalgamation comes from Fossil check-in -** 2af157d77fb1304a74176eaee7fbc7c7e932 with changes in files: +** 4d8adfb30e03f9cf27f800a2c1ba3c48fb4c with changes in files: ** ** */ @@ -465,9 +465,9 @@ extern "C" { ** [sqlite3_libversion_number()], [sqlite3_sourceid()], ** [sqlite_version()] and [sqlite_source_id()]. */ -#define SQLITE_VERSION "3.50.2" -#define SQLITE_VERSION_NUMBER 3050002 -#define SQLITE_SOURCE_ID "2025-06-28 14:00:48 2af157d77fb1304a74176eaee7fbc7c7e932d946bf25325e9c26c91db19e3079" +#define SQLITE_VERSION "3.50.4" +#define SQLITE_VERSION_NUMBER 3050004 +#define SQLITE_SOURCE_ID "2025-07-30 19:33:53 4d8adfb30e03f9cf27f800a2c1ba3c48fb4ca1b08b0f5ed59a4d5ecbf45e20a3" /* ** CAPI3REF: Run-Time Library Version Numbers @@ -9377,13 +9377,13 @@ SQLITE_API int sqlite3_stmt_status(sqlite3_stmt*, int op,int resetFlg); ** [[SQLITE_STMTSTATUS_SORT]]
SQLITE_STMTSTATUS_SORT
**
^This is the number of sort operations that have occurred. ** A non-zero value in this counter may indicate an opportunity to -** improvement performance through careful use of indices.
+** improve performance through careful use of indices. ** ** [[SQLITE_STMTSTATUS_AUTOINDEX]]
SQLITE_STMTSTATUS_AUTOINDEX
**
^This is the number of rows inserted into transient indices that ** were created automatically in order to help joins run faster. ** A non-zero value in this counter may indicate an opportunity to -** improvement performance by adding permanent indices that do not +** improve performance by adding permanent indices that do not ** need to be reinitialized each time the statement is run.
** ** [[SQLITE_STMTSTATUS_VM_STEP]]
SQLITE_STMTSTATUS_VM_STEP
@@ -9392,19 +9392,19 @@ SQLITE_API int sqlite3_stmt_status(sqlite3_stmt*, int op,int resetFlg); ** to 2147483647. The number of virtual machine operations can be ** used as a proxy for the total work done by the prepared statement. ** If the number of virtual machine operations exceeds 2147483647 -** then the value returned by this statement status code is undefined. +** then the value returned by this statement status code is undefined. ** ** [[SQLITE_STMTSTATUS_REPREPARE]]
SQLITE_STMTSTATUS_REPREPARE
**
^This is the number of times that the prepare statement has been ** automatically regenerated due to schema changes or changes to -** [bound parameters] that might affect the query plan. +** [bound parameters] that might affect the query plan.
** ** [[SQLITE_STMTSTATUS_RUN]]
SQLITE_STMTSTATUS_RUN
**
^This is the number of times that the prepared statement has ** been run. A single "run" for the purposes of this counter is one ** or more calls to [sqlite3_step()] followed by a call to [sqlite3_reset()]. ** The counter is incremented on the first [sqlite3_step()] call of each -** cycle. +** cycle.
** ** [[SQLITE_STMTSTATUS_FILTER_MISS]] ** [[SQLITE_STMTSTATUS_FILTER HIT]] @@ -9414,7 +9414,7 @@ SQLITE_API int sqlite3_stmt_status(sqlite3_stmt*, int op,int resetFlg); ** step was bypassed because a Bloom filter returned not-found. The ** corresponding SQLITE_STMTSTATUS_FILTER_MISS value is the number of ** times that the Bloom filter returned a find, and thus the join step -** had to be processed as normal. +** had to be processed as normal. ** ** [[SQLITE_STMTSTATUS_MEMUSED]]
SQLITE_STMTSTATUS_MEMUSED
**
^This is the approximate number of bytes of heap memory @@ -9519,9 +9519,9 @@ struct sqlite3_pcache_page { ** SQLite will typically create one cache instance for each open database file, ** though this is not guaranteed. ^The ** first parameter, szPage, is the size in bytes of the pages that must -** be allocated by the cache. ^szPage will always a power of two. ^The +** be allocated by the cache. ^szPage will always be a power of two. ^The ** second parameter szExtra is a number of bytes of extra storage -** associated with each page cache entry. ^The szExtra parameter will +** associated with each page cache entry. ^The szExtra parameter will be ** a number less than 250. SQLite will use the ** extra szExtra bytes on each page to store metadata about the underlying ** database page on disk. The value passed into szExtra depends @@ -9529,17 +9529,17 @@ struct sqlite3_pcache_page { ** ^The third argument to xCreate(), bPurgeable, is true if the cache being ** created will be used to cache database pages of a file stored on disk, or ** false if it is used for an in-memory database. The cache implementation -** does not have to do anything special based with the value of bPurgeable; +** does not have to do anything special based upon the value of bPurgeable; ** it is purely advisory. ^On a cache where bPurgeable is false, SQLite will ** never invoke xUnpin() except to deliberately delete a page. ** ^In other words, calls to xUnpin() on a cache with bPurgeable set to ** false will always have the "discard" flag set to true. -** ^Hence, a cache created with bPurgeable false will +** ^Hence, a cache created with bPurgeable set to false will ** never contain any unpinned pages. ** ** [[the xCachesize() page cache method]] ** ^(The xCachesize() method may be called at any time by SQLite to set the -** suggested maximum cache-size (number of pages stored by) the cache +** suggested maximum cache-size (number of pages stored) for the cache ** instance passed as the first argument. This is the value configured using ** the SQLite "[PRAGMA cache_size]" command.)^ As with the bPurgeable ** parameter, the implementation is not required to do anything with this @@ -9566,12 +9566,12 @@ struct sqlite3_pcache_page { ** implementation must return a pointer to the page buffer with its content ** intact. If the requested page is not already in the cache, then the ** cache implementation should use the value of the createFlag -** parameter to help it determined what action to take: +** parameter to help it determine what action to take: ** ** **
createFlag Behavior when page is not already in cache **
0 Do not allocate a new page. Return NULL. -**
1 Allocate a new page if it easy and convenient to do so. +**
1 Allocate a new page if it is easy and convenient to do so. ** Otherwise return NULL. **
2 Make every effort to allocate a new page. Only return ** NULL if allocating a new page is effectively impossible. @@ -9588,7 +9588,7 @@ struct sqlite3_pcache_page { ** as its second argument. If the third parameter, discard, is non-zero, ** then the page must be evicted from the cache. ** ^If the discard parameter is -** zero, then the page may be discarded or retained at the discretion of +** zero, then the page may be discarded or retained at the discretion of the ** page cache implementation. ^The page cache implementation ** may choose to evict unpinned pages at any time. ** @@ -9606,7 +9606,7 @@ struct sqlite3_pcache_page { ** When SQLite calls the xTruncate() method, the cache must discard all ** existing cache entries with page numbers (keys) greater than or equal ** to the value of the iLimit parameter passed to xTruncate(). If any -** of these pages are pinned, they are implicitly unpinned, meaning that +** of these pages are pinned, they become implicitly unpinned, meaning that ** they can be safely discarded. ** ** [[the xDestroy() page cache method]] @@ -9905,7 +9905,7 @@ SQLITE_API int sqlite3_backup_pagecount(sqlite3_backup *p); ** application receives an SQLITE_LOCKED error, it may call the ** sqlite3_unlock_notify() method with the blocked connection handle as ** the first argument to register for a callback that will be invoked -** when the blocking connections current transaction is concluded. ^The +** when the blocking connection's current transaction is concluded. ^The ** callback is invoked from within the [sqlite3_step] or [sqlite3_close] ** call that concludes the blocking connection's transaction. ** @@ -9925,7 +9925,7 @@ SQLITE_API int sqlite3_backup_pagecount(sqlite3_backup *p); ** blocked connection already has a registered unlock-notify callback, ** then the new callback replaces the old.)^ ^If sqlite3_unlock_notify() is ** called with a NULL pointer as its second argument, then any existing -** unlock-notify callback is canceled. ^The blocked connections +** unlock-notify callback is canceled. ^The blocked connection's ** unlock-notify callback may also be canceled by closing the blocked ** connection using [sqlite3_close()]. ** @@ -10323,7 +10323,7 @@ SQLITE_API int sqlite3_vtab_config(sqlite3*, int op, ...); ** support constraints. In this configuration (which is the default) if ** a call to the [xUpdate] method returns [SQLITE_CONSTRAINT], then the entire ** statement is rolled back as if [ON CONFLICT | OR ABORT] had been -** specified as part of the users SQL statement, regardless of the actual +** specified as part of the user's SQL statement, regardless of the actual ** ON CONFLICT mode specified. ** ** If X is non-zero, then the virtual table implementation guarantees @@ -10357,7 +10357,7 @@ SQLITE_API int sqlite3_vtab_config(sqlite3*, int op, ...); ** [[SQLITE_VTAB_INNOCUOUS]]
<dt>SQLITE_VTAB_INNOCUOUS</dt>
** <dd>
Calls of the form ** [sqlite3_vtab_config](db,SQLITE_VTAB_INNOCUOUS) from within the -** the [xConnect] or [xCreate] methods of a [virtual table] implementation +** [xConnect] or [xCreate] methods of a [virtual table] implementation ** identify that virtual table as being safe to use from within triggers ** and views. Conceptually, the SQLITE_VTAB_INNOCUOUS tag means that the ** virtual table can do no serious harm even if it is controlled by a @@ -10525,7 +10525,7 @@ SQLITE_API const char *sqlite3_vtab_collation(sqlite3_index_info*,int); **
** ** ^For the purposes of comparing virtual table output values to see if the -** values are same value for sorting purposes, two NULL values are considered +** values are the same value for sorting purposes, two NULL values are considered ** to be the same. In other words, the comparison operator is "IS" ** (or "IS NOT DISTINCT FROM") and not "==". ** @@ -10535,7 +10535,7 @@ SQLITE_API const char *sqlite3_vtab_collation(sqlite3_index_info*,int); ** ** ^A virtual table implementation is always free to return rows in any order ** it wants, as long as the "orderByConsumed" flag is not set. ^When the -** the "orderByConsumed" flag is unset, the query planner will add extra +** "orderByConsumed" flag is unset, the query planner will add extra ** [bytecode] to ensure that the final results returned by the SQL query are ** ordered correctly. The use of the "orderByConsumed" flag and the ** sqlite3_vtab_distinct() interface is merely an optimization. ^Careful @@ -10632,7 +10632,7 @@ SQLITE_API int sqlite3_vtab_in(sqlite3_index_info*, int iCons, int bHandle); ** sqlite3_vtab_in_next(X,P) should be one of the parameters to the ** xFilter method which invokes these routines, and specifically ** a parameter that was previously selected for all-at-once IN constraint -** processing use the [sqlite3_vtab_in()] interface in the +** processing using the [sqlite3_vtab_in()] interface in the ** [xBestIndex|xBestIndex method]. ^(If the X parameter is not ** an xFilter argument that was selected for all-at-once IN constraint ** processing, then these routines return [SQLITE_ERROR].)^ @@ -10687,7 +10687,7 @@ SQLITE_API int sqlite3_vtab_in_next(sqlite3_value *pVal, sqlite3_value **ppOut); ** and only if *V is set to a value. ^The sqlite3_vtab_rhs_value(P,J,V) ** inteface returns SQLITE_NOTFOUND if the right-hand side of the J-th ** constraint is not available. ^The sqlite3_vtab_rhs_value() interface -** can return an result code other than SQLITE_OK or SQLITE_NOTFOUND if +** can return a result code other than SQLITE_OK or SQLITE_NOTFOUND if ** something goes wrong. ** ** The sqlite3_vtab_rhs_value() interface is usually only successful if @@ -10715,8 +10715,8 @@ SQLITE_API int sqlite3_vtab_rhs_value(sqlite3_index_info*, int, sqlite3_value ** ** KEYWORDS: {conflict resolution mode} ** ** These constants are returned by [sqlite3_vtab_on_conflict()] to -** inform a [virtual table] implementation what the [ON CONFLICT] mode -** is for the SQL statement being evaluated. +** inform a [virtual table] implementation of the [ON CONFLICT] mode +** for the SQL statement being evaluated. ** ** Note that the [SQLITE_IGNORE] constant is also used as a potential ** return value from the [sqlite3_set_authorizer()] callback and that @@ -10756,39 +10756,39 @@ SQLITE_API int sqlite3_vtab_rhs_value(sqlite3_index_info*, int, sqlite3_value ** ** [[SQLITE_SCANSTAT_EST]]
<dt>SQLITE_SCANSTAT_EST</dt>
** <dd>
^The "double" variable pointed to by the V parameter will be set to the ** query planner's estimate for the average number of rows output from each -** iteration of the X-th loop. If the query planner's estimates was accurate, +** iteration of the X-th loop. If the query planner's estimate was accurate, ** then this value will approximate the quotient NVISIT/NLOOP and the ** product of this value for all prior loops with the same SELECTID will -** be the NLOOP value for the current loop. +** be the NLOOP value for the current loop.
**
** [[SQLITE_SCANSTAT_NAME]] <dt>SQLITE_SCANSTAT_NAME</dt>
** <dd>
^The "const char *" variable pointed to by the V parameter will be set ** to a zero-terminated UTF-8 string containing the name of the index or table -** used for the X-th loop. +** used for the X-th loop.
**
** [[SQLITE_SCANSTAT_EXPLAIN]] <dt>SQLITE_SCANSTAT_EXPLAIN</dt>
** <dd>
^The "const char *" variable pointed to by the V parameter will be set ** to a zero-terminated UTF-8 string containing the [EXPLAIN QUERY PLAN] -** description for the X-th loop. +** description for the X-th loop.
**
** [[SQLITE_SCANSTAT_SELECTID]] <dt>SQLITE_SCANSTAT_SELECTID</dt>
** <dd>
^The "int" variable pointed to by the V parameter will be set to the ** id for the X-th query plan element. The id value is unique within the ** statement. The select-id is the same value as is output in the first -** column of an [EXPLAIN QUERY PLAN] query. +** column of an [EXPLAIN QUERY PLAN] query.
**
** [[SQLITE_SCANSTAT_PARENTID]] <dt>SQLITE_SCANSTAT_PARENTID</dt>
** <dd>
The "int" variable pointed to by the V parameter will be set to the -** the id of the parent of the current query element, if applicable, or +** id of the parent of the current query element, if applicable, or ** to zero if the query element has no parent. This is the same value as -** returned in the second column of an [EXPLAIN QUERY PLAN] query. +** returned in the second column of an [EXPLAIN QUERY PLAN] query.
**
** [[SQLITE_SCANSTAT_NCYCLE]] <dt>SQLITE_SCANSTAT_NCYCLE</dt>
** <dd>
The sqlite3_int64 output value is set to the number of cycles, ** according to the processor time-stamp counter, that elapsed while the ** query element was being processed. This value is not available for ** all query elements - if it is unavailable the output variable is -** set to -1. +** set to -1.
** */ #define SQLITE_SCANSTAT_NLOOP 0 @@ -10829,8 +10829,8 @@ SQLITE_API int sqlite3_vtab_rhs_value(sqlite3_index_info*, int, sqlite3_value ** ** sqlite3_stmt_scanstatus_v2() with a zeroed flags parameter. ** ** Parameter "idx" identifies the specific query element to retrieve statistics -** for. Query elements are numbered starting from zero. A value of -1 may be -** to query for statistics regarding the entire query. ^If idx is out of range +** for. Query elements are numbered starting from zero. A value of -1 may +** retrieve statistics for the entire query. ^If idx is out of range ** - less than -1 or greater than or equal to the total number of query ** elements used to implement the statement - a non-zero value is returned and ** the variable that pOut points to is unchanged. @@ -10987,8 +10987,8 @@ SQLITE_API int sqlite3_db_cacheflush(sqlite3*); ** triggers; and so forth. ** ** When the [sqlite3_blob_write()] API is used to update a blob column, -** the pre-update hook is invoked with SQLITE_DELETE. This is because the -** in this case the new values are not available. In this case, when a +** the pre-update hook is invoked with SQLITE_DELETE, because +** the new values are not yet available. In this case, when a ** callback made with op==SQLITE_DELETE is actually a write using the ** sqlite3_blob_write() API, the [sqlite3_preupdate_blobwrite()] returns ** the index of the column being written. In other cases, where the @@ -11241,7 +11241,7 @@ SQLITE_API SQLITE_EXPERIMENTAL int sqlite3_snapshot_recover(sqlite3 *db, const c ** For an ordinary on-disk database file, the serialization is just a ** copy of the disk file. For an in-memory database or a "TEMP" database, ** the serialization is the same sequence of bytes which would be written -** to disk if that database where backed up to disk. +** to disk if that database were backed up to disk. ** ** The usual case is that sqlite3_serialize() copies the serialization of ** the database into memory obtained from [sqlite3_malloc64()] and returns @@ -11250,7 +11250,7 @@ SQLITE_API SQLITE_EXPERIMENTAL int sqlite3_snapshot_recover(sqlite3 *db, const c ** contains the SQLITE_SERIALIZE_NOCOPY bit, then no memory allocations ** are made, and the sqlite3_serialize() function will return a pointer ** to the contiguous memory representation of the database that SQLite -** is currently using for that database, or NULL if the no such contiguous +** is currently using for that database, or NULL if no such contiguous ** memory representation of the database exists. A contiguous memory ** representation of the database will usually only exist if there has ** been a prior call to [sqlite3_deserialize(D,S,...)] with the same @@ -11321,7 +11321,7 @@ SQLITE_API unsigned char *sqlite3_serialize( ** database is currently in a read transaction or is involved in a backup ** operation. ** -** It is not possible to deserialized into the TEMP database. If the +** It is not possible to deserialize into the TEMP database. If the ** S argument to sqlite3_deserialize(D,S,P,N,M,F) is "temp" then the ** function returns SQLITE_ERROR. 
** @@ -11343,7 +11343,7 @@ SQLITE_API int sqlite3_deserialize( sqlite3 *db, /* The database connection */ const char *zSchema, /* Which DB to reopen with the deserialization */ unsigned char *pData, /* The serialized database content */ - sqlite3_int64 szDb, /* Number bytes in the deserialization */ + sqlite3_int64 szDb, /* Number of bytes in the deserialization */ sqlite3_int64 szBuf, /* Total size of buffer pData[] */ unsigned mFlags /* Zero or more SQLITE_DESERIALIZE_* flags */ ); @@ -11351,7 +11351,7 @@ SQLITE_API int sqlite3_deserialize( /* ** CAPI3REF: Flags for sqlite3_deserialize() ** -** The following are allowed values for 6th argument (the F argument) to +** The following are allowed values for the 6th argument (the F argument) to ** the [sqlite3_deserialize(D,S,P,N,M,F)] interface. ** ** The SQLITE_DESERIALIZE_FREEONCLOSE means that the database serialization @@ -19168,7 +19168,6 @@ struct Index { unsigned hasStat1:1; /* aiRowLogEst values come from sqlite_stat1 */ unsigned bNoQuery:1; /* Do not use this index to optimize queries */ unsigned bAscKeyBug:1; /* True if the bba7b69f9849b5bf bug applies */ - unsigned bIdxRowid:1; /* One or more of the index keys is the ROWID */ unsigned bHasVCol:1; /* Index references one or more VIRTUAL columns */ unsigned bHasExpr:1; /* Index contains an expression, either a literal ** expression, or a reference to a VIRTUAL column */ @@ -19441,6 +19440,7 @@ struct Expr { Table *pTab; /* TK_COLUMN: Table containing column. Can be NULL ** for a column of an index on an expression */ Window *pWin; /* EP_WinFunc: Window/Filter defn for a function */ + int nReg; /* TK_NULLS: Number of registers to NULL out */ struct { /* TK_IN, TK_SELECT, and TK_EXISTS */ int iAddr; /* Subroutine entry address */ int regReturn; /* Register used to hold return address */ @@ -21475,6 +21475,7 @@ SQLITE_PRIVATE void sqlite3ExprCodeGeneratedColumn(Parse*, Table*, Column*, int) SQLITE_PRIVATE void sqlite3ExprCodeCopy(Parse*, Expr*, int); SQLITE_PRIVATE void sqlite3ExprCodeFactorable(Parse*, Expr*, int); SQLITE_PRIVATE int sqlite3ExprCodeRunJustOnce(Parse*, Expr*, int); +SQLITE_PRIVATE void sqlite3ExprNullRegisterRange(Parse*, int, int); SQLITE_PRIVATE int sqlite3ExprCodeTemp(Parse*, Expr*, int*); SQLITE_PRIVATE int sqlite3ExprCodeTarget(Parse*, Expr*, int); SQLITE_PRIVATE int sqlite3ExprCodeExprList(Parse*, ExprList*, int, int, u8); @@ -111488,7 +111489,7 @@ SQLITE_PRIVATE Expr *sqlite3ExprAnd(Parse *pParse, Expr *pLeft, Expr *pRight){ return pLeft; }else{ u32 f = pLeft->flags | pRight->flags; - if( (f&(EP_OuterON|EP_InnerON|EP_IsFalse))==EP_IsFalse + if( (f&(EP_OuterON|EP_InnerON|EP_IsFalse|EP_HasFunc))==EP_IsFalse && !IN_RENAME_OBJECT ){ sqlite3ExprDeferredDelete(pParse, pLeft); @@ -115242,6 +115243,12 @@ SQLITE_PRIVATE int sqlite3ExprCodeTarget(Parse *pParse, Expr *pExpr, int target) sqlite3VdbeLoadString(v, target, pExpr->u.zToken); return target; } + case TK_NULLS: { + /* Set a range of registers to NULL. pExpr->y.nReg registers starting + ** with target */ + sqlite3VdbeAddOp3(v, OP_Null, 0, target, target + pExpr->y.nReg - 1); + return target; + } default: { /* Make NULL the default case so that if a bug causes an illegal ** Expr node to be passed into this function, it will be handled @@ -115926,6 +115933,25 @@ SQLITE_PRIVATE int sqlite3ExprCodeRunJustOnce( return regDest; } +/* +** Make arrangements to invoke OP_Null on a range of registers +** during initialization. 
+*/ +SQLITE_PRIVATE SQLITE_NOINLINE void sqlite3ExprNullRegisterRange( + Parse *pParse, /* Parsing context */ + int iReg, /* First register to set to NULL */ + int nReg /* Number of sequential registers to NULL out */ +){ + u8 okConstFactor = pParse->okConstFactor; + Expr t; + memset(&t, 0, sizeof(t)); + t.op = TK_NULLS; + t.y.nReg = nReg; + pParse->okConstFactor = 1; + sqlite3ExprCodeRunJustOnce(pParse, &t, iReg); + pParse->okConstFactor = okConstFactor; +} + /* ** Generate code to evaluate an expression and store the results ** into a register. Return the register number where the results @@ -127196,7 +127222,6 @@ SQLITE_PRIVATE void sqlite3CreateIndex( assert( j<=0x7fff ); if( j<0 ){ j = pTab->iPKey; - pIndex->bIdxRowid = 1; }else{ if( pTab->aCol[j].notNull==0 ){ pIndex->uniqNotNull = 0; @@ -153177,6 +153202,7 @@ SQLITE_PRIVATE int sqlite3Select( sqlite3VdbeAddOp2(v, OP_Integer, 0, iAbortFlag); VdbeComment((v, "clear abort flag")); sqlite3VdbeAddOp3(v, OP_Null, 0, iAMem, iAMem+pGroupBy->nExpr-1); + sqlite3ExprNullRegisterRange(pParse, iAMem, pGroupBy->nExpr); /* Begin a loop that will extract all source rows in GROUP BY order. ** This might involve two separate loops with an OP_Sort in between, or @@ -160173,7 +160199,9 @@ static Expr *removeUnindexableInClauseTerms( int iField; assert( (pLoop->aLTerm[i]->eOperator & (WO_OR|WO_AND))==0 ); iField = pLoop->aLTerm[i]->u.x.iField - 1; - if( pOrigRhs->a[iField].pExpr==0 ) continue; /* Duplicate PK column */ + if( NEVER(pOrigRhs->a[iField].pExpr==0) ){ + continue; /* Duplicate PK column */ + } pRhs = sqlite3ExprListAppend(pParse, pRhs, pOrigRhs->a[iField].pExpr); pOrigRhs->a[iField].pExpr = 0; if( pRhs ) pRhs->a[pRhs->nExpr-1].u.x.iOrderByCol = iField+1; @@ -160270,7 +160298,7 @@ static SQLITE_NOINLINE void codeINTerm( return; } } - for(i=iEq;inLTerm; i++){ + for(i=iEq; inLTerm; i++){ assert( pLoop->aLTerm[i]!=0 ); if( pLoop->aLTerm[i]->pExpr==pX ) nEq++; } @@ -160279,22 +160307,13 @@ static SQLITE_NOINLINE void codeINTerm( if( !ExprUseXSelect(pX) || pX->x.pSelect->pEList->nExpr==1 ){ eType = sqlite3FindInIndex(pParse, pX, IN_INDEX_LOOP, 0, 0, &iTab); }else{ - Expr *pExpr = pTerm->pExpr; - if( pExpr->iTable==0 || !ExprHasProperty(pExpr, EP_Subrtn) ){ - sqlite3 *db = pParse->db; - pX = removeUnindexableInClauseTerms(pParse, iEq, pLoop, pX); - if( !db->mallocFailed ){ - aiMap = (int*)sqlite3DbMallocZero(pParse->db, sizeof(int)*nEq); - eType = sqlite3FindInIndex(pParse, pX, IN_INDEX_LOOP, 0, aiMap,&iTab); - pExpr->iTable = iTab; - } - sqlite3ExprDelete(db, pX); - }else{ - int n = sqlite3ExprVectorSize(pX->pLeft); - aiMap = (int*)sqlite3DbMallocZero(pParse->db, sizeof(int)*MAX(nEq,n)); - eType = sqlite3FindInIndex(pParse, pX, IN_INDEX_LOOP, 0, aiMap, &iTab); + sqlite3 *db = pParse->db; + Expr *pXMod = removeUnindexableInClauseTerms(pParse, iEq, pLoop, pX); + if( !db->mallocFailed ){ + aiMap = (int*)sqlite3DbMallocZero(db, sizeof(int)*nEq); + eType = sqlite3FindInIndex(pParse, pXMod, IN_INDEX_LOOP, 0, aiMap, &iTab); } - pX = pExpr; + sqlite3ExprDelete(db, pXMod); } if( eType==IN_INDEX_INDEX_DESC ){ @@ -160324,7 +160343,7 @@ static SQLITE_NOINLINE void codeINTerm( if( pIn ){ int iMap = 0; /* Index in aiMap[] */ pIn += i; - for(i=iEq;inLTerm; i++){ + for(i=iEq; inLTerm; i++){ if( pLoop->aLTerm[i]->pExpr==pX ){ int iOut = iTarget + i - iEq; if( eType==IN_INDEX_ROWID ){ @@ -167682,6 +167701,7 @@ static int whereLoopAddBtreeIndex( if( ExprUseXSelect(pExpr) ){ /* "x IN (SELECT ...)": TUNING: the SELECT returns 25 rows */ int i; + int bRedundant = 0; nIn 
= 46; assert( 46==sqlite3LogEst(25) ); /* The expression may actually be of the form (x, y) IN (SELECT...). @@ -167690,7 +167710,20 @@ static int whereLoopAddBtreeIndex( ** for each such term. The following loop checks that pTerm is the ** first such term in use, and sets nIn back to 0 if it is not. */ for(i=0; inLTerm-1; i++){ - if( pNew->aLTerm[i] && pNew->aLTerm[i]->pExpr==pExpr ) nIn = 0; + if( pNew->aLTerm[i] && pNew->aLTerm[i]->pExpr==pExpr ){ + nIn = 0; + if( pNew->aLTerm[i]->u.x.iField == pTerm->u.x.iField ){ + /* Detect when two or more columns of an index match the same + ** column of a vector IN operater, and avoid adding the column + ** to the WhereLoop more than once. See tag-20250707-01 + ** in test/rowvalue.test */ + bRedundant = 1; + } + } + } + if( bRedundant ){ + pNew->nLTerm--; + continue; } }else if( ALWAYS(pExpr->x.pList && pExpr->x.pList->nExpr) ){ /* "x IN (value, value, ...)" */ @@ -167922,7 +167955,7 @@ static int whereLoopAddBtreeIndex( if( (pNew->wsFlags & WHERE_TOP_LIMIT)==0 && pNew->u.btree.nEqnColumn && (pNew->u.btree.nEqnKeyCol || - (pProbe->idxType!=SQLITE_IDXTYPE_PRIMARYKEY && !pProbe->bIdxRowid)) + pProbe->idxType!=SQLITE_IDXTYPE_PRIMARYKEY) ){ if( pNew->u.btree.nEq>3 ){ sqlite3ProgressCheck(pParse); @@ -168465,6 +168498,7 @@ static int whereLoopAddBtree( pNew->u.btree.nEq = 0; pNew->u.btree.nBtm = 0; pNew->u.btree.nTop = 0; + pNew->u.btree.nDistinctCol = 0; pNew->nSkip = 0; pNew->nLTerm = 0; pNew->iSortIdx = 0; @@ -169533,8 +169567,6 @@ static i8 wherePathSatisfiesOrderBy( obSat = obDone; } break; - }else if( wctrlFlags & WHERE_DISTINCTBY ){ - pLoop->u.btree.nDistinctCol = 0; } iCur = pWInfo->pTabList->a[pLoop->iTab].iCursor; @@ -179897,12 +179929,21 @@ static YYACTIONTYPE yy_reduce( ** expr1 IN () ** expr1 NOT IN () ** - ** simplify to constants 0 (false) and 1 (true), respectively, - ** regardless of the value of expr1. + ** simplify to constants 0 (false) and 1 (true), respectively. + ** + ** Except, do not apply this optimization if expr1 contains a function + ** because that function might be an aggregate (we don't know yet whether + ** it is or not) and if it is an aggregate, that could change the meaning + ** of the whole query. */ - sqlite3ExprUnmapAndDelete(pParse, yymsp[-4].minor.yy590); - yymsp[-4].minor.yy590 = sqlite3Expr(pParse->db, TK_STRING, yymsp[-3].minor.yy502 ? "true" : "false"); - if( yymsp[-4].minor.yy590 ) sqlite3ExprIdToTrueFalse(yymsp[-4].minor.yy590); + Expr *pB = sqlite3Expr(pParse->db, TK_STRING, yymsp[-3].minor.yy502 ? "true" : "false"); + if( pB ) sqlite3ExprIdToTrueFalse(pB); + if( !ExprHasProperty(yymsp[-4].minor.yy590, EP_HasFunc) ){ + sqlite3ExprUnmapAndDelete(pParse, yymsp[-4].minor.yy590); + yymsp[-4].minor.yy590 = pB; + }else{ + yymsp[-4].minor.yy590 = sqlite3PExpr(pParse, yymsp[-3].minor.yy502 ? TK_OR : TK_AND, pB, yymsp[-4].minor.yy590); + } }else{ Expr *pRHS = yymsp[-1].minor.yy402->a[0].pExpr; if( yymsp[-1].minor.yy402->nExpr==1 && sqlite3ExprIsConstant(pParse,pRHS) && yymsp[-4].minor.yy590->op!=TK_VECTOR ){ @@ -181508,7 +181549,7 @@ static int getToken(const unsigned char **pz){ int t; /* Token type to return */ do { z += sqlite3GetToken(z, &t); - }while( t==TK_SPACE ); + }while( t==TK_SPACE || t==TK_COMMENT ); if( t==TK_ID || t==TK_STRING || t==TK_JOIN_KW @@ -246163,9 +246204,9 @@ static void fts5SegIterSetNext(Fts5Index *p, Fts5SegIter *pIter){ ** leave an error in the Fts5Index object. 
*/ static void fts5SegIterAllocTombstone(Fts5Index *p, Fts5SegIter *pIter){ - const int nTomb = pIter->pSeg->nPgTombstone; + const i64 nTomb = (i64)pIter->pSeg->nPgTombstone; if( nTomb>0 ){ - int nByte = SZ_FTS5TOMBSTONEARRAY(nTomb+1); + i64 nByte = SZ_FTS5TOMBSTONEARRAY(nTomb+1); Fts5TombstoneArray *pNew; pNew = (Fts5TombstoneArray*)sqlite3Fts5MallocZero(&p->rc, nByte); if( pNew ){ @@ -257266,7 +257307,7 @@ static void fts5SourceIdFunc( ){ assert( nArg==0 ); UNUSED_PARAM2(nArg, apUnused); - sqlite3_result_text(pCtx, "fts5: 2025-06-28 14:00:48 2af157d77fb1304a74176eaee7fbc7c7e932d946bf25325e9c26c91db19e3079", -1, SQLITE_TRANSIENT); + sqlite3_result_text(pCtx, "fts5: 2025-07-30 19:33:53 4d8adfb30e03f9cf27f800a2c1ba3c48fb4ca1b08b0f5ed59a4d5ecbf45e20a3", -1, SQLITE_TRANSIENT); } /* diff --git a/deps/sqlite/sqlite3.h b/deps/sqlite/sqlite3.h index f56dd8d86a2dda..c2ed750305b244 100644 --- a/deps/sqlite/sqlite3.h +++ b/deps/sqlite/sqlite3.h @@ -146,9 +146,9 @@ extern "C" { ** [sqlite3_libversion_number()], [sqlite3_sourceid()], ** [sqlite_version()] and [sqlite_source_id()]. */ -#define SQLITE_VERSION "3.50.2" -#define SQLITE_VERSION_NUMBER 3050002 -#define SQLITE_SOURCE_ID "2025-06-28 14:00:48 2af157d77fb1304a74176eaee7fbc7c7e932d946bf25325e9c26c91db19e3079" +#define SQLITE_VERSION "3.50.4" +#define SQLITE_VERSION_NUMBER 3050004 +#define SQLITE_SOURCE_ID "2025-07-30 19:33:53 4d8adfb30e03f9cf27f800a2c1ba3c48fb4ca1b08b0f5ed59a4d5ecbf45e20a3" /* ** CAPI3REF: Run-Time Library Version Numbers @@ -9058,13 +9058,13 @@ SQLITE_API int sqlite3_stmt_status(sqlite3_stmt*, int op,int resetFlg); ** [[SQLITE_STMTSTATUS_SORT]]
<dt>SQLITE_STMTSTATUS_SORT</dt>
** <dd>
^This is the number of sort operations that have occurred. ** A non-zero value in this counter may indicate an opportunity to -** improvement performance through careful use of indices.
+** improve performance through careful use of indices. ** ** [[SQLITE_STMTSTATUS_AUTOINDEX]]
<dt>SQLITE_STMTSTATUS_AUTOINDEX</dt>
** <dd>
^This is the number of rows inserted into transient indices that ** were created automatically in order to help joins run faster. ** A non-zero value in this counter may indicate an opportunity to -** improvement performance by adding permanent indices that do not +** improve performance by adding permanent indices that do not ** need to be reinitialized each time the statement is run.
**
** [[SQLITE_STMTSTATUS_VM_STEP]] <dt>SQLITE_STMTSTATUS_VM_STEP</dt>
@@ -9073,19 +9073,19 @@ SQLITE_API int sqlite3_stmt_status(sqlite3_stmt*, int op,int resetFlg); ** to 2147483647. The number of virtual machine operations can be ** used as a proxy for the total work done by the prepared statement. ** If the number of virtual machine operations exceeds 2147483647 -** then the value returned by this statement status code is undefined. +** then the value returned by this statement status code is undefined. ** ** [[SQLITE_STMTSTATUS_REPREPARE]]
<dt>SQLITE_STMTSTATUS_REPREPARE</dt>
** <dd>
^This is the number of times that the prepare statement has been ** automatically regenerated due to schema changes or changes to -** [bound parameters] that might affect the query plan. +** [bound parameters] that might affect the query plan.
**
** [[SQLITE_STMTSTATUS_RUN]] <dt>SQLITE_STMTSTATUS_RUN</dt>
** <dd>
^This is the number of times that the prepared statement has ** been run. A single "run" for the purposes of this counter is one ** or more calls to [sqlite3_step()] followed by a call to [sqlite3_reset()]. ** The counter is incremented on the first [sqlite3_step()] call of each -** cycle. +** cycle.
** ** [[SQLITE_STMTSTATUS_FILTER_MISS]] ** [[SQLITE_STMTSTATUS_FILTER HIT]] @@ -9095,7 +9095,7 @@ SQLITE_API int sqlite3_stmt_status(sqlite3_stmt*, int op,int resetFlg); ** step was bypassed because a Bloom filter returned not-found. The ** corresponding SQLITE_STMTSTATUS_FILTER_MISS value is the number of ** times that the Bloom filter returned a find, and thus the join step -** had to be processed as normal. +** had to be processed as normal. ** ** [[SQLITE_STMTSTATUS_MEMUSED]]
<dt>SQLITE_STMTSTATUS_MEMUSED</dt>
** <dd>
^This is the approximate number of bytes of heap memory @@ -9200,9 +9200,9 @@ struct sqlite3_pcache_page { ** SQLite will typically create one cache instance for each open database file, ** though this is not guaranteed. ^The ** first parameter, szPage, is the size in bytes of the pages that must -** be allocated by the cache. ^szPage will always a power of two. ^The +** be allocated by the cache. ^szPage will always be a power of two. ^The ** second parameter szExtra is a number of bytes of extra storage -** associated with each page cache entry. ^The szExtra parameter will +** associated with each page cache entry. ^The szExtra parameter will be ** a number less than 250. SQLite will use the ** extra szExtra bytes on each page to store metadata about the underlying ** database page on disk. The value passed into szExtra depends @@ -9210,17 +9210,17 @@ struct sqlite3_pcache_page { ** ^The third argument to xCreate(), bPurgeable, is true if the cache being ** created will be used to cache database pages of a file stored on disk, or ** false if it is used for an in-memory database. The cache implementation -** does not have to do anything special based with the value of bPurgeable; +** does not have to do anything special based upon the value of bPurgeable; ** it is purely advisory. ^On a cache where bPurgeable is false, SQLite will ** never invoke xUnpin() except to deliberately delete a page. ** ^In other words, calls to xUnpin() on a cache with bPurgeable set to ** false will always have the "discard" flag set to true. -** ^Hence, a cache created with bPurgeable false will +** ^Hence, a cache created with bPurgeable set to false will ** never contain any unpinned pages. ** ** [[the xCachesize() page cache method]] ** ^(The xCachesize() method may be called at any time by SQLite to set the -** suggested maximum cache-size (number of pages stored by) the cache +** suggested maximum cache-size (number of pages stored) for the cache ** instance passed as the first argument. This is the value configured using ** the SQLite "[PRAGMA cache_size]" command.)^ As with the bPurgeable ** parameter, the implementation is not required to do anything with this @@ -9247,12 +9247,12 @@ struct sqlite3_pcache_page { ** implementation must return a pointer to the page buffer with its content ** intact. If the requested page is not already in the cache, then the ** cache implementation should use the value of the createFlag -** parameter to help it determined what action to take: +** parameter to help it determine what action to take: ** ** **
createFlag Behavior when page is not already in cache **
0 Do not allocate a new page. Return NULL. -**
1 Allocate a new page if it easy and convenient to do so. +**
1 Allocate a new page if it is easy and convenient to do so. ** Otherwise return NULL. **
2 Make every effort to allocate a new page. Only return ** NULL if allocating a new page is effectively impossible. @@ -9269,7 +9269,7 @@ struct sqlite3_pcache_page { ** as its second argument. If the third parameter, discard, is non-zero, ** then the page must be evicted from the cache. ** ^If the discard parameter is -** zero, then the page may be discarded or retained at the discretion of +** zero, then the page may be discarded or retained at the discretion of the ** page cache implementation. ^The page cache implementation ** may choose to evict unpinned pages at any time. ** @@ -9287,7 +9287,7 @@ struct sqlite3_pcache_page { ** When SQLite calls the xTruncate() method, the cache must discard all ** existing cache entries with page numbers (keys) greater than or equal ** to the value of the iLimit parameter passed to xTruncate(). If any -** of these pages are pinned, they are implicitly unpinned, meaning that +** of these pages are pinned, they become implicitly unpinned, meaning that ** they can be safely discarded. ** ** [[the xDestroy() page cache method]] @@ -9586,7 +9586,7 @@ SQLITE_API int sqlite3_backup_pagecount(sqlite3_backup *p); ** application receives an SQLITE_LOCKED error, it may call the ** sqlite3_unlock_notify() method with the blocked connection handle as ** the first argument to register for a callback that will be invoked -** when the blocking connections current transaction is concluded. ^The +** when the blocking connection's current transaction is concluded. ^The ** callback is invoked from within the [sqlite3_step] or [sqlite3_close] ** call that concludes the blocking connection's transaction. ** @@ -9606,7 +9606,7 @@ SQLITE_API int sqlite3_backup_pagecount(sqlite3_backup *p); ** blocked connection already has a registered unlock-notify callback, ** then the new callback replaces the old.)^ ^If sqlite3_unlock_notify() is ** called with a NULL pointer as its second argument, then any existing -** unlock-notify callback is canceled. ^The blocked connections +** unlock-notify callback is canceled. ^The blocked connection's ** unlock-notify callback may also be canceled by closing the blocked ** connection using [sqlite3_close()]. ** @@ -10004,7 +10004,7 @@ SQLITE_API int sqlite3_vtab_config(sqlite3*, int op, ...); ** support constraints. In this configuration (which is the default) if ** a call to the [xUpdate] method returns [SQLITE_CONSTRAINT], then the entire ** statement is rolled back as if [ON CONFLICT | OR ABORT] had been -** specified as part of the users SQL statement, regardless of the actual +** specified as part of the user's SQL statement, regardless of the actual ** ON CONFLICT mode specified. ** ** If X is non-zero, then the virtual table implementation guarantees @@ -10038,7 +10038,7 @@ SQLITE_API int sqlite3_vtab_config(sqlite3*, int op, ...); ** [[SQLITE_VTAB_INNOCUOUS]]
<dt>SQLITE_VTAB_INNOCUOUS</dt>
** <dd>
Calls of the form ** [sqlite3_vtab_config](db,SQLITE_VTAB_INNOCUOUS) from within the -** the [xConnect] or [xCreate] methods of a [virtual table] implementation +** [xConnect] or [xCreate] methods of a [virtual table] implementation ** identify that virtual table as being safe to use from within triggers ** and views. Conceptually, the SQLITE_VTAB_INNOCUOUS tag means that the ** virtual table can do no serious harm even if it is controlled by a @@ -10206,7 +10206,7 @@ SQLITE_API const char *sqlite3_vtab_collation(sqlite3_index_info*,int); **
** ** ^For the purposes of comparing virtual table output values to see if the -** values are same value for sorting purposes, two NULL values are considered +** values are the same value for sorting purposes, two NULL values are considered ** to be the same. In other words, the comparison operator is "IS" ** (or "IS NOT DISTINCT FROM") and not "==". ** @@ -10216,7 +10216,7 @@ SQLITE_API const char *sqlite3_vtab_collation(sqlite3_index_info*,int); ** ** ^A virtual table implementation is always free to return rows in any order ** it wants, as long as the "orderByConsumed" flag is not set. ^When the -** the "orderByConsumed" flag is unset, the query planner will add extra +** "orderByConsumed" flag is unset, the query planner will add extra ** [bytecode] to ensure that the final results returned by the SQL query are ** ordered correctly. The use of the "orderByConsumed" flag and the ** sqlite3_vtab_distinct() interface is merely an optimization. ^Careful @@ -10313,7 +10313,7 @@ SQLITE_API int sqlite3_vtab_in(sqlite3_index_info*, int iCons, int bHandle); ** sqlite3_vtab_in_next(X,P) should be one of the parameters to the ** xFilter method which invokes these routines, and specifically ** a parameter that was previously selected for all-at-once IN constraint -** processing use the [sqlite3_vtab_in()] interface in the +** processing using the [sqlite3_vtab_in()] interface in the ** [xBestIndex|xBestIndex method]. ^(If the X parameter is not ** an xFilter argument that was selected for all-at-once IN constraint ** processing, then these routines return [SQLITE_ERROR].)^ @@ -10368,7 +10368,7 @@ SQLITE_API int sqlite3_vtab_in_next(sqlite3_value *pVal, sqlite3_value **ppOut); ** and only if *V is set to a value. ^The sqlite3_vtab_rhs_value(P,J,V) ** inteface returns SQLITE_NOTFOUND if the right-hand side of the J-th ** constraint is not available. ^The sqlite3_vtab_rhs_value() interface -** can return an result code other than SQLITE_OK or SQLITE_NOTFOUND if +** can return a result code other than SQLITE_OK or SQLITE_NOTFOUND if ** something goes wrong. ** ** The sqlite3_vtab_rhs_value() interface is usually only successful if @@ -10396,8 +10396,8 @@ SQLITE_API int sqlite3_vtab_rhs_value(sqlite3_index_info*, int, sqlite3_value ** ** KEYWORDS: {conflict resolution mode} ** ** These constants are returned by [sqlite3_vtab_on_conflict()] to -** inform a [virtual table] implementation what the [ON CONFLICT] mode -** is for the SQL statement being evaluated. +** inform a [virtual table] implementation of the [ON CONFLICT] mode +** for the SQL statement being evaluated. ** ** Note that the [SQLITE_IGNORE] constant is also used as a potential ** return value from the [sqlite3_set_authorizer()] callback and that @@ -10437,39 +10437,39 @@ SQLITE_API int sqlite3_vtab_rhs_value(sqlite3_index_info*, int, sqlite3_value ** ** [[SQLITE_SCANSTAT_EST]]
<dt>SQLITE_SCANSTAT_EST</dt>
** <dd>
^The "double" variable pointed to by the V parameter will be set to the ** query planner's estimate for the average number of rows output from each -** iteration of the X-th loop. If the query planner's estimates was accurate, +** iteration of the X-th loop. If the query planner's estimate was accurate, ** then this value will approximate the quotient NVISIT/NLOOP and the ** product of this value for all prior loops with the same SELECTID will -** be the NLOOP value for the current loop. +** be the NLOOP value for the current loop.
**
** [[SQLITE_SCANSTAT_NAME]] <dt>SQLITE_SCANSTAT_NAME</dt>
** <dd>
^The "const char *" variable pointed to by the V parameter will be set ** to a zero-terminated UTF-8 string containing the name of the index or table -** used for the X-th loop. +** used for the X-th loop.
**
** [[SQLITE_SCANSTAT_EXPLAIN]] <dt>SQLITE_SCANSTAT_EXPLAIN</dt>
** <dd>
^The "const char *" variable pointed to by the V parameter will be set ** to a zero-terminated UTF-8 string containing the [EXPLAIN QUERY PLAN] -** description for the X-th loop. +** description for the X-th loop.
**
** [[SQLITE_SCANSTAT_SELECTID]] <dt>SQLITE_SCANSTAT_SELECTID</dt>
** <dd>
^The "int" variable pointed to by the V parameter will be set to the ** id for the X-th query plan element. The id value is unique within the ** statement. The select-id is the same value as is output in the first -** column of an [EXPLAIN QUERY PLAN] query. +** column of an [EXPLAIN QUERY PLAN] query.
**
** [[SQLITE_SCANSTAT_PARENTID]] <dt>SQLITE_SCANSTAT_PARENTID</dt>
** <dd>
The "int" variable pointed to by the V parameter will be set to the -** the id of the parent of the current query element, if applicable, or +** id of the parent of the current query element, if applicable, or ** to zero if the query element has no parent. This is the same value as -** returned in the second column of an [EXPLAIN QUERY PLAN] query. +** returned in the second column of an [EXPLAIN QUERY PLAN] query.
**
** [[SQLITE_SCANSTAT_NCYCLE]] <dt>SQLITE_SCANSTAT_NCYCLE</dt>
** <dd>
The sqlite3_int64 output value is set to the number of cycles, ** according to the processor time-stamp counter, that elapsed while the ** query element was being processed. This value is not available for ** all query elements - if it is unavailable the output variable is -** set to -1. +** set to -1.
** */ #define SQLITE_SCANSTAT_NLOOP 0 @@ -10510,8 +10510,8 @@ SQLITE_API int sqlite3_vtab_rhs_value(sqlite3_index_info*, int, sqlite3_value ** ** sqlite3_stmt_scanstatus_v2() with a zeroed flags parameter. ** ** Parameter "idx" identifies the specific query element to retrieve statistics -** for. Query elements are numbered starting from zero. A value of -1 may be -** to query for statistics regarding the entire query. ^If idx is out of range +** for. Query elements are numbered starting from zero. A value of -1 may +** retrieve statistics for the entire query. ^If idx is out of range ** - less than -1 or greater than or equal to the total number of query ** elements used to implement the statement - a non-zero value is returned and ** the variable that pOut points to is unchanged. @@ -10668,8 +10668,8 @@ SQLITE_API int sqlite3_db_cacheflush(sqlite3*); ** triggers; and so forth. ** ** When the [sqlite3_blob_write()] API is used to update a blob column, -** the pre-update hook is invoked with SQLITE_DELETE. This is because the -** in this case the new values are not available. In this case, when a +** the pre-update hook is invoked with SQLITE_DELETE, because +** the new values are not yet available. In this case, when a ** callback made with op==SQLITE_DELETE is actually a write using the ** sqlite3_blob_write() API, the [sqlite3_preupdate_blobwrite()] returns ** the index of the column being written. In other cases, where the @@ -10922,7 +10922,7 @@ SQLITE_API SQLITE_EXPERIMENTAL int sqlite3_snapshot_recover(sqlite3 *db, const c ** For an ordinary on-disk database file, the serialization is just a ** copy of the disk file. For an in-memory database or a "TEMP" database, ** the serialization is the same sequence of bytes which would be written -** to disk if that database where backed up to disk. +** to disk if that database were backed up to disk. ** ** The usual case is that sqlite3_serialize() copies the serialization of ** the database into memory obtained from [sqlite3_malloc64()] and returns @@ -10931,7 +10931,7 @@ SQLITE_API SQLITE_EXPERIMENTAL int sqlite3_snapshot_recover(sqlite3 *db, const c ** contains the SQLITE_SERIALIZE_NOCOPY bit, then no memory allocations ** are made, and the sqlite3_serialize() function will return a pointer ** to the contiguous memory representation of the database that SQLite -** is currently using for that database, or NULL if the no such contiguous +** is currently using for that database, or NULL if no such contiguous ** memory representation of the database exists. A contiguous memory ** representation of the database will usually only exist if there has ** been a prior call to [sqlite3_deserialize(D,S,...)] with the same @@ -11002,7 +11002,7 @@ SQLITE_API unsigned char *sqlite3_serialize( ** database is currently in a read transaction or is involved in a backup ** operation. ** -** It is not possible to deserialized into the TEMP database. If the +** It is not possible to deserialize into the TEMP database. If the ** S argument to sqlite3_deserialize(D,S,P,N,M,F) is "temp" then the ** function returns SQLITE_ERROR. 
** @@ -11024,7 +11024,7 @@ SQLITE_API int sqlite3_deserialize( sqlite3 *db, /* The database connection */ const char *zSchema, /* Which DB to reopen with the deserialization */ unsigned char *pData, /* The serialized database content */ - sqlite3_int64 szDb, /* Number bytes in the deserialization */ + sqlite3_int64 szDb, /* Number of bytes in the deserialization */ sqlite3_int64 szBuf, /* Total size of buffer pData[] */ unsigned mFlags /* Zero or more SQLITE_DESERIALIZE_* flags */ ); @@ -11032,7 +11032,7 @@ SQLITE_API int sqlite3_deserialize( /* ** CAPI3REF: Flags for sqlite3_deserialize() ** -** The following are allowed values for 6th argument (the F argument) to +** The following are allowed values for the 6th argument (the F argument) to ** the [sqlite3_deserialize(D,S,P,N,M,F)] interface. ** ** The SQLITE_DESERIALIZE_FREEONCLOSE means that the database serialization diff --git a/deps/v8/include/v8-persistent-handle.h b/deps/v8/include/v8-persistent-handle.h index 9db5af5dddd557..7e3cf5084e41d8 100644 --- a/deps/v8/include/v8-persistent-handle.h +++ b/deps/v8/include/v8-persistent-handle.h @@ -507,9 +507,16 @@ V8_INLINE void PersistentBase::SetWeak( #if (__GNUC__ >= 8) && !defined(__clang__) #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wcast-function-type" +#endif +#if __clang__ +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wcast-function-type" #endif api_internal::MakeWeak(this->slot(), parameter, reinterpret_cast(callback), type); +#if __clang__ +#pragma clang diagnostic pop +#endif #if (__GNUC__ >= 8) && !defined(__clang__) #pragma GCC diagnostic pop #endif diff --git a/deps/v8/src/base/platform/platform-posix.cc b/deps/v8/src/base/platform/platform-posix.cc index 529a073040af1f..8e43f94ee8c3ba 100644 --- a/deps/v8/src/base/platform/platform-posix.cc +++ b/deps/v8/src/base/platform/platform-posix.cc @@ -76,7 +76,19 @@ #define MAP_ANONYMOUS MAP_ANON #endif -#if defined(V8_OS_SOLARIS) +/* + * NOTE: illumos starting with illumos#14418 (pushed April 20th, 2022) + * prototypes madvise(3C) properly with a `void *` first argument. + * The only way to detect this outside of configure-time checking is to + * check for the existence of MEMCNTL_SHARED, which gets defined for the first + * time in illumos#14418 under the same circumstances save _STRICT_POSIX, which + * thankfully neither Solaris nor illumos builds of Node or V8 do. + * + * If some future illumos push changes the MEMCNTL_SHARED assumptions made + * above, the illumos check below will have to be revisited. This check + * will work on both pre-and-post illumos#14418 illumos environments. + */ +#if defined(V8_OS_SOLARIS) && !(defined(__illumos__) && defined(MEMCNTL_SHARED)) #if (defined(_POSIX_C_SOURCE) && _POSIX_C_SOURCE > 2) || defined(__EXTENSIONS__) extern "C" int madvise(caddr_t, size_t, int); #else diff --git a/doc/api/assert.md b/doc/api/assert.md index fe093bfceda330..cbc069db38e2b6 100644 --- a/doc/api/assert.md +++ b/doc/api/assert.md @@ -129,7 +129,7 @@ Legacy assertion mode may have surprising results, especially when using assert.deepEqual(/a/gi, new Date()); ``` -## Class: assert.AssertionError +## Class: `assert.AssertionError` * Extends: {errors.Error} @@ -149,6 +149,8 @@ added: v0.1.21 * `operator` {string} The `operator` property on the error instance. * `stackStartFn` {Function} If provided, the generated stack trace omits frames before this function. + * `diff` {string} If set to `'full'`, shows the full diff in assertion errors. Defaults to `'simple'`. 
+ Accepted values: `'simple'`, `'full'`. A subclass of {Error} that indicates the failure of an assertion. @@ -215,6 +217,51 @@ try { } ``` +## Class: `assert.Assert` + + + +The `Assert` class allows creating independent assertion instances with custom options. + +### `new assert.Assert([options])` + +* `options` {Object} + * `diff` {string} If set to `'full'`, shows the full diff in assertion errors. Defaults to `'simple'`. + Accepted values: `'simple'`, `'full'`. + * `strict` {boolean} If set to `true`, non-strict methods behave like their + corresponding strict methods. Defaults to `true`. + +Creates a new assertion instance. The `diff` option controls the verbosity of diffs in assertion error messages. + +```js +const { Assert } = require('node:assert'); +const assertInstance = new Assert({ diff: 'full' }); +assertInstance.deepStrictEqual({ a: 1 }, { a: 2 }); +// Shows a full diff in the error message. +``` + +**Important**: When destructuring assertion methods from an `Assert` instance, +the methods lose their connection to the instance's configuration options (such as `diff` and `strict` settings). +The destructured methods will fall back to default behavior instead. + +```js +const myAssert = new Assert({ diff: 'full' }); + +// This works as expected - uses 'full' diff +myAssert.strictEqual({ a: 1 }, { b: { c: 1 } }); + +// This loses the 'full' diff setting - falls back to default 'simple' diff +const { strictEqual } = myAssert; +strictEqual({ a: 1 }, { b: { c: 1 } }); +``` + +When destructured, methods lose access to the instance's `this` context and revert to default assertion behavior +(diff: 'simple', non-strict mode). +To maintain custom options when using destructured methods, avoid +destructuring and call methods directly on the instance. + ## Class: `assert.CallTracker` -* {integer} **Default:** `8192` +* Type: {integer} **Default:** `8192` This is the size (in bytes) of pre-allocated internal `Buffer` instances used for pooling. This value may be modified. @@ -1556,7 +1556,7 @@ console.log(buf.toString('utf8')); ### `buf.buffer` -* {ArrayBuffer} The underlying `ArrayBuffer` object based on which this `Buffer` +* Type: {ArrayBuffer} The underlying `ArrayBuffer` object based on which this `Buffer` object is created. This `ArrayBuffer` is not guaranteed to correspond exactly to the original @@ -1584,7 +1584,7 @@ console.log(buffer.buffer === arrayBuffer); ### `buf.byteOffset` -* {integer} The `byteOffset` of the `Buffer`'s underlying `ArrayBuffer` object. +* Type: {integer} The `byteOffset` of the `Buffer`'s underlying `ArrayBuffer` object. When setting `byteOffset` in `Buffer.from(ArrayBuffer, byteOffset, length)`, or sometimes when allocating a `Buffer` smaller than `Buffer.poolSize`, the @@ -2425,7 +2425,7 @@ If `value` is an empty string or empty `Buffer`, `byteOffset` will be returned. added: v0.1.90 --> -* {integer} +* Type: {integer} Returns the number of bytes in `buf`. @@ -5217,7 +5217,7 @@ added: - v18.15.0 --> -* input {Buffer | ArrayBuffer | TypedArray} The input to validate. +* `input` {Buffer | ArrayBuffer | TypedArray} The input to validate. * Returns: {boolean} This function returns `true` if `input` contains only valid ASCII-encoded data, @@ -5233,7 +5233,7 @@ added: - v18.14.0 --> -* input {Buffer | ArrayBuffer | TypedArray} The input to validate. +* `input` {Buffer | ArrayBuffer | TypedArray} The input to validate. 
* Returns: {boolean} This function returns `true` if `input` contains only valid UTF-8-encoded data, @@ -5247,7 +5247,7 @@ Throws if the `input` is a detached array buffer. added: v0.5.4 --> -* {integer} **Default:** `50` +* Type: {integer} **Default:** `50` Returns the maximum number of bytes that will be returned when `buf.inspect()` is called. This can be overridden by user modules. See @@ -5259,7 +5259,7 @@ Returns the maximum number of bytes that will be returned when added: v3.0.0 --> -* {integer} The largest size allowed for a single `Buffer` instance. +* Type: {integer} The largest size allowed for a single `Buffer` instance. An alias for [`buffer.constants.MAX_LENGTH`][]. @@ -5269,7 +5269,7 @@ An alias for [`buffer.constants.MAX_LENGTH`][]. added: v3.0.0 --> -* {integer} The largest length allowed for a single `string` instance. +* Type: {integer} The largest length allowed for a single `string` instance. An alias for [`buffer.constants.MAX_STRING_LENGTH`][]. @@ -5383,7 +5383,7 @@ changes: 232 - 1 on 64-bit architectures. --> -* {integer} The largest size allowed for a single `Buffer` instance. +* Type: {integer} The largest size allowed for a single `Buffer` instance. On 32-bit architectures, this value currently is 230 - 1 (about 1 GiB). @@ -5400,7 +5400,7 @@ This value is also available as [`buffer.kMaxLength`][]. added: v8.2.0 --> -* {integer} The largest length allowed for a single `string` instance. +* Type: {integer} The largest length allowed for a single `string` instance. Represents the largest `length` that a `string` primitive can have, counted in UTF-16 code units. diff --git a/doc/api/child_process.md b/doc/api/child_process.md index 30c286e837d8f7..b09ccbd4df8da3 100644 --- a/doc/api/child_process.md +++ b/doc/api/child_process.md @@ -1602,7 +1602,7 @@ changes: description: The object no longer accidentally exposes native C++ bindings. --> -* {Object} A pipe representing the IPC channel to the child process. +* Type: {Object} A pipe representing the IPC channel to the child process. The `subprocess.channel` property is a reference to the child's IPC channel. If no IPC channel exists, this property is `undefined`. @@ -1631,7 +1631,7 @@ running, and lets it finish even while the channel is open. added: v0.7.2 --> -* {boolean} Set to `false` after `subprocess.disconnect()` is called. +* Type: {boolean} Set to `false` after `subprocess.disconnect()` is called. The `subprocess.connected` property indicates whether it is still possible to send and receive messages from a child process. When `subprocess.connected` is @@ -1660,7 +1660,7 @@ within the child process to close the IPC channel as well. ### `subprocess.exitCode` -* {integer} +* Type: {integer} The `subprocess.exitCode` property indicates the exit code of the child process. If the child process is still running, the field will be `null`. @@ -1783,7 +1783,7 @@ Calls [`subprocess.kill()`][] with `'SIGTERM'`. added: v0.5.10 --> -* {boolean} Set to `true` after `subprocess.kill()` is used to successfully +* Type: {boolean} Set to `true` after `subprocess.kill()` is used to successfully send a signal to the child process. The `subprocess.killed` property indicates whether the child process @@ -1796,7 +1796,7 @@ does not indicate that the child process has been terminated. added: v0.1.90 --> -* {integer|undefined} +* Type: {integer|undefined} Returns the process identifier (PID) of the child process. 
If the child process fails to spawn due to errors, then the value is `undefined` and `error` is @@ -2094,21 +2094,21 @@ connection to the child. ### `subprocess.signalCode` -* {string|null} +* Type: {string|null} The `subprocess.signalCode` property indicates the signal received by the child process if any, else `null`. ### `subprocess.spawnargs` -* {Array} +* Type: {Array} The `subprocess.spawnargs` property represents the full list of command-line arguments the child process was launched with. ### `subprocess.spawnfile` -* {string} +* Type: {string} The `subprocess.spawnfile` property indicates the executable file name of the child process that is launched. @@ -2126,7 +2126,7 @@ in which the child process is launched. added: v0.1.90 --> -* {stream.Readable|null|undefined} +* Type: {stream.Readable|null|undefined} A `Readable Stream` that represents the child process's `stderr`. @@ -2145,7 +2145,7 @@ if the child process could not be successfully spawned. added: v0.1.90 --> -* {stream.Writable|null|undefined} +* Type: {stream.Writable|null|undefined} A `Writable Stream` that represents the child process's `stdin`. @@ -2167,7 +2167,7 @@ if the child process could not be successfully spawned. added: v0.7.10 --> -* {Array} +* Type: {Array} A sparse array of pipes to the child process, corresponding with positions in the [`stdio`][] option passed to [`child_process.spawn()`][] that have been set @@ -2234,7 +2234,7 @@ not be successfully spawned. added: v0.1.90 --> -* {stream.Readable|null|undefined} +* Type: {stream.Readable|null|undefined} A `Readable Stream` that represents the child process's `stdout`. diff --git a/doc/api/cli.md b/doc/api/cli.md index 78c0794a57fd4f..1a1e0ec1d89a4d 100644 --- a/doc/api/cli.md +++ b/doc/api/cli.md @@ -36,8 +36,7 @@ If a file is found, its path will be passed to the point to be loaded with ECMAScript module loader, such as `--import` or [`--experimental-default-type=module`][]. * The file has an `.mjs` extension. -* The file has an `.mjs` or `.wasm` (with `--experimental-wasm-modules`) - extension. +* The file has an `.mjs` or `.wasm` extension. * The file does not have a `.cjs` extension, and the nearest parent `package.json` file contains a top-level [`"type"`][] field with a value of `"module"`. @@ -49,8 +48,7 @@ Otherwise, the file is loaded using the CommonJS module loader. See When loading, the [ES module loader][Modules loaders] loads the program entry point, the `node` command will accept as input only files with `.js`, -`.mjs`, or `.cjs` extensions; with `.wasm` extensions when -[`--experimental-wasm-modules`][] is enabled; and with no extension when +`.mjs`, `.cjs` or `.wasm` extensions; and with no extension when [`--experimental-default-type=module`][] is passed. ## Options @@ -499,13 +497,16 @@ $ ls *.cpuprofile CPU.20190409.202950.15293.0.0.cpuprofile ``` -If `--cpu-prof-name` is specified, the provided value will be used as-is; patterns such as -`${hhmmss}` or `${pid}` are not supported. +If `--cpu-prof-name` is specified, the provided value is used as a template +for the file name. 
The following placeholder is supported and will be +substituted at runtime: + +* `${pid}` — the current process ID ```console $ node --cpu-prof --cpu-prof-name 'CPU.${pid}.cpuprofile' index.js $ ls *.cpuprofile -'CPU.${pid}.cpuprofile' +CPU.15293.cpuprofile ``` ### `--cpu-prof-dir` @@ -1012,8 +1013,7 @@ Node.js currently defaults to CommonJS to instead default to ECMAScript modules, with the exception of folders and subfolders below `node_modules`, for backward compatibility. -Under `--experimental-default-type=module` and `--experimental-wasm-modules`, -files with no extension will be treated as WebAssembly if they begin with the +Files with no extension will be treated as WebAssembly if they begin with the WebAssembly magic number (`\0asm`); otherwise they will be treated as ES module JavaScript. @@ -1046,6 +1046,17 @@ passing a second `parentURL` argument for contextual resolution. Previously gated the entire `import.meta.resolve` feature. +### `--experimental-inspector-network-resource` + + + +> Stability: 1.1 - Active Development + +Enable experimental support for inspector network resources. + ### `--experimental-loader=module` - -Enable experimental WebAssembly module support. - ### `--experimental-webstorage` +> Stability: 1.1 - Active Development + Customizes the signal sent to the process on watch mode restarts. ```bash @@ -3338,7 +3345,6 @@ one is included in the list below. * `--experimental-transform-types` * `--experimental-vm-modules` * `--experimental-wasi-unstable-preview1` -* `--experimental-wasm-modules` * `--experimental-webstorage` * `--force-context-aware` * `--force-fips` @@ -3584,6 +3590,18 @@ If `value` equals `'0'`, certificate validation is disabled for TLS connections. This makes TLS, and HTTPS by extension, insecure. The use of this environment variable is strongly discouraged. +### `NODE_USE_SYSTEM_CA=1` + + + +Node.js uses the trusted CA certificates present in the system store along with +the `--use-bundled-ca` option and the `NODE_EXTRA_CA_CERTS` environment variable. + +This can also be enabled using the [`--use-system-ca`][] command-line flag. +When both are set, `--use-system-ca` takes precedence. 
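A minimal sketch of the two equivalent ways to opt in, assuming a hypothetical TLS client script named `client.js` whose server certificate chains to a CA present in the operating system trust store:

```bash
# Opt in through the environment variable...
NODE_USE_SYSTEM_CA=1 node client.js

# ...or through the equivalent command-line flag; the flag takes precedence when both are set.
node --use-system-ca client.js
```
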
+ ### `NODE_V8_COVERAGE=dir` When set, Node.js will begin outputting [V8 JavaScript code coverage][] and @@ -3908,7 +3926,6 @@ node --stack-trace-limit=12 -p -e "Error.stackTraceLimit" # prints 12 [`--env-file`]: #--env-fileconfig [`--experimental-default-type=module`]: #--experimental-default-typetype [`--experimental-sea-config`]: single-executable-applications.md#generating-single-executable-preparation-blobs -[`--experimental-wasm-modules`]: #--experimental-wasm-modules [`--heap-prof-dir`]: #--heap-prof-dir [`--import`]: #--importmodule [`--no-experimental-strip-types`]: #--no-experimental-strip-types @@ -3917,6 +3934,7 @@ node --stack-trace-limit=12 -p -e "Error.stackTraceLimit" # prints 12 [`--print`]: #-p---print-script [`--redirect-warnings`]: #--redirect-warningsfile [`--require`]: #-r---require-module +[`--use-system-ca`]: #--use-system-ca [`AsyncLocalStorage`]: async_context.md#class-asynclocalstorage [`Atomics.wait()`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Atomics/wait [`Buffer`]: buffer.md#class-buffer diff --git a/doc/api/cluster.md b/doc/api/cluster.md index c5946ba8efb6c5..fa9942f4668bdf 100644 --- a/doc/api/cluster.md +++ b/doc/api/cluster.md @@ -450,7 +450,7 @@ if (cluster.isPrimary) { added: v6.0.0 --> -* {boolean} +* Type: {boolean} This property is `true` if the worker exited due to `.disconnect()`. If the worker exited any other way, it is `false`. If the @@ -477,7 +477,7 @@ worker.kill(); added: v0.8.0 --> -* {integer} +* Type: {integer} Each new worker is given its own unique id, this id is stored in the `id`. @@ -595,7 +595,7 @@ it is [`kill()`][]. added: v0.7.0 --> -* {ChildProcess} +* Type: {ChildProcess} All workers are created using [`child_process.fork()`][], the returned object from this function is stored as `.process`. In a worker, the global `process` @@ -860,7 +860,7 @@ Deprecated alias for [`cluster.isPrimary`][]. added: v16.0.0 --> -* {boolean} +* Type: {boolean} True if the process is a primary. This is determined by the `process.env.NODE_UNIQUE_ID`. If `process.env.NODE_UNIQUE_ID` is @@ -872,7 +872,7 @@ undefined, then `isPrimary` is `true`. added: v0.6.0 --> -* {boolean} +* Type: {boolean} True if the process is not a primary (it is the negation of `cluster.isPrimary`). @@ -919,7 +919,7 @@ changes: description: The `stdio` option is supported now. --> -* {Object} +* Type: {Object} * `execArgv` {string\[]} List of string arguments passed to the Node.js executable. **Default:** `process.execArgv`. * `exec` {string} File path to worker file. **Default:** `process.argv[1]`. @@ -1026,7 +1026,7 @@ This can only be called from the primary process. added: v0.7.0 --> -* {Object} +* Type: {Object} A reference to the current worker object. Not available in the primary process. @@ -1060,7 +1060,7 @@ if (cluster.isPrimary) { added: v0.7.0 --> -* {Object} +* Type: {Object} A hash that stores the active worker objects, keyed by `id` field. This makes it easy to loop through all the workers. It is only available in the primary diff --git a/doc/api/crypto.md b/doc/api/crypto.md index 1c74794ec54b90..c59d7f1042d4ac 100644 --- a/doc/api/crypto.md +++ b/doc/api/crypto.md @@ -1993,15 +1993,15 @@ changes: for RSA-PSS keys. --> -* {Object} - * `modulusLength`: {number} Key size in bits (RSA, DSA). - * `publicExponent`: {bigint} Public exponent (RSA). - * `hashAlgorithm`: {string} Name of the message digest (RSA-PSS). 
- * `mgf1HashAlgorithm`: {string} Name of the message digest used by +* Type: {Object} + * `modulusLength` {number} Key size in bits (RSA, DSA). + * `publicExponent` {bigint} Public exponent (RSA). + * `hashAlgorithm` {string} Name of the message digest (RSA-PSS). + * `mgf1HashAlgorithm` {string} Name of the message digest used by MGF1 (RSA-PSS). - * `saltLength`: {number} Minimal salt length in bytes (RSA-PSS). - * `divisorLength`: {number} Size of `q` in bits (DSA). - * `namedCurve`: {string} Name of the curve (EC). + * `saltLength` {number} Minimal salt length in bytes (RSA-PSS). + * `divisorLength` {number} Size of `q` in bits (DSA). + * `namedCurve` {string} Name of the curve (EC). This property exists only on asymmetric keys. Depending on the type of the key, this object contains information about the key. None of the information obtained @@ -2039,7 +2039,7 @@ changes: description: Added support for `'ed25519'` and `'ed448'`. --> -* {string} +* Type: {string} For asymmetric keys, this property represents the type of the key. Supported key types are: @@ -2065,7 +2065,7 @@ added: - v16.15.0 --> -* `otherKeyObject`: {KeyObject} A `KeyObject` with which to +* `otherKeyObject` {KeyObject} A `KeyObject` with which to compare `keyObject`. * Returns: {boolean} @@ -2083,27 +2083,27 @@ changes: description: Added support for `'jwk'` format. --> -* `options`: {Object} +* `options` {Object} * Returns: {string | Buffer | Object} For symmetric keys, the following encoding options can be used: -* `format`: {string} Must be `'buffer'` (default) or `'jwk'`. +* `format` {string} Must be `'buffer'` (default) or `'jwk'`. For public keys, the following encoding options can be used: -* `type`: {string} Must be one of `'pkcs1'` (RSA only) or `'spki'`. -* `format`: {string} Must be `'pem'`, `'der'`, or `'jwk'`. +* `type` {string} Must be one of `'pkcs1'` (RSA only) or `'spki'`. +* `format` {string} Must be `'pem'`, `'der'`, or `'jwk'`. For private keys, the following encoding options can be used: -* `type`: {string} Must be one of `'pkcs1'` (RSA only), `'pkcs8'` or +* `type` {string} Must be one of `'pkcs1'` (RSA only), `'pkcs8'` or `'sec1'` (EC only). -* `format`: {string} Must be `'pem'`, `'der'`, or `'jwk'`. -* `cipher`: {string} If specified, the private key will be encrypted with +* `format` {string} Must be `'pem'`, `'der'`, or `'jwk'`. +* `cipher` {string} If specified, the private key will be encrypted with the given `cipher` and `passphrase` using PKCS#5 v2.0 password based encryption. -* `passphrase`: {string | Buffer} The passphrase to use for encryption, see +* `passphrase` {string | Buffer} The passphrase to use for encryption, see `cipher`. The result type depends on the selected encoding format, when PEM the @@ -2129,7 +2129,7 @@ PKCS#1 and SEC1 encryption. added: v11.6.0 --> -* {number} +* Type: {number} For secret keys, this property represents the size of the key in bytes. This property is `undefined` for asymmetric keys. @@ -2142,12 +2142,12 @@ added: v22.10.0 -* `algorithm`: {string|Algorithm|RsaHashedImportParams|EcKeyImportParams|HmacImportParams} +* `algorithm` {string|Algorithm|RsaHashedImportParams|EcKeyImportParams|HmacImportParams} -* `extractable`: {boolean} -* `keyUsages`: {string\[]} See [Key usages][]. +* `extractable` {boolean} +* `keyUsages` {string\[]} See [Key usages][]. * Returns: {CryptoKey} Converts a `KeyObject` instance to a `CryptoKey`. @@ -2158,7 +2158,7 @@ Converts a `KeyObject` instance to a `CryptoKey`. 
added: v11.6.0 --> -* {string} +* Type: {string} Depending on the type of this `KeyObject`, this property is either `'secret'` for secret (symmetric) keys, `'public'` for public (asymmetric) keys @@ -2677,16 +2677,6 @@ added: v15.6.0 Checks whether the public key for this certificate is consistent with the given private key. -### `x509.extKeyUsage` - - - -* Type: {string\[]} - -An array detailing the key extended usages for this certificate. - ### `x509.fingerprint` + +* Type: {string\[]} + +An array detailing the key extended usages for this certificate. + ### `x509.publicKey` -* {Object} +* Type: {Object} An object containing commonly used constants for crypto and security related operations. The specific constants currently defined are described in @@ -3408,14 +3408,14 @@ changes: * `key` {Object|string|ArrayBuffer|Buffer|TypedArray|DataView} - * `key`: {string|ArrayBuffer|Buffer|TypedArray|DataView|Object} The key + * `key` {string|ArrayBuffer|Buffer|TypedArray|DataView|Object} The key material, either in PEM, DER, or JWK format. - * `format`: {string} Must be `'pem'`, `'der'`, or '`'jwk'`. + * `format` {string} Must be `'pem'`, `'der'`, or '`'jwk'`. **Default:** `'pem'`. - * `type`: {string} Must be `'pkcs1'`, `'pkcs8'` or `'sec1'`. This option is + * `type` {string} Must be `'pkcs1'`, `'pkcs8'` or `'sec1'`. This option is required only if the `format` is `'der'` and ignored otherwise. - * `passphrase`: {string | Buffer} The passphrase to use for decryption. - * `encoding`: {string} The string encoding to use when `key` is a string. + * `passphrase` {string | Buffer} The passphrase to use for decryption. + * `encoding` {string} The string encoding to use when `key` is a string. * Returns: {KeyObject} @@ -3451,11 +3451,11 @@ changes: * `key` {Object|string|ArrayBuffer|Buffer|TypedArray|DataView} - * `key`: {string|ArrayBuffer|Buffer|TypedArray|DataView|Object} The key + * `key` {string|ArrayBuffer|Buffer|TypedArray|DataView|Object} The key material, either in PEM, DER, or JWK format. - * `format`: {string} Must be `'pem'`, `'der'`, or `'jwk'`. + * `format` {string} Must be `'pem'`, `'der'`, or `'jwk'`. **Default:** `'pem'`. - * `type`: {string} Must be `'pkcs1'` or `'spki'`. This option is + * `type` {string} Must be `'pkcs1'` or `'spki'`. This option is required only if the `format` is `'der'` and ignored otherwise. * `encoding` {string} The string encoding to use when `key` is a string. * Returns: {KeyObject} @@ -3586,18 +3586,18 @@ changes: `ERR_INVALID_CALLBACK`. --> -* `type`: {string} The intended use of the generated secret key. Currently +* `type` {string} The intended use of the generated secret key. Currently accepted values are `'hmac'` and `'aes'`. -* `options`: {Object} - * `length`: {number} The bit length of the key to generate. This must be a +* `options` {Object} + * `length` {number} The bit length of the key to generate. This must be a value greater than 0. * If `type` is `'hmac'`, the minimum is 8, and the maximum length is 231-1. If the value is not a multiple of 8, the generated key will be truncated to `Math.floor(length / 8)`. * If `type` is `'aes'`, the length must be one of `128`, `192`, or `256`. -* `callback`: {Function} - * `err`: {Error} - * `key`: {KeyObject} +* `callback` {Function} + * `err` {Error} + * `key` {KeyObject} Asynchronously generates a new random secret key of the given `length`. The `type` will determine which validations will be performed on the `length`. @@ -3661,30 +3661,30 @@ changes: produce key objects if no encoding was specified. 
--> -* `type`: {string} Must be `'rsa'`, `'rsa-pss'`, `'dsa'`, `'ec'`, `'ed25519'`, +* `type` {string} Must be `'rsa'`, `'rsa-pss'`, `'dsa'`, `'ec'`, `'ed25519'`, `'ed448'`, `'x25519'`, `'x448'`, or `'dh'`. -* `options`: {Object} - * `modulusLength`: {number} Key size in bits (RSA, DSA). - * `publicExponent`: {number} Public exponent (RSA). **Default:** `0x10001`. - * `hashAlgorithm`: {string} Name of the message digest (RSA-PSS). - * `mgf1HashAlgorithm`: {string} Name of the message digest used by +* `options` {Object} + * `modulusLength` {number} Key size in bits (RSA, DSA). + * `publicExponent` {number} Public exponent (RSA). **Default:** `0x10001`. + * `hashAlgorithm` {string} Name of the message digest (RSA-PSS). + * `mgf1HashAlgorithm` {string} Name of the message digest used by MGF1 (RSA-PSS). - * `saltLength`: {number} Minimal salt length in bytes (RSA-PSS). - * `divisorLength`: {number} Size of `q` in bits (DSA). - * `namedCurve`: {string} Name of the curve to use (EC). - * `prime`: {Buffer} The prime parameter (DH). - * `primeLength`: {number} Prime length in bits (DH). - * `generator`: {number} Custom generator (DH). **Default:** `2`. - * `groupName`: {string} Diffie-Hellman group name (DH). See + * `saltLength` {number} Minimal salt length in bytes (RSA-PSS). + * `divisorLength` {number} Size of `q` in bits (DSA). + * `namedCurve` {string} Name of the curve to use (EC). + * `prime` {Buffer} The prime parameter (DH). + * `primeLength` {number} Prime length in bits (DH). + * `generator` {number} Custom generator (DH). **Default:** `2`. + * `groupName` {string} Diffie-Hellman group name (DH). See [`crypto.getDiffieHellman()`][]. - * `paramEncoding`: {string} Must be `'named'` or `'explicit'` (EC). + * `paramEncoding` {string} Must be `'named'` or `'explicit'` (EC). **Default:** `'named'`. - * `publicKeyEncoding`: {Object} See [`keyObject.export()`][]. - * `privateKeyEncoding`: {Object} See [`keyObject.export()`][]. -* `callback`: {Function} - * `err`: {Error} - * `publicKey`: {string | Buffer | KeyObject} - * `privateKey`: {string | Buffer | KeyObject} + * `publicKeyEncoding` {Object} See [`keyObject.export()`][]. + * `privateKeyEncoding` {Object} See [`keyObject.export()`][]. +* `callback` {Function} + * `err` {Error} + * `publicKey` {string | Buffer | KeyObject} + * `privateKey` {string | Buffer | KeyObject} Generates a new asymmetric key pair of the given `type`. RSA, RSA-PSS, DSA, EC, Ed25519, Ed448, X25519, X448, and DH are currently supported. @@ -3775,29 +3775,29 @@ changes: produce key objects if no encoding was specified. --> -* `type`: {string} Must be `'rsa'`, `'rsa-pss'`, `'dsa'`, `'ec'`, `'ed25519'`, +* `type` {string} Must be `'rsa'`, `'rsa-pss'`, `'dsa'`, `'ec'`, `'ed25519'`, `'ed448'`, `'x25519'`, `'x448'`, or `'dh'`. -* `options`: {Object} - * `modulusLength`: {number} Key size in bits (RSA, DSA). - * `publicExponent`: {number} Public exponent (RSA). **Default:** `0x10001`. - * `hashAlgorithm`: {string} Name of the message digest (RSA-PSS). - * `mgf1HashAlgorithm`: {string} Name of the message digest used by +* `options` {Object} + * `modulusLength` {number} Key size in bits (RSA, DSA). + * `publicExponent` {number} Public exponent (RSA). **Default:** `0x10001`. + * `hashAlgorithm` {string} Name of the message digest (RSA-PSS). + * `mgf1HashAlgorithm` {string} Name of the message digest used by MGF1 (RSA-PSS). - * `saltLength`: {number} Minimal salt length in bytes (RSA-PSS). - * `divisorLength`: {number} Size of `q` in bits (DSA). 
- * `namedCurve`: {string} Name of the curve to use (EC).
- * `prime`: {Buffer} The prime parameter (DH).
- * `primeLength`: {number} Prime length in bits (DH).
- * `generator`: {number} Custom generator (DH). **Default:** `2`.
- * `groupName`: {string} Diffie-Hellman group name (DH). See
+ * `saltLength` {number} Minimal salt length in bytes (RSA-PSS).
+ * `divisorLength` {number} Size of `q` in bits (DSA).
+ * `namedCurve` {string} Name of the curve to use (EC).
+ * `prime` {Buffer} The prime parameter (DH).
+ * `primeLength` {number} Prime length in bits (DH).
+ * `generator` {number} Custom generator (DH). **Default:** `2`.
+ * `groupName` {string} Diffie-Hellman group name (DH). See
   [`crypto.getDiffieHellman()`][].
- * `paramEncoding`: {string} Must be `'named'` or `'explicit'` (EC).
+ * `paramEncoding` {string} Must be `'named'` or `'explicit'` (EC).
   **Default:** `'named'`.
- * `publicKeyEncoding`: {Object} See [`keyObject.export()`][].
- * `privateKeyEncoding`: {Object} See [`keyObject.export()`][].
+ * `publicKeyEncoding` {Object} See [`keyObject.export()`][].
+ * `privateKeyEncoding` {Object} See [`keyObject.export()`][].
 * Returns: {Object}
- * `publicKey`: {string | Buffer | KeyObject}
- * `privateKey`: {string | Buffer | KeyObject}
+ * `publicKey` {string | Buffer | KeyObject}
+ * `privateKey` {string | Buffer | KeyObject}

 Generates a new asymmetric key pair of the given `type`. RSA, RSA-PSS, DSA, EC,
 Ed25519, Ed448, X25519, X448, and DH are currently supported.
@@ -3866,10 +3866,10 @@ it will be a buffer containing the data encoded as DER.
 added: v15.0.0
 -->

-* `type`: {string} The intended use of the generated secret key. Currently
+* `type` {string} The intended use of the generated secret key. Currently
  accepted values are `'hmac'` and `'aes'`.
-* `options`: {Object}
- * `length`: {number} The bit length of the key to generate.
+* `options` {Object}
+ * `length` {number} The bit length of the key to generate.
   * If `type` is `'hmac'`, the minimum is 8, and the maximum length is
     231-1. If the value is not a multiple of 8, the generated
     key will be truncated to `Math.floor(length / 8)`.
@@ -4010,10 +4010,10 @@ the process unresponsive.
 added: v15.0.0
 -->

-* `nameOrNid`: {string|number} The name or nid of the cipher to query.
-* `options`: {Object}
- * `keyLength`: {number} A test key length.
- * `ivLength`: {number} A test IV length.
+* `nameOrNid` {string|number} The name or nid of the cipher to query.
+* `options` {Object}
+ * `keyLength` {number} A test key length.
+ * `ivLength` {number} A test IV length.
 * Returns: {Object}
  * `name` {string} The name of the cipher
  * `nid` {number} The nid of the cipher
diff --git a/doc/api/deprecations.md b/doc/api/deprecations.md
index 5f280bbc2edf51..7ee2f298e83875 100644
--- a/doc/api/deprecations.md
+++ b/doc/api/deprecations.md
@@ -3865,6 +3865,21 @@ Type: Documentation-only (supports [`--pending-deprecation`][])
 Creating SHAKE-128 and SHAKE-256 digests without an explicit
 `options.outputLength` is deprecated.

+### DEP0199: `require('node:_http_*')`
+
+
+
+Type: Documentation-only
+
+The `node:_http_agent`, `node:_http_client`, `node:_http_common`, `node:_http_incoming`,
+`node:_http_outgoing`, and `node:_http_server` modules are deprecated because they are part of
+the internal Node.js implementation rather than a public-facing API; use `node:http` instead.
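+
+As a rough illustration, code that currently reaches into one of these internal
+modules can usually switch to the equivalent public export of `node:http`; the
+`Agent` class shown here is available from both:
+
+```js
+// Deprecated: relies on a Node.js internal module.
+// const { Agent } = require('node:_http_agent');
+
+// Preferred: use the public `node:http` API instead.
+const { Agent } = require('node:http');
+
+const keepAliveAgent = new Agent({ keepAlive: true });
+```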
+ [DEP0142]: #dep0142-repl_builtinlibs [NIST SP 800-38D]: https://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-38d.pdf [RFC 6066]: https://tools.ietf.org/html/rfc6066#section-3 diff --git a/doc/api/diagnostics_channel.md b/doc/api/diagnostics_channel.md index 0aeef6c65c5db1..fe0d28fc29d281 100644 --- a/doc/api/diagnostics_channel.md +++ b/doc/api/diagnostics_channel.md @@ -1106,35 +1106,35 @@ for the sync error and one for the async error. > Stability: 1 - Experimental -`console.log` +##### Event: `'console.log'` * `args` {any\[]} Emitted when `console.log()` is called. Receives and array of the arguments passed to `console.log()`. -`console.info` +##### Event: `'console.info'` * `args` {any\[]} Emitted when `console.info()` is called. Receives and array of the arguments passed to `console.info()`. -`console.debug` +##### Event: `'console.debug'` * `args` {any\[]} Emitted when `console.debug()` is called. Receives and array of the arguments passed to `console.debug()`. -`console.warn` +##### Event: `'console.warn'` * `args` {any\[]} Emitted when `console.warn()` is called. Receives and array of the arguments passed to `console.warn()`. -`console.error` +##### Event: `'console.error'` * `args` {any\[]} @@ -1145,34 +1145,34 @@ passed to `console.error()`. > Stability: 1 - Experimental -`http.client.request.created` +##### Event: `'http.client.request.created'` * `request` {http.ClientRequest} Emitted when client creates a request object. Unlike `http.client.request.start`, this event is emitted before the request has been sent. -`http.client.request.start` +##### Event: `'http.client.request.start'` * `request` {http.ClientRequest} Emitted when client starts a request. -`http.client.request.error` +##### Event: `'http.client.request.error'` * `request` {http.ClientRequest} * `error` {Error} Emitted when an error occurs during a client request. -`http.client.response.finish` +##### Event: `'http.client.response.finish'` * `request` {http.ClientRequest} * `response` {http.IncomingMessage} Emitted when client receives a response. -`http.server.request.start` +##### Event: `'http.server.request.start'` * `request` {http.IncomingMessage} * `response` {http.ServerResponse} @@ -1181,7 +1181,7 @@ Emitted when client receives a response. Emitted when server receives a request. -`http.server.response.created` +##### Event: `'http.server.response.created'` * `request` {http.IncomingMessage} * `response` {http.ServerResponse} @@ -1189,7 +1189,7 @@ Emitted when server receives a request. Emitted when server creates a response. The event is emitted before the response is sent. -`http.server.response.finish` +##### Event: `'http.server.response.finish'` * `request` {http.IncomingMessage} * `response` {http.ServerResponse} @@ -1202,28 +1202,28 @@ Emitted when server sends a response. > Stability: 1 - Experimental -`http2.client.stream.created` +##### Event: `'http2.client.stream.created'` * `stream` {ClientHttp2Stream} * `headers` {HTTP/2 Headers Object} Emitted when a stream is created on the client. -`http2.client.stream.start` +##### Event: `'http2.client.stream.start'` * `stream` {ClientHttp2Stream} * `headers` {HTTP/2 Headers Object} Emitted when a stream is started on the client. -`http2.client.stream.error` +##### Event: `'http2.client.stream.error'` * `stream` {ClientHttp2Stream} * `error` {Error} Emitted when an error occurs during the processing of a stream on the client. 
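+
+As an illustrative sketch (the handler body is an example, not mandated by the
+API), a subscriber to this channel can log failed client streams:
+
+```js
+const diagnostics_channel = require('node:diagnostics_channel');
+
+diagnostics_channel.subscribe('http2.client.stream.error', ({ error }) => {
+  console.error('HTTP/2 client stream failed:', error.message);
+});
+```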
-`http2.client.stream.finish` +##### Event: `'http2.client.stream.finish'` * `stream` {ClientHttp2Stream} * `headers` {HTTP/2 Headers Object} @@ -1231,35 +1231,35 @@ Emitted when an error occurs during the processing of a stream on the client. Emitted when a stream is received on the client. -`http2.client.stream.close` +##### Event: `'http2.client.stream.close'` * `stream` {ClientHttp2Stream} Emitted when a stream is closed on the client. The HTTP/2 error code used when closing the stream can be retrieved using the `stream.rstCode` property. -`http2.server.stream.created` +##### Event: `'http2.server.stream.created'` * `stream` {ServerHttp2Stream} * `headers` {HTTP/2 Headers Object} Emitted when a stream is created on the server. -`http2.server.stream.start` +##### Event: `'http2.server.stream.start'` * `stream` {ServerHttp2Stream} * `headers` {HTTP/2 Headers Object} Emitted when a stream is started on the server. -`http2.server.stream.error` +##### Event: `'http2.server.stream.error'` * `stream` {ServerHttp2Stream} * `error` {Error} Emitted when an error occurs during the processing of a stream on the server. -`http2.server.stream.finish` +##### Event: `'http2.server.stream.finish'` * `stream` {ServerHttp2Stream} * `headers` {HTTP/2 Headers Object} @@ -1267,7 +1267,7 @@ Emitted when an error occurs during the processing of a stream on the server. Emitted when a stream is sent on the server. -`http2.server.stream.close` +##### Event: `'http2.server.stream.close'` * `stream` {ServerHttp2Stream} @@ -1278,52 +1278,52 @@ closing the stream can be retrieved using the `stream.rstCode` property. > Stability: 1 - Experimental -`module.require.start` +##### Event: `'module.require.start'` * `event` {Object} containing the following properties - * `id` - Argument passed to `require()`. Module name. - * `parentFilename` - Name of the module that attempted to require(id). + * `id` Argument passed to `require()`. Module name. + * `parentFilename` Name of the module that attempted to require(id). Emitted when `require()` is executed. See [`start` event][]. -`module.require.end` +##### Event: `'module.require.end'` * `event` {Object} containing the following properties - * `id` - Argument passed to `require()`. Module name. - * `parentFilename` - Name of the module that attempted to require(id). + * `id` Argument passed to `require()`. Module name. + * `parentFilename` Name of the module that attempted to require(id). Emitted when a `require()` call returns. See [`end` event][]. -`module.require.error` +##### Event: `'module.require.error'` * `event` {Object} containing the following properties - * `id` - Argument passed to `require()`. Module name. - * `parentFilename` - Name of the module that attempted to require(id). + * `id` Argument passed to `require()`. Module name. + * `parentFilename` Name of the module that attempted to require(id). * `error` {Error} Emitted when a `require()` throws an error. See [`error` event][]. -`module.import.asyncStart` +##### Event: `'module.import.asyncStart'` * `event` {Object} containing the following properties - * `id` - Argument passed to `import()`. Module name. - * `parentURL` - URL object of the module that attempted to import(id). + * `id` Argument passed to `import()`. Module name. + * `parentURL` URL object of the module that attempted to import(id). Emitted when `import()` is invoked. See [`asyncStart` event][]. 
-`module.import.asyncEnd` +##### Event: `'module.import.asyncEnd'` * `event` {Object} containing the following properties - * `id` - Argument passed to `import()`. Module name. - * `parentURL` - URL object of the module that attempted to import(id). + * `id` Argument passed to `import()`. Module name. + * `parentURL` URL object of the module that attempted to import(id). Emitted when `import()` has completed. See [`asyncEnd` event][]. -`module.import.error` +##### Event: `'module.import.error'` * `event` {Object} containing the following properties - * `id` - Argument passed to `import()`. Module name. - * `parentURL` - URL object of the module that attempted to import(id). + * `id` Argument passed to `import()`. Module name. + * `parentURL` URL object of the module that attempted to import(id). * `error` {Error} Emitted when a `import()` throws an error. See [`error` event][]. @@ -1332,32 +1332,32 @@ Emitted when a `import()` throws an error. See [`error` event][]. > Stability: 1 - Experimental -`net.client.socket` +##### Event: `'net.client.socket'` * `socket` {net.Socket|tls.TLSSocket} Emitted when a new TCP or pipe client socket connection is created. -`net.server.socket` +##### Event: `'net.server.socket'` * `socket` {net.Socket} Emitted when a new TCP or pipe connection is received. -`tracing:net.server.listen:asyncStart` +##### Event: `'tracing:net.server.listen:asyncStart'` * `server` {net.Server} * `options` {Object} Emitted when [`net.Server.listen()`][] is invoked, before the port or pipe is actually setup. -`tracing:net.server.listen:asyncEnd` +##### Event: `'tracing:net.server.listen:asyncEnd'` * `server` {net.Server} Emitted when [`net.Server.listen()`][] has completed and thus the server is ready to accept connection. -`tracing:net.server.listen:error` +##### Event: `'tracing:net.server.listen:error'` * `server` {net.Server} * `error` {Error} @@ -1368,7 +1368,7 @@ Emitted when [`net.Server.listen()`][] is returning an error. > Stability: 1 - Experimental -`udp.socket` +##### Event: `'udp.socket'` * `socket` {dgram.Socket} @@ -1382,13 +1382,13 @@ Emitted when a new UDP socket is created. added: v16.18.0 --> -`child_process` +##### Event: `'child_process'` * `process` {ChildProcess} Emitted when a new process is created. -`execve` +##### Event: `'execve'` * `execPath` {string} * `args` {string\[]} @@ -1404,16 +1404,15 @@ Emitted when [`process.execve()`][] is invoked. added: v16.18.0 --> -`worker_threads` +##### Event: `'worker_threads'` -* `worker` [`Worker`][] +* `worker` {Worker} Emitted when a new thread is created. [TracingChannel Channels]: #tracingchannel-channels [`'uncaughtException'`]: process.md#event-uncaughtexception [`TracingChannel`]: #class-tracingchannel -[`Worker`]: worker_threads.md#class-worker [`asyncEnd` event]: #asyncendevent [`asyncStart` event]: #asyncstartevent [`channel.bindStore(store)`]: #channelbindstorestore-transform diff --git a/doc/api/dns.md b/doc/api/dns.md index d2d7c85ecb6eb7..a8c7cb51c58218 100644 --- a/doc/api/dns.md +++ b/doc/api/dns.md @@ -157,6 +157,8 @@ Create a new resolver. default timeout. * `tries` {integer} The number of tries the resolver will try contacting each name server before giving up. **Default:** `4` + * `maxTimeout` {integer} The max retry timeout, in milliseconds. + **Default:** `0`, disabled. ### `resolver.cancel()` @@ -850,14 +852,10 @@ changes: `ERR_INVALID_CALLBACK`. --> - - * `hostname` {string} * `callback` {Function} * `err` {Error} - * `records`
\ - - + * `records` {string\[]} Uses the DNS protocol to resolve text queries (`TXT` records) for the `hostname`. The `records` argument passed to the `callback` function is a diff --git a/doc/api/documentation.md b/doc/api/documentation.md index c6edb13ad613cd..6938ac40a21374 100644 --- a/doc/api/documentation.md +++ b/doc/api/documentation.md @@ -41,9 +41,9 @@ The stability indexes are as follows: > minimum viability. > * 1.2 - Release candidate. Experimental features at this stage are hopefully > ready to become stable. No further breaking changes are anticipated but may -> still occur in response to user feedback. We encourage user testing and -> feedback so that we can know that this feature is ready to be marked as -> stable. +> still occur in response to user feedback or the features' underlying +> specification development. We encourage user testing and feedback so that +> we can know that this feature is ready to be marked as stable. > > Experimental features leave the experimental status typically either by > graduating to stable, or are removed without a deprecation cycle. diff --git a/doc/api/domain.md b/doc/api/domain.md index e986b8ec8527a7..47f97035fca8d6 100644 --- a/doc/api/domain.md +++ b/doc/api/domain.md @@ -287,7 +287,7 @@ To handle the errors that it catches, listen to its `'error'` event. ### `domain.members` -* {Array} +* Type: {Array} An array of timers and event emitters that have been explicitly added to the domain. diff --git a/doc/api/errors.md b/doc/api/errors.md index 51cc1e38eb24a5..952394745140c1 100644 --- a/doc/api/errors.md +++ b/doc/api/errors.md @@ -228,7 +228,7 @@ a(); ### `Error.stackTraceLimit` -* {number} +* Type: {number} The `Error.stackTraceLimit` property specifies the number of stack frames collected by a stack trace (whether generated by `new Error().stack` or @@ -246,7 +246,7 @@ not capture any frames. added: v16.9.0 --> -* {any} +* Type: {any} If present, the `error.cause` property is the underlying cause of the `Error`. It is used when catching an error and throwing a new one with a different @@ -285,7 +285,7 @@ console.log(symptom); ### `error.code` -* {string} +* Type: {string} The `error.code` property is a string label that identifies the kind of error. `error.code` is the most stable way to identify an error. It will only change @@ -295,7 +295,7 @@ about specific codes. ### `error.message` -* {string} +* Type: {string} The `error.message` property is the string description of the error as set by calling `new Error(message)`. The `message` passed to the constructor will also @@ -312,7 +312,7 @@ console.error(err.message); ### `error.stack` -* {string} +* Type: {string} The `error.stack` property is a string describing the point in the code at which the `Error` was instantiated. @@ -471,27 +471,27 @@ attempts to read a file that does not exist. ### `error.address` -* {string} +* Type: {string} If present, `error.address` is a string describing the address to which a network connection failed. ### `error.code` -* {string} +* Type: {string} The `error.code` property is a string representing the error code. ### `error.dest` -* {string} +* Type: {string} If present, `error.dest` is the file path destination when reporting a file system error. ### `error.errno` -* {number} +* Type: {number} The `error.errno` property is a negative number which corresponds to the error code defined in [`libuv Error handling`][]. 
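+
+A minimal sketch (the path below is a placeholder) of how `error.errno` and the
+related properties appear on a failed file system call:
+
+```js
+const fs = require('node:fs');
+
+fs.readFile('/path/does/not/exist', (err) => {
+  if (err) {
+    // For example: err.errno === -2, err.code === 'ENOENT',
+    // err.syscall === 'open', err.path === '/path/does/not/exist'.
+    console.log(err.errno, err.code, err.syscall, err.path);
+  }
+});
+```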
@@ -503,31 +503,31 @@ To get the string representation of the error code, use ### `error.info` -* {Object} +* Type: {Object} If present, `error.info` is an object with details about the error condition. ### `error.message` -* {string} +* Type: {string} `error.message` is a system-provided human-readable description of the error. ### `error.path` -* {string} +* Type: {string} If present, `error.path` is a string containing a relevant invalid pathname. ### `error.port` -* {number} +* Type: {number} If present, `error.port` is the network connection port that is not available. ### `error.syscall` -* {string} +* Type: {string} The `error.syscall` property is a string describing the [syscall][] that failed. @@ -1368,8 +1368,10 @@ Path is a directory. ### `ERR_FS_FILE_TOO_LARGE` -An attempt has been made to read a file whose size is larger than the maximum -allowed size for a `Buffer`. +An attempt was made to read a file larger than the supported 2 GiB limit for +`fs.readFile()`. This is not a limitation of `Buffer`, but an internal I/O constraint. +For handling larger files, consider using `fs.createReadStream()` to read the +file in chunks. diff --git a/doc/api/esm.md b/doc/api/esm.md index 6c11bd1a13ede1..1c7c1b00f8f17c 100644 --- a/doc/api/esm.md +++ b/doc/api/esm.md @@ -336,7 +336,7 @@ modules it can be used to load ES modules. ## `import.meta` -* {Object} +* Type: {Object} The `import.meta` meta property is an `Object` that contains the following properties. It is only supported in ES modules. @@ -353,7 +353,7 @@ changes: description: This property is no longer experimental. --> -* {string} The directory name of the current module. +* Type: {string} The directory name of the current module. This is the same as the [`path.dirname()`][] of the [`import.meta.filename`][]. @@ -371,7 +371,7 @@ changes: description: This property is no longer experimental. --> -* {string} The full absolute path and filename of the current module, with +* Type: {string} The full absolute path and filename of the current module, with symlinks resolved. This is the same as the [`url.fileURLToPath()`][] of the [`import.meta.url`][]. @@ -381,7 +381,7 @@ This is the same as the [`url.fileURLToPath()`][] of the [`import.meta.url`][]. ### `import.meta.url` -* {string} The absolute `file:` URL of the module. +* Type: {string} The absolute `file:` URL of the module. This is defined exactly the same as it is in browsers providing the URL of the current module file. @@ -402,7 +402,7 @@ added: > Stability: 1.0 - Early development -* {boolean} `true` when the current module is the entry point of the current process; `false` otherwise. +* Type: {boolean} `true` when the current module is the entry point of the current process; `false` otherwise. Equivalent to `require.main === module` in CommonJS. @@ -672,10 +672,24 @@ imported from the same path. ## Wasm modules + + +Importing both WebAssembly module instances and WebAssembly source phase +imports is supported. + +This integration is in line with the +[ES Module Integration Proposal for WebAssembly][]. + +## Wasm modules + > Stability: 1 - Experimental -Importing WebAssembly modules is supported under the -`--experimental-wasm-modules` flag, allowing any `.wasm` files to be +Importing WebAssembly modules is supported allowing any `.wasm` files to be imported as normal modules while also supporting their module imports. 
 This integration is in line with the
@@ -691,11 +705,70 @@ console.log(M);
 executed under:

 ```bash
-node --experimental-wasm-modules index.mjs
+node index.mjs
 ```

 would provide the exports interface for the instantiation of `module.wasm`.

+### JavaScript String Builtins
+
+
+
+When importing WebAssembly modules, the
+[WebAssembly JS String Builtins Proposal][] is automatically enabled through the
+ESM Integration. This allows WebAssembly modules to directly use efficient
+compile-time string builtins from the `wasm:js-string` namespace.
+
+For example, the following Wasm module exports a string `getLength` function using
+the `wasm:js-string` `length` builtin:
+
+```text
+(module
+  ;; Compile-time import of the string length builtin.
+  (import "wasm:js-string" "length" (func $string_length (param externref) (result i32)))
+
+  ;; Define getLength, taking a JS value parameter assumed to be a string,
+  ;; calling string length on it and returning the result.
+  (func $getLength (param $str externref) (result i32)
+    local.get $str
+    call $string_length
+  )
+
+  ;; Export the getLength function.
+  (export "getLength" (func $getLength))
+)
+```
+
+```js
+import { getLength } from './string-len.wasm';
+getLength('foo'); // Returns 3.
+```
+
+Wasm builtins are compile-time imports that are linked during module compilation
+rather than during instantiation. They do not behave like normal module graph
+imports and they cannot be inspected via `WebAssembly.Module.imports(mod)`
+or virtualized unless recompiling the module using the direct
+`WebAssembly.compile` API with string builtins disabled.
+
+### Reserved Wasm Namespaces
+
+
+
+When importing WebAssembly modules through the ESM Integration, they cannot use
+import module names or import/export names that start with reserved prefixes:
+
+* `wasm-js:` - reserved in all module import names, module names and export
+  names.
+* `wasm:` - reserved in module import names and export names (imported module
+  names are allowed in order to support future builtin polyfills).
+
+Importing a module using the above reserved names will throw a
+`WebAssembly.LinkError`.
+
 ## Top-level `await`

@@ -1053,7 +1126,7 @@ _isImports_, _conditions_)
 >    1. Return _"commonjs"_.
 > 4. If _url_ ends in _".json"_, then
 >    1. Return _"json"_.
-> 5. If `--experimental-wasm-modules` is enabled and _url_ ends in
+> 5. If _url_ ends in
 >    _".wasm"_, then
 >    1. Return _"wasm"_.
 > 6. Let _packageURL_ be the result of **LOOKUP\_PACKAGE\_SCOPE**(_url_).
@@ -1068,9 +1141,8 @@ _isImports_, _conditions_)
 >    1. Return _"module"_.
 > 3. Return _"commonjs"_.
 > 11. If _url_ does not have any extension, then
->     1. If _packageType_ is _"module"_ and `--experimental-wasm-modules` is
->        enabled and the file at _url_ contains the header for a WebAssembly
->        module, then
+>     1. If _packageType_ is _"module"_ and the file at _url_ contains the
+>        header for a WebAssembly module, then
+>        1. Return _"wasm"_.
 >     2. If _packageType_ is not **null**, then
 >        1. Return _packageType_.
@@ -1134,6 +1206,7 @@ resolution for ESM specifiers is [commonjs-extension-resolution-loader][].
[Node.js Module Resolution And Loading Algorithm]: #resolution-algorithm-specification [Terminology]: #terminology [URL]: https://url.spec.whatwg.org/ +[WebAssembly JS String Builtins Proposal]: https://github.com/WebAssembly/js-string-builtins [`"exports"`]: packages.md#exports [`"type"`]: packages.md#type [`--experimental-default-type`]: cli.md#--experimental-default-typetype diff --git a/doc/api/events.md b/doc/api/events.md index 87a52f00985e5a..2308baf3a00314 100644 --- a/doc/api/events.md +++ b/doc/api/events.md @@ -1099,7 +1099,7 @@ changes: description: No longer experimental. --> -* `err` Error +* `err` {Error} * `eventName` {string|symbol} * `...args` {any} @@ -1600,7 +1600,7 @@ changes: description: No longer experimental. --> -Value: {boolean} +* Type: {boolean} Change the default `captureRejections` option on all new `EventEmitter` objects. @@ -1618,7 +1618,7 @@ changes: description: No longer experimental. --> -Value: `Symbol.for('nodejs.rejection')` +* Type: {symbol} `Symbol.for('nodejs.rejection')` See how to write a custom [rejection handler][rejection]. @@ -1679,12 +1679,12 @@ changes: * `eventName` {string|symbol} The name of the event being listened for * `options` {Object} * `signal` {AbortSignal} Can be used to cancel awaiting events. - * `close` - {string\[]} Names of events that will end the iteration. - * `highWaterMark` - {integer} **Default:** `Number.MAX_SAFE_INTEGER` + * `close` {string\[]} Names of events that will end the iteration. + * `highWaterMark` {integer} **Default:** `Number.MAX_SAFE_INTEGER` The high watermark. The emitter is paused every time the size of events being buffered is higher than it. Supported only on emitters implementing `pause()` and `resume()` methods. - * `lowWaterMark` - {integer} **Default:** `1` + * `lowWaterMark` {integer} **Default:** `1` The low watermark. The emitter is resumed every time the size of events being buffered is lower than it. Supported only on emitters implementing `pause()` and `resume()` methods. @@ -1984,7 +1984,7 @@ same options as `EventEmitter` and `AsyncResource` themselves. ### `eventemitterasyncresource.asyncResource` -* Type: The underlying {AsyncResource}. +* Type: {AsyncResource} The underlying {AsyncResource}. The returned `AsyncResource` object has an additional `eventEmitter` property that provides a reference to this `EventEmitterAsyncResource`. diff --git a/doc/api/fs.md b/doc/api/fs.md index a279c648fa23d9..2d542557c4ad26 100644 --- a/doc/api/fs.md +++ b/doc/api/fs.md @@ -375,7 +375,7 @@ Unlike `filehandle.sync` this method does not flush modified metadata. added: v10.0.0 --> -* {number} The numeric file descriptor managed by the {FileHandle} object. +* Type: {number} The numeric file descriptor managed by the {FileHandle} object. #### `filehandle.read(buffer, offset, length, position)` @@ -1922,7 +1922,7 @@ added: - v16.17.0 --> -* {Object} +* Type: {Object} Returns an object containing commonly used constants for file system operations. The object is the same as `fs.constants`. See [FS constants][] @@ -6379,8 +6379,7 @@ changes: * `target` {string|Buffer|URL} * `path` {string|Buffer|URL} * `type` {string|null} **Default:** `null` - -Returns `undefined`. +* Returns: `undefined`. For detailed information, see the documentation of the asynchronous version of this API: [`fs.symlink()`][]. @@ -6437,8 +6436,7 @@ changes: * `path` {string|Buffer|URL} * `atime` {number|string|Date} * `mtime` {number|string|Date} - -Returns `undefined`. +* Returns: `undefined`. 
For detailed information, see the documentation of the asynchronous version of this API: [`fs.utimes()`][]. @@ -6489,8 +6487,7 @@ changes: * `flag` {string} See [support of file system `flags`][]. **Default:** `'w'`. * `flush` {boolean} If all data is successfully written to the file, and `flush` is `true`, `fs.fsyncSync()` is used to flush the data. - -Returns `undefined`. +* Returns: `undefined`. The `mode` option only affects the newly created file. See [`fs.open()`][] for more details. @@ -6665,7 +6662,7 @@ Subsequent reads will result in errors. added: v12.12.0 --> -* {string} +* Type: {string} The read-only path of this directory as was provided to [`fs.opendir()`][], [`fs.opendirSync()`][], or [`fsPromises.opendir()`][]. @@ -6863,7 +6860,7 @@ Returns `true` if the {fs.Dirent} object describes a symbolic link. added: v10.10.0 --> -* {string|Buffer} +* Type: {string|Buffer} The file name that this {fs.Dirent} object refers to. The type of this value is determined by the `options.encoding` passed to [`fs.readdir()`][] or @@ -6882,7 +6879,7 @@ changes: description: Marking the API stable. --> -* {string} +* Type: {string} The path to the parent directory of the file this {fs.Dirent} object refers to. @@ -7101,7 +7098,7 @@ Fires immediately after `'open'`. added: v6.4.0 --> -* {number} +* Type: {number} The number of bytes that have been read so far. @@ -7111,7 +7108,7 @@ The number of bytes that have been read so far. added: v0.1.93 --> -* {string|Buffer} +* Type: {string|Buffer} The path to the file the stream is reading from as specified in the first argument to `fs.createReadStream()`. If `path` is passed as a string, then @@ -7127,7 +7124,7 @@ added: - v10.16.0 --> -* {boolean} +* Type: {boolean} This property is `true` if the underlying file has not been opened yet, i.e. before the `'ready'` event is emitted. @@ -7284,49 +7281,49 @@ This method is only valid when using [`fs.lstat()`][]. #### `stats.dev` -* {number|bigint} +* Type: {number|bigint} The numeric identifier of the device containing the file. #### `stats.ino` -* {number|bigint} +* Type: {number|bigint} The file system specific "Inode" number for the file. #### `stats.mode` -* {number|bigint} +* Type: {number|bigint} A bit-field describing the file type and mode. #### `stats.nlink` -* {number|bigint} +* Type: {number|bigint} The number of hard-links that exist for the file. #### `stats.uid` -* {number|bigint} +* Type: {number|bigint} The numeric user identifier of the user that owns the file (POSIX). #### `stats.gid` -* {number|bigint} +* Type: {number|bigint} The numeric group identifier of the group that owns the file (POSIX). #### `stats.rdev` -* {number|bigint} +* Type: {number|bigint} A numeric device identifier if the file represents a device. #### `stats.size` -* {number|bigint} +* Type: {number|bigint} The size of the file in bytes. @@ -7335,13 +7332,13 @@ this will be `0`. #### `stats.blksize` -* {number|bigint} +* Type: {number|bigint} The file system block size for i/o operations. #### `stats.blocks` -* {number|bigint} +* Type: {number|bigint} The number of blocks allocated for this file. @@ -7351,7 +7348,7 @@ The number of blocks allocated for this file. added: v8.1.0 --> -* {number|bigint} +* Type: {number|bigint} The timestamp indicating the last time this file was accessed expressed in milliseconds since the POSIX Epoch. @@ -7362,7 +7359,7 @@ milliseconds since the POSIX Epoch. 
added: v8.1.0 --> -* {number|bigint} +* Type: {number|bigint} The timestamp indicating the last time this file was modified expressed in milliseconds since the POSIX Epoch. @@ -7373,7 +7370,7 @@ milliseconds since the POSIX Epoch. added: v8.1.0 --> -* {number|bigint} +* Type: {number|bigint} The timestamp indicating the last time the file status was changed expressed in milliseconds since the POSIX Epoch. @@ -7384,7 +7381,7 @@ in milliseconds since the POSIX Epoch. added: v8.1.0 --> -* {number|bigint} +* Type: {number|bigint} The timestamp indicating the creation time of this file expressed in milliseconds since the POSIX Epoch. @@ -7395,7 +7392,7 @@ milliseconds since the POSIX Epoch. added: v12.10.0 --> -* {bigint} +* Type: {bigint} Only present when `bigint: true` is passed into the method that generates the object. @@ -7408,7 +7405,7 @@ nanoseconds since the POSIX Epoch. added: v12.10.0 --> -* {bigint} +* Type: {bigint} Only present when `bigint: true` is passed into the method that generates the object. @@ -7421,7 +7418,7 @@ nanoseconds since the POSIX Epoch. added: v12.10.0 --> -* {bigint} +* Type: {bigint} Only present when `bigint: true` is passed into the method that generates the object. @@ -7434,7 +7431,7 @@ in nanoseconds since the POSIX Epoch. added: v12.10.0 --> -* {bigint} +* Type: {bigint} Only present when `bigint: true` is passed into the method that generates the object. @@ -7447,7 +7444,7 @@ nanoseconds since the POSIX Epoch. added: v0.11.13 --> -* {Date} +* Type: {Date} The timestamp indicating the last time this file was accessed. @@ -7457,7 +7454,7 @@ The timestamp indicating the last time this file was accessed. added: v0.11.13 --> -* {Date} +* Type: {Date} The timestamp indicating the last time this file was modified. @@ -7467,7 +7464,7 @@ The timestamp indicating the last time this file was modified. added: v0.11.13 --> -* {Date} +* Type: {Date} The timestamp indicating the last time the file status was changed. @@ -7477,7 +7474,7 @@ The timestamp indicating the last time the file status was changed. added: v0.11.13 --> -* {Date} +* Type: {Date} The timestamp indicating the creation time of this file. @@ -7569,7 +7566,7 @@ added: - v18.15.0 --> -* {number|bigint} +* Type: {number|bigint} Free blocks available to unprivileged users. @@ -7581,7 +7578,7 @@ added: - v18.15.0 --> -* {number|bigint} +* Type: {number|bigint} Free blocks in file system. @@ -7593,7 +7590,7 @@ added: - v18.15.0 --> -* {number|bigint} +* Type: {number|bigint} Total data blocks in file system. @@ -7605,7 +7602,7 @@ added: - v18.15.0 --> -* {number|bigint} +* Type: {number|bigint} Optimal transfer block size. @@ -7617,7 +7614,7 @@ added: - v18.15.0 --> -* {number|bigint} +* Type: {number|bigint} Free file nodes in file system. @@ -7629,7 +7626,7 @@ added: - v18.15.0 --> -* {number|bigint} +* Type: {number|bigint} Total file nodes in file system. @@ -7641,7 +7638,7 @@ added: - v18.15.0 --> -* {number|bigint} +* Type: {number|bigint} Type of file system. @@ -7723,14 +7720,14 @@ argument to [`fs.createWriteStream()`][]. If `path` is passed as a string, then added: v11.2.0 --> -* {boolean} +* Type: {boolean} This property is `true` if the underlying file has not been opened yet, i.e. before the `'ready'` event is emitted. ### `fs.constants` -* {Object} +* Type: {Object} Returns an object containing commonly used constants for file system operations. 
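+
+A minimal sketch (the file path is a placeholder) that combines two constants
+with a bitwise OR to check permissions:
+
+```js
+const fs = require('node:fs');
+
+// Check that the file is both readable and writable by the current process.
+fs.access('/tmp/example.txt', fs.constants.R_OK | fs.constants.W_OK, (err) => {
+  console.log(err ? 'no read/write access' : 'read/write access OK');
+});
+```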
diff --git a/doc/api/globals.md b/doc/api/globals.md index 844808d53b7dc6..570e84ca779337 100644 --- a/doc/api/globals.md +++ b/doc/api/globals.md @@ -104,7 +104,7 @@ changes: description: Added the new optional reason argument. --> -* `reason`: {any} +* `reason` {any} * Returns: {AbortSignal} Returns a new already aborted `AbortSignal`. @@ -238,7 +238,7 @@ See {Blob}. added: v0.1.103 --> -* {Function} +* Type: {Function} Used to handle binary data. See the [buffer section][]. @@ -332,7 +332,7 @@ A browser-compatible implementation of [`CompressionStream`][]. added: v0.1.100 --> -* {Object} +* Type: {Object} Used to print to stdout and stderr. See the [`console`][] section. @@ -515,7 +515,7 @@ The dispatcher must be compatible with `undici`'s fetch(url, { dispatcher: new MyAgent() }); ``` -It is possible to change the global dispatcher in Node.js installing `undici` and using +It is possible to change the global dispatcher in Node.js by installing `undici` and using the `setGlobalDispatcher()` method. Calling this method will affect both `undici` and Node.js. @@ -567,7 +567,7 @@ added: v0.1.27 > Stability: 3 - Legacy. Use [`globalThis`][] instead. -* {Object} The global namespace object. +* Type: {Object} The global namespace object. In browsers, the top-level scope has traditionally been the global scope. This means that `var something` will define a new global variable, except within @@ -666,7 +666,7 @@ A partial implementation of [`window.navigator`][]. added: v21.0.0 --> -* {number} +* Type: {number} The `navigator.hardwareConcurrency` read-only property returns the number of logical processors available to the current Node.js instance. @@ -681,7 +681,7 @@ console.log(`This process is running on ${navigator.hardwareConcurrency} logical added: v21.2.0 --> -* {string} +* Type: {string} The `navigator.language` read-only property returns a string representing the preferred language of the Node.js instance. The language will be determined by @@ -702,7 +702,7 @@ console.log(`The preferred language of the Node.js instance has the tag '${navig added: v21.2.0 --> -* {Array} +* Type: {Array} The `navigator.languages` read-only property returns an array of strings representing the preferred languages of the Node.js instance. @@ -722,7 +722,7 @@ console.log(`The preferred languages are '${navigator.languages}'`); added: v21.2.0 --> -* {string} +* Type: {string} The `navigator.platform` read-only property returns a string identifying the platform on which the Node.js instance is running. @@ -737,7 +737,7 @@ console.log(`This process is running on ${navigator.platform}`); added: v21.1.0 --> -* {string} +* Type: {string} The `navigator.userAgent` read-only property returns user agent consisting of the runtime name and major version number. @@ -810,7 +810,7 @@ The [`perf_hooks.performance`][] object. added: v0.1.7 --> -* {Object} +* Type: {Object} The process object. See the [`process` object][] section. @@ -1132,7 +1132,7 @@ The WHATWG `URLSearchParams` class. See the [`URLSearchParams`][] section. added: v8.0.0 --> -* {Object} +* Type: {Object} The object that acts as the namespace for all W3C [WebAssembly][webassembly-org] related functionality. See the diff --git a/doc/api/http.md b/doc/api/http.md index 709b956345fba5..cc65635f1043a2 100644 --- a/doc/api/http.md +++ b/doc/api/http.md @@ -291,7 +291,7 @@ changes: description: The property now has a `null` prototype. 
--> -* {Object} +* Type: {Object} An object which contains arrays of sockets currently awaiting use by the agent when `keepAlive` is enabled. Do not modify. @@ -332,7 +332,7 @@ that determine socket reusability. added: v0.11.7 --> -* {number} +* Type: {number} By default set to 256. For agents with `keepAlive` enabled, this sets the maximum number of sockets that will be left open in the free @@ -344,7 +344,7 @@ state. added: v0.3.6 --> -* {number} +* Type: {number} By default set to `Infinity`. Determines how many concurrent sockets the agent can have open per origin. Origin is the returned value of [`agent.getName()`][]. @@ -357,7 +357,7 @@ added: - v12.19.0 --> -* {number} +* Type: {number} By default set to `Infinity`. Determines how many concurrent sockets the agent can have open. Unlike `maxSockets`, this parameter applies across all origins. @@ -372,7 +372,7 @@ changes: description: The property now has a `null` prototype. --> -* {Object} +* Type: {Object} An object which contains queues of requests that have not yet been assigned to sockets. Do not modify. @@ -387,7 +387,7 @@ changes: description: The property now has a `null` prototype. --> -* {Object} +* Type: {Object} An object which contains arrays of sockets currently in use by the agent. Do not modify. @@ -836,7 +836,7 @@ changes: > Stability: 0 - Deprecated. Check [`request.destroyed`][] instead. -* {boolean} +* Type: {boolean} The `request.aborted` property will be `true` if the request has been aborted. @@ -850,7 +850,7 @@ deprecated: v13.0.0 > Stability: 0 - Deprecated. Use [`request.socket`][]. -* {stream.Duplex} +* Type: {stream.Duplex} See [`request.socket`][]. @@ -920,7 +920,7 @@ added: - v13.14.0 --> -* {boolean} +* Type: {boolean} Is `true` after [`request.destroy()`][] has been called. @@ -937,7 +937,7 @@ deprecated: > Stability: 0 - Deprecated. Use [`request.writableEnded`][]. -* {boolean} +* Type: {boolean} The `request.finished` property will be `true` if [`request.end()`][] has been called. `request.end()` will automatically be called if the @@ -1069,7 +1069,7 @@ const hasContentType = request.hasHeader('content-type'); ### `request.maxHeadersCount` -* {number} **Default:** `2000` +* Type: {number} **Default:** `2000` Limits maximum response headers count. If set to 0, no limit will be applied. @@ -1079,7 +1079,7 @@ Limits maximum response headers count. If set to 0, no limit will be applied. added: v0.4.0 --> -* {string} The request path. +* Type: {string} The request path. ### `request.method` @@ -1087,7 +1087,7 @@ added: v0.4.0 added: v0.1.97 --> -* {string} The request method. +* Type: {string} The request method. ### `request.host` @@ -1097,7 +1097,7 @@ added: - v12.19.0 --> -* {string} The request host. +* Type: {string} The request host. ### `request.protocol` @@ -1107,7 +1107,7 @@ added: - v12.19.0 --> -* {string} The request protocol. +* Type: {string} The request protocol. ### `request.removeHeader(name)` @@ -1131,7 +1131,7 @@ added: - v12.16.0 --> -* {boolean} Whether the request is send through a reused socket. +* Type: {boolean} Whether the request is send through a reused socket. When sending request through a keep-alive enabled agent, the underlying socket might be reused. But if server closes connection at unfortunate time, client @@ -1306,7 +1306,7 @@ Once a socket is assigned to this request and is connected added: v0.3.0 --> -* {stream.Duplex} +* Type: {stream.Duplex} Reference to the underlying socket. Usually users will not want to access this property. 
In particular, the socket will not emit `'readable'` events @@ -1362,7 +1362,7 @@ See [`writable.uncork()`][]. added: v12.9.0 --> -* {boolean} +* Type: {boolean} Is `true` after [`request.end()`][] has been called. This property does not indicate whether the data has been flushed, for this use @@ -1374,7 +1374,7 @@ does not indicate whether the data has been flushed, for this use added: v12.7.0 --> -* {boolean} +* Type: {boolean} Is `true` if all data has been flushed to the underlying system, immediately before the [`'finish'`][] event is emitted. @@ -1790,7 +1790,7 @@ changes: description: The default is now set to the minimum between 60000 (60 seconds) or `requestTimeout`. --> -* {number} **Default:** The minimum between [`server.requestTimeout`][] or `60000`. +* Type: {number} **Default:** The minimum between [`server.requestTimeout`][] or `60000`. Limit the amount of time the parser will wait to receive the complete HTTP headers. @@ -1813,7 +1813,7 @@ This method is identical to [`server.listen()`][] from [`net.Server`][]. added: v5.7.0 --> -* {boolean} Indicates whether or not the server is listening for connections. +* Type: {boolean} Indicates whether or not the server is listening for connections. ### `server.maxHeadersCount` @@ -1821,7 +1821,7 @@ added: v5.7.0 added: v0.7.0 --> -* {number} **Default:** `2000` +* Type: {number} **Default:** `2000` Limits maximum incoming headers count. If set to 0, no limit will be applied. @@ -1836,7 +1836,7 @@ changes: from no timeout to 300s (5 minutes). --> -* {number} **Default:** `300000` +* Type: {number} **Default:** `300000` Sets the timeout value in milliseconds for receiving the entire request from the client. @@ -1879,7 +1879,7 @@ explicitly. added: v16.10.0 --> -* {number} Requests per socket. **Default:** 0 (no limit) +* Type: {number} Requests per socket. **Default:** 0 (no limit) The maximum number of requests socket can handle before closing keep alive connection. @@ -1900,7 +1900,7 @@ changes: description: The default timeout changed from 120s to 0 (no timeout). --> -* {number} Timeout in milliseconds. **Default:** 0 (no timeout) +* Type: {number} Timeout in milliseconds. **Default:** 0 (no timeout) The number of milliseconds of inactivity before a socket is presumed to have timed out. @@ -1916,22 +1916,43 @@ value only affects new connections to the server, not any existing connections. added: v8.0.0 --> -* {number} Timeout in milliseconds. **Default:** `5000` (5 seconds). +* Type: {number} Timeout in milliseconds. **Default:** `5000` (5 seconds). The number of milliseconds of inactivity a server needs to wait for additional incoming data, after it has finished writing the last response, before a socket -will be destroyed. If the server receives new data before the keep-alive -timeout has fired, it will reset the regular inactivity timeout, i.e., -[`server.timeout`][]. +will be destroyed. + +This timeout value is combined with the +[`server.keepAliveTimeoutBuffer`][] option to determine the actual socket +timeout, calculated as: +socketTimeout = keepAliveTimeout + keepAliveTimeoutBuffer +If the server receives new data before the keep-alive timeout has fired, it +will reset the regular inactivity timeout, i.e., [`server.timeout`][]. A value of `0` will disable the keep-alive timeout behavior on incoming connections. -A value of `0` makes the http server behave similarly to Node.js versions prior +A value of `0` makes the HTTP server behave similarly to Node.js versions prior to 8.0.0, which did not have a keep-alive timeout. 
The socket timeout logic is set up on connection, so changing this value only affects new connections to the server, not any existing connections. +### `server.keepAliveTimeoutBuffer` + + + +* Type: {number} Timeout in milliseconds. **Default:** `1000` (1 second). + +An additional buffer time added to the +[`server.keepAliveTimeout`][] to extend the internal socket timeout. + +This buffer helps reduce connection reset (`ECONNRESET`) errors by increasing +the socket timeout slightly beyond the advertised keep-alive timeout. + +This option applies only to new incoming connections. + ### `server[Symbol.asyncDispose]()` -* {boolean} +* Type: {boolean} Boolean (read-only). True if headers were sent, false otherwise. @@ -2195,7 +2216,7 @@ response.removeHeader('Content-Encoding'); added: v15.7.0 --> -* {http.IncomingMessage} +* Type: {http.IncomingMessage} A reference to the original HTTP `request` object. @@ -2205,7 +2226,7 @@ A reference to the original HTTP `request` object. added: v0.7.5 --> -* {boolean} +* Type: {boolean} When true, the Date header will be automatically generated and sent in the response if it is not already present in the headers. Defaults to true. @@ -2292,7 +2313,7 @@ timed out sockets must be handled explicitly. added: v0.3.0 --> -* {stream.Duplex} +* Type: {stream.Duplex} Reference to the underlying socket. Usually users will not want to access this property. In particular, the socket will not emit `'readable'` events @@ -2327,7 +2348,7 @@ type other than {net.Socket}. added: v0.4.0 --> -* {number} **Default:** `200` +* Type: {number} **Default:** `200` When using implicit headers (not calling [`response.writeHead()`][] explicitly), this property controls the status code that will be sent to the client when @@ -2346,7 +2367,7 @@ status code which was sent out. added: v0.11.8 --> -* {string} +* Type: {string} When using implicit headers (not calling [`response.writeHead()`][] explicitly), this property controls the status message that will be sent to the client when @@ -2368,7 +2389,7 @@ added: - v16.18.0 --> -* {boolean} **Default:** `false` +* Type: {boolean} **Default:** `false` If set to `true`, Node.js will check whether the `Content-Length` header value and the size of the body, in bytes, are equal. @@ -2391,7 +2412,7 @@ See [`writable.uncork()`][]. added: v12.9.0 --> -* {boolean} +* Type: {boolean} Is `true` after [`response.end()`][] has been called. This property does not indicate whether the data has been flushed, for this use @@ -2403,7 +2424,7 @@ does not indicate whether the data has been flushed, for this use added: v12.7.0 --> -* {boolean} +* Type: {boolean} Is `true` if all data has been flushed to the underlying system, immediately before the [`'finish'`][] event is emitted. @@ -2662,7 +2683,7 @@ deprecated: > Stability: 0 - Deprecated. Check `message.destroyed` from {stream.Readable}. -* {boolean} +* Type: {boolean} The `message.aborted` property will be `true` if the request has been aborted. @@ -2673,7 +2694,7 @@ been aborted. added: v0.3.0 --> -* {boolean} +* Type: {boolean} The `message.complete` property will be `true` if a complete HTTP message has been received and successfully parsed. @@ -2748,7 +2769,7 @@ changes: on the prototype and is no longer enumerable. --> -* {Object} +* Type: {Object} The request/response headers object. @@ -2786,7 +2807,7 @@ added: - v16.17.0 --> -* {Object} +* Type: {Object} Similar to [`message.headers`][], but there is no join logic and the values are always arrays of strings, even for headers received just once. 
@@ -2806,7 +2827,7 @@ console.log(request.headersDistinct); added: v0.1.1 --> -* {string} +* Type: {string} In case of server request, the HTTP version sent by the client. In the case of client response, the HTTP version of the connected-to server. @@ -2821,7 +2842,7 @@ Also `message.httpVersionMajor` is the first integer and added: v0.1.1 --> -* {string} +* Type: {string} **Only valid for request obtained from [`http.Server`][].** @@ -2833,7 +2854,7 @@ The request method as a string. Read only. Examples: `'GET'`, `'DELETE'`. added: v0.11.6 --> -* {string\[]} +* Type: {string\[]} The raw request/response headers list exactly as they were received. @@ -2863,7 +2884,7 @@ console.log(request.rawHeaders); added: v0.11.6 --> -* {string\[]} +* Type: {string\[]} The raw request/response trailer keys and values exactly as they were received. Only populated at the `'end'` event. @@ -2886,7 +2907,7 @@ Calls `message.socket.setTimeout(msecs, callback)`. added: v0.3.0 --> -* {stream.Duplex} +* Type: {stream.Duplex} The [`net.Socket`][] object associated with the connection. @@ -2903,7 +2924,7 @@ type other than {net.Socket} or internally nulled. added: v0.1.1 --> -* {number} +* Type: {number} **Only valid for response obtained from [`http.ClientRequest`][].** @@ -2915,7 +2936,7 @@ The 3-digit HTTP response status code. E.G. `404`. added: v0.11.10 --> -* {string} +* Type: {string} **Only valid for response obtained from [`http.ClientRequest`][].** @@ -2928,7 +2949,7 @@ Error`. added: v0.3.0 --> -* {Object} +* Type: {Object} The request/response trailers object. Only populated at the `'end'` event. @@ -2940,7 +2961,7 @@ added: - v16.17.0 --> -* {Object} +* Type: {Object} Similar to [`message.trailers`][], but there is no join logic and the values are always arrays of strings, even for headers received just once. @@ -2952,7 +2973,7 @@ Only populated at the `'end'` event. added: v0.1.90 --> -* {string} +* Type: {string} **Only valid for request obtained from [`http.Server`][].** @@ -3236,7 +3257,7 @@ const hasContentType = outgoingMessage.hasHeader('content-type'); added: v0.9.3 --> -* {boolean} +* Type: {boolean} Read-only. `true` if the headers were sent, otherwise `false`. @@ -3342,7 +3363,7 @@ Once a socket is associated with the message and is connected, added: v0.3.0 --> -* {stream.Duplex} +* Type: {stream.Duplex} Reference to the underlying socket. Usually, users will not want to access this property. @@ -3367,7 +3388,7 @@ added: - v12.16.0 --> -* {number} +* Type: {number} The number of times `outgoingMessage.cork()` has been called. @@ -3377,7 +3398,7 @@ The number of times `outgoingMessage.cork()` has been called. added: v12.9.0 --> -* {boolean} +* Type: {boolean} Is `true` if `outgoingMessage.end()` has been called. This property does not indicate whether the data has been flushed. For that purpose, use @@ -3389,7 +3410,7 @@ not indicate whether the data has been flushed. For that purpose, use added: v12.7.0 --> -* {boolean} +* Type: {boolean} Is `true` if all data has been flushed to the underlying system. @@ -3399,7 +3420,7 @@ Is `true` if all data has been flushed to the underlying system. added: v12.9.0 --> -* {number} +* Type: {number} The `highWaterMark` of the underlying socket if assigned. Otherwise, the default buffer level when [`writable.write()`][] starts returning false (`16384`). @@ -3410,7 +3431,7 @@ buffer level when [`writable.write()`][] starts returning false (`16384`). added: v12.9.0 --> -* {number} +* Type: {number} The number of buffered bytes. 
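As a minimal sketch (not taken from the docs above), the buffered byte count can be compared against `writableHighWaterMark` to decide when to wait for `'drain'`:

```js
const http = require('node:http');

const server = http.createServer((req, res) => {
  const ok = res.write('x'.repeat(64 * 1024));
  // Bytes currently queued on the message vs. the high-water mark.
  console.log(res.writableLength, res.writableHighWaterMark);
  if (!ok) {
    res.once('drain', () => res.end());
  } else {
    res.end();
  }
});

server.listen(0);
```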
@@ -3420,7 +3441,7 @@ The number of buffered bytes. added: v12.9.0 --> -* {boolean} +* Type: {boolean} Always `false`. @@ -3459,7 +3480,7 @@ memory. The `'drain'` event will be emitted when the buffer is free again. added: v0.11.8 --> -* {string\[]} +* Type: {string\[]} A list of the HTTP methods that are supported by the parser. @@ -3469,7 +3490,7 @@ A list of the HTTP methods that are supported by the parser. added: v0.1.22 --> -* {Object} +* Type: {Object} A collection of all the standard HTTP response status codes, and the short description of each. For example, `http.STATUS_CODES[404] === 'Not @@ -3740,7 +3761,7 @@ changes: default. --> -* {http.Agent} +* Type: {http.Agent} Global instance of `Agent` which is used as the default for all HTTP client requests. Diverges from a default `Agent` configuration by having `keepAlive` @@ -3754,7 +3775,7 @@ added: - v10.15.0 --> -* {number} +* Type: {number} Read-only property specifying the maximum allowed size of HTTP headers in bytes. Defaults to 16 KiB. Configurable using the [`--max-http-header-size`][] CLI @@ -4310,6 +4331,7 @@ A browser-compatible implementation of {WebSocket}. [`response.writeHead()`]: #responsewriteheadstatuscode-statusmessage-headers [`server.close()`]: #serverclosecallback [`server.headersTimeout`]: #serverheaderstimeout +[`server.keepAliveTimeoutBuffer`]: #serverkeepalivetimeoutbuffer [`server.keepAliveTimeout`]: #serverkeepalivetimeout [`server.listen()`]: net.md#serverlisten [`server.requestTimeout`]: #serverrequesttimeout diff --git a/doc/api/http2.md b/doc/api/http2.md index 7552784a4ca527..7131693ee2b012 100644 --- a/doc/api/http2.md +++ b/doc/api/http2.md @@ -451,7 +451,7 @@ session.on('timeout', () => { /* .. */ }); added: v9.4.0 --> -* {string|undefined} +* Type: {string|undefined} Value will be `undefined` if the `Http2Session` is not yet connected to a socket, `h2c` if the `Http2Session` is not connected to a `TLSSocket`, or @@ -480,7 +480,7 @@ If specified, the `callback` function is registered as a handler for the added: v9.4.0 --> -* {boolean} +* Type: {boolean} Will be `true` if this `Http2Session` instance has been closed, otherwise `false`. @@ -491,7 +491,7 @@ Will be `true` if this `Http2Session` instance has been closed, otherwise added: v10.0.0 --> -* {boolean} +* Type: {boolean} Will be `true` if this `Http2Session` instance is still connecting, will be set to `false` before emitting `connect` event and/or calling the `http2.connect` @@ -525,7 +525,7 @@ If there are any remaining open `Http2Streams` associated with the added: v8.4.0 --> -* {boolean} +* Type: {boolean} Will be `true` if this `Http2Session` instance has been destroyed and must no longer be used, otherwise `false`. @@ -536,7 +536,7 @@ longer be used, otherwise `false`. added: v9.4.0 --> -* {boolean|undefined} +* Type: {boolean|undefined} Value is `undefined` if the `Http2Session` session socket has not yet been connected, `true` if the `Http2Session` is connected with a `TLSSocket`, @@ -563,7 +563,7 @@ Transmits a `GOAWAY` frame to the connected peer _without_ shutting down the added: v8.4.0 --> -* {HTTP/2 Settings Object} +* Type: {HTTP/2 Settings Object} A prototype-less object describing the current local settings of this `Http2Session`. The local settings are local to _this_ `Http2Session` instance. 
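As an illustrative sketch (the endpoint URL is a placeholder), the local and remote settings objects can be inspected once the session has connected:

```js
const http2 = require('node:http2');

// 'http://localhost:8000' stands in for any HTTP/2 endpoint.
const session = http2.connect('http://localhost:8000');

session.on('connect', () => {
  // Settings this peer advertised vs. settings received from the remote peer.
  console.log(session.localSettings.maxConcurrentStreams);
  console.log(session.remoteSettings.initialWindowSize);
  session.close();
});
```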
@@ -574,7 +574,7 @@ A prototype-less object describing the current local settings of this added: v9.4.0 --> -* {string\[]|undefined} +* Type: {string\[]|undefined} If the `Http2Session` is connected to a `TLSSocket`, the `originSet` property will return an `Array` of origins for which the `Http2Session` may be @@ -588,7 +588,7 @@ The `originSet` property is only available when using a secure TLS connection. added: v8.4.0 --> -* {boolean} +* Type: {boolean} Indicates whether the `Http2Session` is currently waiting for acknowledgment of a sent `SETTINGS` frame. Will be `true` after calling the @@ -655,7 +655,7 @@ instance's underlying [`net.Socket`][]. added: v8.4.0 --> -* {HTTP/2 Settings Object} +* Type: {HTTP/2 Settings Object} A prototype-less object describing the current remote settings of this `Http2Session`. The remote settings are set by the _connected_ HTTP/2 peer. @@ -725,7 +725,7 @@ registered as a listener on the `'timeout'` event. added: v8.4.0 --> -* {net.Socket|tls.TLSSocket} +* Type: {net.Socket|tls.TLSSocket} Returns a `Proxy` object that acts as a `net.Socket` (or `tls.TLSSocket`) but limits available methods to ones safe to use with HTTP/2. @@ -747,7 +747,7 @@ added: v8.4.0 Provides miscellaneous information about the current state of the `Http2Session`. -* {Object} +* Type: {Object} * `effectiveLocalWindowSize` {number} The current local (receive) flow control window size for the `Http2Session`. * `effectiveRecvDataLength` {number} The current number of bytes @@ -805,7 +805,7 @@ multiple `SETTINGS` frames while acknowledgment is still pending. added: v8.4.0 --> -* {number} +* Type: {number} The `http2session.type` will be equal to `http2.constants.NGHTTP2_SESSION_SERVER` if this `Http2Session` instance is a @@ -1078,7 +1078,7 @@ changes: `weight` option is deprecated. --> -* `headers` {HTTP/2 Headers Object} | {Array} +* `headers` {HTTP/2 Headers Object|Array} * `options` {Object} * `endStream` {boolean} `true` if the `Http2Stream` _writable_ side should @@ -1366,7 +1366,7 @@ option must be set for this event to be emitted. added: v8.4.0 --> -* {boolean} +* Type: {boolean} Set to `true` if the `Http2Stream` instance was aborted abnormally. When set, the `'aborted'` event will have been emitted. @@ -1379,7 +1379,7 @@ added: - v10.16.0 --> -* {number} +* Type: {number} This property shows the number of characters currently buffered to be written. See [`net.Socket.bufferSize`][] for details. @@ -1410,7 +1410,7 @@ connected HTTP/2 peer. added: v9.4.0 --> -* {boolean} +* Type: {boolean} Set to `true` if the `Http2Stream` instance has been closed. @@ -1420,7 +1420,7 @@ Set to `true` if the `Http2Stream` instance has been closed. added: v8.4.0 --> -* {boolean} +* Type: {boolean} Set to `true` if the `Http2Stream` instance has been destroyed and is no longer usable. @@ -1431,7 +1431,7 @@ usable. added: v10.11.0 --> -* {boolean} +* Type: {boolean} Set to `true` if the `END_STREAM` flag was set in the request or response HEADERS frame received, indicating that no additional data should be received @@ -1443,7 +1443,7 @@ and the readable side of the `Http2Stream` will be closed. added: v8.4.0 --> -* {number|undefined} +* Type: {number|undefined} The numeric stream identifier of this `Http2Stream` instance. Set to `undefined` if the stream identifier has not yet been assigned. @@ -1454,7 +1454,7 @@ if the stream identifier has not yet been assigned. 
added: v9.4.0 --> -* {boolean} +* Type: {boolean} Set to `true` if the `Http2Stream` instance has not yet been assigned a numeric stream identifier. @@ -1490,7 +1490,7 @@ Updates the priority for this `Http2Stream` instance. added: v8.4.0 --> -* {number} +* Type: {number} Set to the `RST_STREAM` [error code][] reported when the `Http2Stream` is destroyed after either receiving an `RST_STREAM` frame from the connected peer, @@ -1503,7 +1503,7 @@ calling `http2stream.close()`, or `http2stream.destroy()`. Will be added: v9.5.0 --> -* {HTTP/2 Headers Object} +* Type: {HTTP/2 Headers Object} An object containing the outbound headers sent for this `Http2Stream`. @@ -1513,7 +1513,7 @@ An object containing the outbound headers sent for this `Http2Stream`. added: v9.5.0 --> -* {HTTP/2 Headers Object\[]} +* Type: {HTTP/2 Headers Object\[]} An array of objects containing the outbound informational (additional) headers sent for this `Http2Stream`. @@ -1524,7 +1524,7 @@ sent for this `Http2Stream`. added: v9.5.0 --> -* {HTTP/2 Headers Object} +* Type: {HTTP/2 Headers Object} An object containing the outbound trailers sent for this `HttpStream`. @@ -1534,7 +1534,7 @@ An object containing the outbound trailers sent for this `HttpStream`. added: v8.4.0 --> -* {Http2Session} +* Type: {Http2Session} A reference to the `Http2Session` instance that owns this `Http2Stream`. The value will be `undefined` after the `Http2Stream` instance is destroyed. @@ -1588,7 +1588,7 @@ changes: Provides miscellaneous information about the current state of the `Http2Stream`. -* {Object} +* Type: {Object} * `localWindowSize` {number} The number of bytes the connected peer may send for this `Http2Stream` without receiving a `WINDOW_UPDATE`. * `state` {number} A flag indicating the low-level current state of the @@ -1771,7 +1771,7 @@ Sends an additional informational `HEADERS` frame to the connected HTTP/2 peer. added: v8.4.0 --> -* {boolean} +* Type: {boolean} True if headers were sent, false otherwise (read-only). @@ -1781,7 +1781,7 @@ True if headers were sent, false otherwise (read-only). added: v8.4.0 --> -* {boolean} +* Type: {boolean} Read-only property mapped to the `SETTINGS_ENABLE_PUSH` flag of the remote client's most recent `SETTINGS` frame. Will be `true` if the remote peer @@ -2484,7 +2484,7 @@ changes: description: The default timeout changed from 120s to 0 (no timeout). --> -* {number} Timeout in milliseconds. **Default:** 0 (no timeout) +* Type: {number} Timeout in milliseconds. **Default:** 0 (no timeout) The number of milliseconds of inactivity before a socket is presumed to have timed out. @@ -2753,7 +2753,7 @@ changes: description: The default timeout changed from 120s to 0 (no timeout). --> -* {number} Timeout in milliseconds. **Default:** 0 (no timeout) +* Type: {number} Timeout in milliseconds. **Default:** 0 (no timeout) The number of milliseconds of inactivity before a socket is presumed to have timed out. @@ -2920,7 +2920,7 @@ changes: and trailing whitespace validation for HTTP/2 header field names and values as per [RFC-9113](https://www.rfc-editor.org/rfc/rfc9113.html#section-8.2.1). **Default:** `true`. - * ...: Any [`net.createServer()`][] option can be provided. + * `...options` {Object} Any [`net.createServer()`][] option can be provided. * `onRequestHandler` {Function} See [Compatibility API][] * Returns: {Http2Server} @@ -3083,8 +3083,8 @@ changes: settings types, which are included in the `customSettings`-property of the received remoteSettings. 
Please see the `customSettings`-property of the `Http2Settings` object for more information, on the allowed setting types. - * ...: Any [`tls.createServer()`][] options can be provided. For - servers, the identity options (`pfx` or `key`/`cert`) are usually required. + * `...options` {Object} Any [`tls.createServer()`][] options can be provided. + For servers, the identity options (`pfx` or `key`/`cert`) are usually required. * `origins` {string\[]} An array of origin strings to send within an `ORIGIN` frame immediately following creation of a new server `Http2Session`. * `unknownProtocolTimeout` {number} Specifies a timeout in milliseconds that @@ -3245,7 +3245,8 @@ changes: * `createConnection` {Function} An optional callback that receives the `URL` instance passed to `connect` and the `options` object, and returns any [`Duplex`][] stream that is to be used as the connection for this session. - * ...: Any [`net.connect()`][] or [`tls.connect()`][] options can be provided. + * `...options` {Object} Any [`net.connect()`][] or [`tls.connect()`][] options + can be provided. * `unknownProtocolTimeout` {number} Specifies a timeout in milliseconds that a server should wait when an [`'unknownProtocol'`][] event is emitted. If the socket has not been destroyed by that time the server will destroy it. @@ -3370,8 +3371,7 @@ added: --> * `socket` {stream.Duplex} -* `options` {Object} - * ...: Any [`http2.createServer()`][] option can be provided. +* `options` {Object} Any [`http2.createServer()`][] option can be provided. * Returns: {ServerHttp2Session} Create an HTTP/2 server session from an existing socket. @@ -3384,7 +3384,7 @@ added: - v14.18.0 --> -* {symbol} +* Type: {symbol} This symbol can be set as a property on the HTTP/2 headers object with an array value in order to provide a list of headers considered sensitive. @@ -3948,7 +3948,7 @@ Just like `'end'`, this event occurs only once per response. added: v10.1.0 --> -* {boolean} +* Type: {boolean} The `request.aborted` property will be `true` if the request has been aborted. @@ -3959,7 +3959,7 @@ been aborted. added: v8.4.0 --> -* {string} +* Type: {string} The request authority pseudo header field. Because HTTP/2 allows requests to set either `:authority` or `host`, this value is derived from @@ -3972,7 +3972,7 @@ to set either `:authority` or `host`, this value is derived from added: v12.10.0 --> -* {boolean} +* Type: {boolean} The `request.complete` property will be `true` if the request has been completed, aborted, or destroyed. @@ -3986,7 +3986,7 @@ deprecated: v13.0.0 > Stability: 0 - Deprecated. Use [`request.socket`][]. -* {net.Socket|tls.TLSSocket} +* Type: {net.Socket|tls.TLSSocket} See [`request.socket`][]. @@ -4010,7 +4010,7 @@ It does nothing if the stream was already destroyed. added: v8.4.0 --> -* {Object} +* Type: {Object} The request/response headers object. @@ -4044,7 +4044,7 @@ assert(request.url); // Fails because the :path header has been removed added: v8.4.0 --> -* {string} +* Type: {string} In case of server request, the HTTP version sent by the client. In the case of client response, the HTTP version of the connected-to server. Returns @@ -4059,7 +4059,7 @@ Also `message.httpVersionMajor` is the first integer and added: v8.4.0 --> -* {string} +* Type: {string} The request method as a string. Read-only. Examples: `'GET'`, `'DELETE'`. @@ -4069,7 +4069,7 @@ The request method as a string. Read-only. Examples: `'GET'`, `'DELETE'`. 
added: v8.4.0 --> -* {string\[]} +* Type: {string\[]} The raw request/response headers list exactly as they were received. @@ -4099,7 +4099,7 @@ console.log(request.rawHeaders); added: v8.4.0 --> -* {string\[]} +* Type: {string\[]} The raw request/response trailer keys and values exactly as they were received. Only populated at the `'end'` event. @@ -4110,7 +4110,7 @@ received. Only populated at the `'end'` event. added: v8.4.0 --> -* {string} +* Type: {string} The request scheme pseudo header field indicating the scheme portion of the target URL. @@ -4140,7 +4140,7 @@ events, timed out sockets must be handled explicitly. added: v8.4.0 --> -* {net.Socket|tls.TLSSocket} +* Type: {net.Socket|tls.TLSSocket} Returns a `Proxy` object that acts as a `net.Socket` (or `tls.TLSSocket`) but applies getters, setters, and methods based on HTTP/2 logic. @@ -4167,7 +4167,7 @@ authentication details. added: v8.4.0 --> -* {Http2Stream} +* Type: {Http2Stream} The [`Http2Stream`][] object backing the request. @@ -4177,7 +4177,7 @@ The [`Http2Stream`][] object backing the request. added: v8.4.0 --> -* {Object} +* Type: {Object} The request/response trailers object. Only populated at the `'end'` event. @@ -4187,7 +4187,7 @@ The request/response trailers object. Only populated at the `'end'` event. added: v8.4.0 --> -* {string} +* Type: {string} Request URL string. This contains only the URL that is present in the actual HTTP request. If the request is: @@ -4314,7 +4314,7 @@ deprecated: v13.0.0 > Stability: 0 - Deprecated. Use [`response.socket`][]. -* {net.Socket|tls.TLSSocket} +* Type: {net.Socket|tls.TLSSocket} See [`response.socket`][]. @@ -4380,7 +4380,7 @@ deprecated: > Stability: 0 - Deprecated. Use [`response.writableEnded`][]. -* {boolean} +* Type: {boolean} Boolean value that indicates whether the response has completed. Starts as `false`. After [`response.end()`][] executes, the value will be `true`. @@ -4469,7 +4469,7 @@ const hasContentType = response.hasHeader('content-type'); added: v8.4.0 --> -* {boolean} +* Type: {boolean} True if headers were sent, false otherwise (read-only). @@ -4493,7 +4493,7 @@ response.removeHeader('Content-Encoding'); added: v15.7.0 --> -* {http2.Http2ServerRequest} +* Type: {http2.Http2ServerRequest} A reference to the original HTTP2 `request` object. @@ -4503,7 +4503,7 @@ A reference to the original HTTP2 `request` object. added: v8.4.0 --> -* {boolean} +* Type: {boolean} When true, the Date header will be automatically generated and sent in the response if it is not already present in the headers. Defaults to true. @@ -4576,7 +4576,7 @@ events, timed out sockets must be handled explicitly. added: v8.4.0 --> -* {net.Socket|tls.TLSSocket} +* Type: {net.Socket|tls.TLSSocket} Returns a `Proxy` object that acts as a `net.Socket` (or `tls.TLSSocket`) but applies getters, setters, and methods based on HTTP/2 logic. @@ -4619,7 +4619,7 @@ const server = http2.createServer((req, res) => { added: v8.4.0 --> -* {number} +* Type: {number} When using implicit headers (not calling [`response.writeHead()`][] explicitly), this property controls the status code that will be sent to the client when @@ -4638,7 +4638,7 @@ status code which was sent out. added: v8.4.0 --> -* {string} +* Type: {string} Status message is not supported by HTTP/2 (RFC 7540 8.1.2.4). It returns an empty string. @@ -4649,7 +4649,7 @@ an empty string. added: v8.4.0 --> -* {Http2Stream} +* Type: {Http2Stream} The [`Http2Stream`][] object backing the response. 
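A short, purely illustrative sketch of reaching the backing stream from the compatibility API:

```js
const http2 = require('node:http2');

const server = http2.createServer((req, res) => {
  // The compatibility-layer response exposes the backing Http2Stream.
  console.log(res.stream.id, res.stream.session.type);
  res.end('ok');
});

server.listen(0);
```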
@@ -4659,7 +4659,7 @@ The [`Http2Stream`][] object backing the response. added: v12.9.0 --> -* {boolean} +* Type: {boolean} Is `true` after [`response.end()`][] has been called. This property does not indicate whether the data has been flushed, for this use diff --git a/doc/api/https.md b/doc/api/https.md index 86b7fd0be1cdf9..29c60135e42718 100644 --- a/doc/api/https.md +++ b/doc/api/https.md @@ -166,7 +166,7 @@ See [`server.closeIdleConnections()`][] in the `node:http` module. added: v11.3.0 --> -* {number} **Default:** `60000` +* Type: {number} **Default:** `60000` See [`server.headersTimeout`][] in the `node:http` module. @@ -177,7 +177,7 @@ This method is identical to [`server.listen()`][] from [`net.Server`][]. ### `server.maxHeadersCount` -* {number} **Default:** `2000` +* Type: {number} **Default:** `2000` See [`server.maxHeadersCount`][] in the `node:http` module. @@ -192,7 +192,7 @@ changes: from no timeout to 300s (5 minutes). --> -* {number} **Default:** `300000` +* Type: {number} **Default:** `300000` See [`server.requestTimeout`][] in the `node:http` module. @@ -218,7 +218,7 @@ changes: description: The default timeout changed from 120s to 0 (no timeout). --> -* {number} **Default:** 0 (no timeout) +* Type: {number} **Default:** 0 (no timeout) See [`server.timeout`][] in the `node:http` module. @@ -228,7 +228,7 @@ See [`server.timeout`][] in the `node:http` module. added: v8.0.0 --> -* {number} **Default:** `5000` (5 seconds) +* Type: {number} **Default:** `5000` (5 seconds) See [`server.keepAliveTimeout`][] in the `node:http` module. diff --git a/doc/api/inspector.md b/doc/api/inspector.md index 5e9ef3a6326ece..22cf8d13c831b6 100644 --- a/doc/api/inspector.md +++ b/doc/api/inspector.md @@ -64,7 +64,7 @@ command. added: v8.0.0 --> -* {Object} The notification message object +* Type: {Object} The notification message object Emitted when any notification from the V8 Inspector is received. @@ -79,13 +79,13 @@ session.on('inspectorNotification', (message) => console.log(message.method)); It is also possible to subscribe only to notifications with specific method: -#### Event: ``; +#### Event: `` -* {Object} The notification message object +* Type: {Object} The notification message object Emitted when an inspector notification is received that has its method field set to the `` value. @@ -243,7 +243,7 @@ command. added: v8.0.0 --> -* {Object} The notification message object +* Type: {Object} The notification message object Emitted when any notification from the V8 Inspector is received. @@ -264,7 +264,7 @@ It is also possible to subscribe only to notifications with specific method: added: v8.0.0 --> -* {Object} The notification message object +* Type: {Object} The notification message object Emitted when an inspector notification is received that has its method field set to the `` value. @@ -420,7 +420,7 @@ are closed. Once all connections are closed, deactivates the inspector. ### `inspector.console` -* {Object} An object to send messages to the remote inspector console. +* Type: {Object} An object to send messages to the remote inspector console. ```js require('node:inspector').console.log('a message'); @@ -594,6 +594,43 @@ This feature is only available with the `--experimental-network-inspection` flag Broadcasts the `Network.loadingFailed` event to connected frontends. This event indicates that HTTP request has failed to load. 
+### `inspector.NetworkResources.put` + + + +> Stability: 1.1 - Active Development + +This feature is only available with the `--experimental-inspector-network-resource` flag enabled. + +The inspector.NetworkResources.put method is used to provide a response for a loadNetworkResource +request issued via the Chrome DevTools Protocol (CDP). +This is typically triggered when a source map is specified by URL, and a DevTools frontend—such as +Chrome—requests the resource to retrieve the source map. + +This method allows developers to predefine the resource content to be served in response to such CDP requests. + +```js +const inspector = require('node:inspector'); +// By preemptively calling put to register the resource, a source map can be resolved when +// a loadNetworkResource request is made from the frontend. +async function setNetworkResources() { + const mapUrl = 'http://localhost:3000/dist/app.js.map'; + const tsUrl = 'http://localhost:3000/src/app.ts'; + const distAppJsMap = await fetch(mapUrl).then((res) => res.text()); + const srcAppTs = await fetch(tsUrl).then((res) => res.text()); + inspector.NetworkResources.put(mapUrl, distAppJsMap); + inspector.NetworkResources.put(tsUrl, srcAppTs); +}; +setNetworkResources().then(() => { + require('./dist/app'); +}); +``` + +For more details, see the official CDP documentation: [Network.loadNetworkResource](https://chromedevtools.github.io/devtools-protocol/tot/Network/#method-loadNetworkResource) + ## Support of breakpoints The Chrome DevTools Protocol [`Debugger` domain][] allows an diff --git a/doc/api/module.md b/doc/api/module.md index 0fad98bc1422de..cd4a6474627c3d 100644 --- a/doc/api/module.md +++ b/doc/api/module.md @@ -8,7 +8,7 @@ added: v0.3.7 ## The `Module` object -* {Object} +* Type: {Object} Provides general utility methods when interacting with instances of `Module`, the [`module`][] variable often seen in [CommonJS][] modules. Accessed @@ -23,7 +23,7 @@ added: - v6.13.0 --> -* {string\[]} +* Type: {string\[]} A list of the names of all modules provided by Node.js. Can be used to verify if a module is maintained by a third party or not. @@ -1821,13 +1821,13 @@ Creates a new `sourceMap` instance. `payload` is an object with keys matching the [Source map format][]: -* `file`: {string} -* `version`: {number} -* `sources`: {string\[]} -* `sourcesContent`: {string\[]} -* `names`: {string\[]} -* `mappings`: {string} -* `sourceRoot`: {string} +* `file` {string} +* `version` {number} +* `sources` {string\[]} +* `sourcesContent` {string\[]} +* `names` {string\[]} +* `mappings` {string} +* `sourceRoot` {string} `lineLengths` is an optional array of the length of each line in the generated code. @@ -1852,17 +1852,17 @@ original file if found, or an empty object if not. 
The object returned contains the following keys: -* generatedLine: {number} The line offset of the start of the +* `generatedLine` {number} The line offset of the start of the range in the generated source -* generatedColumn: {number} The column offset of start of the +* `generatedColumn` {number} The column offset of start of the range in the generated source -* originalSource: {string} The file name of the original source, +* `originalSource` {string} The file name of the original source, as reported in the SourceMap -* originalLine: {number} The line offset of the start of the +* `originalLine` {number} The line offset of the start of the range in the original source -* originalColumn: {number} The column offset of start of the +* `originalColumn` {number} The column offset of start of the range in the original source -* name: {string} +* `name` {string} The returned value represents the raw range as it appears in the SourceMap, based on zero-indexed offsets, _not_ 1-indexed line and @@ -1896,13 +1896,13 @@ If the `lineNumber` and `columnNumber` provided are not found in any source map, then an empty object is returned. Otherwise, the returned object contains the following keys: -* name: {string | undefined} The name of the range in the +* `name` {string|undefined} The name of the range in the source map, if one was provided -* fileName: {string} The file name of the original source, as +* `fileName` {string} The file name of the original source, as reported in the SourceMap -* lineNumber: {number} The 1-indexed lineNumber of the +* `lineNumber` {number} The 1-indexed lineNumber of the corresponding call site in the original source -* columnNumber: {number} The 1-indexed columnNumber of the +* `columnNumber` {number} The 1-indexed columnNumber of the corresponding call site in the original source [CommonJS]: modules.md diff --git a/doc/api/modules.md b/doc/api/modules.md index 3cf0f4a928b53d..c57a3ae4491c79 100644 --- a/doc/api/modules.md +++ b/doc/api/modules.md @@ -762,9 +762,7 @@ By doing this, Node.js achieves a few things: added: v0.1.27 --> - - -* {string} +* Type: {string} The directory name of the current module. This is the same as the [`path.dirname()`][] of the [`__filename`][]. @@ -784,9 +782,7 @@ console.log(path.dirname(__filename)); added: v0.0.1 --> - - -* {string} +* Type: {string} The file name of the current module. This is the current module file's absolute path with symlinks resolved. @@ -823,9 +819,7 @@ References to `__filename` within `b.js` will return added: v0.1.12 --> - - -* {Object} +* Type: {Object} A reference to the `module.exports` that is shorter to type. See the section about the [exports shortcut][] for details on when to use @@ -837,9 +831,7 @@ See the section about the [exports shortcut][] for details on when to use added: v0.1.16 --> - - -* {module} +* Type: {module} A reference to the current module, see the section about the [`module` object][]. In particular, `module.exports` is used for defining what @@ -851,8 +843,6 @@ a module exports and makes available through `require()`. added: v0.1.13 --> - - * `id` {string} module name or path * Returns: {any} exported module content @@ -882,7 +872,7 @@ const crypto = require('node:crypto'); added: v0.3.0 --> -* {Object} +* Type: {Object} Modules are cached in this object when they are required. By deleting a key value from this object, the next `require` will reload the module. 
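For example (using a hypothetical local module `./config.js`, shown only for illustration), forcing a reload looks like this:

```js
// './config' is a hypothetical module used only for this sketch.
const first = require('./config');

// Evict the cached entry; the next require() re-evaluates the file.
delete require.cache[require.resolve('./config')];
const second = require('./config');

console.log(first === second); // false: two separate module instances
```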
@@ -916,7 +906,7 @@ deprecated: v0.10.6 > Stability: 0 - Deprecated -* {Object} +* Type: {Object} Instruct `require` on how to handle certain file extensions. @@ -940,7 +930,7 @@ extensions gets slower with each registered extension. added: v0.1.17 --> -* {module | undefined} +* Type: {module | undefined} The `Module` object representing the entry script loaded when the Node.js process launched, or `undefined` if the entry point of the program is not a @@ -1018,11 +1008,9 @@ Returns an array containing the paths searched during resolution of `request` or added: v0.1.16 --> - - -* {Object} +* Type: {Object} In each module, the `module` free variable is a reference to the object representing the current module. For convenience, `module.exports` is @@ -1035,7 +1023,7 @@ a global but rather local to each module. added: v0.1.16 --> -* {module\[]} +* Type: {module\[]} The module objects required for the first time by this one. @@ -1045,7 +1033,7 @@ The module objects required for the first time by this one. added: v0.1.16 --> -* {Object} +* Type: {Object} The `module.exports` object is created by the `Module` system. Sometimes this is not acceptable; many want their module to be an instance of some class. To do @@ -1149,7 +1137,7 @@ function require(/* ... */) { added: v0.1.16 --> -* {string} +* Type: {string} The fully resolved filename of the module. @@ -1159,7 +1147,7 @@ The fully resolved filename of the module. added: v0.1.16 --> -* {string} +* Type: {string} The identifier for the module. Typically this is the fully resolved filename. @@ -1181,7 +1169,7 @@ added: added: v0.1.16 --> -* {boolean} +* Type: {boolean} Whether or not the module is done loading, or is in the process of loading. @@ -1198,7 +1186,7 @@ deprecated: > Stability: 0 - Deprecated: Please use [`require.main`][] and > [`module.children`][] instead. -* {module | null | undefined} +* Type: {module | null | undefined} The module that first required this one, or `null` if the current module is the entry point of the current process, or `undefined` if the module was loaded by @@ -1210,7 +1198,7 @@ something that is not a CommonJS module (E.G.: REPL or `import`). added: v11.14.0 --> -* {string} +* Type: {string} The directory name of the module. This is usually the same as the [`path.dirname()`][] of the [`module.id`][]. @@ -1221,7 +1209,7 @@ The directory name of the module. This is usually the same as the added: v0.4.0 --> -* {string\[]} +* Type: {string\[]} The search paths for the module. diff --git a/doc/api/n-api.md b/doc/api/n-api.md index 211bd73ed4cf1e..d16ff2ec29d498 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -446,7 +446,7 @@ NAPI_MODULE_INIT(/* napi_env env, napi_value exports */) { ## Environment life cycle APIs -[Section 8.7][] of the [ECMAScript Language Specification][] defines the concept +[Section Agents][] of the [ECMAScript Language Specification][] defines the concept of an "Agent" as a self-contained environment in which JavaScript code runs. Multiple such Agents may be started and terminated either concurrently or in sequence by the process. @@ -2139,7 +2139,7 @@ API. ## Working with JavaScript values Node-API exposes a set of APIs to create all types of JavaScript values. -Some of these types are documented under [Section 6][] +Some of these types are documented under [Section language types][] of the [ECMAScript Language Specification][]. Fundamentally, these APIs are used to do one of the following: @@ -2245,7 +2245,7 @@ typedef enum { ``` Describes the type of a `napi_value`. 
This generally corresponds to the types -described in [Section 6.1][] of the ECMAScript Language Specification. +described in [Section language types][] of the ECMAScript Language Specification. In addition to types in that section, `napi_valuetype` can also represent `Function`s and `Object`s with external data. @@ -2272,7 +2272,7 @@ typedef enum { This represents the underlying binary scalar datatype of the `TypedArray`. Elements of this enum correspond to -[Section 22.2][] of the [ECMAScript Language Specification][]. +[Section TypedArray objects][] of the [ECMAScript Language Specification][]. ### Object creation functions @@ -2294,7 +2294,7 @@ Returns `napi_ok` if the API succeeded. This API returns a Node-API value corresponding to a JavaScript `Array` type. JavaScript arrays are described in -[Section 22.1][] of the ECMAScript Language Specification. +[Section Array objects][] of the ECMAScript Language Specification. #### `napi_create_array_with_length` @@ -2324,7 +2324,7 @@ directly read and/or written via C, consider using [`napi_create_external_arraybuffer`][]. JavaScript arrays are described in -[Section 22.1][] of the ECMAScript Language Specification. +[Section Array objects][] of the ECMAScript Language Specification. #### `napi_create_arraybuffer` @@ -2359,7 +2359,7 @@ written to directly from native code. To write to this buffer from JavaScript, a typed array or `DataView` object would need to be created. JavaScript `ArrayBuffer` objects are described in -[Section 24.1][] of the ECMAScript Language Specification. +[Section ArrayBuffer objects][] of the ECMAScript Language Specification. #### `napi_create_buffer` @@ -2442,7 +2442,7 @@ ECMAScript aligns with POSIX time specification. This API allocates a JavaScript `Date` object. JavaScript `Date` objects are described in -[Section 20.3][] of the ECMAScript Language Specification. +[Section Date objects][] of the ECMAScript Language Specification. #### `napi_create_external` @@ -2531,7 +2531,7 @@ The API adds a `napi_finalize` callback which will be called when the JavaScript object just created has been garbage collected. JavaScript `ArrayBuffer`s are described in -[Section 24.1][] of the ECMAScript Language Specification. +[Section ArrayBuffer objects][] of the ECMAScript Language Specification. #### `napi_create_external_buffer` @@ -2602,7 +2602,7 @@ Returns `napi_ok` if the API succeeded. This API allocates a default JavaScript `Object`. It is the equivalent of doing `new Object()` in JavaScript. -The JavaScript `Object` type is described in [Section 6.1.7][] of the +The JavaScript `Object` type is described in [Section object type][] of the ECMAScript Language Specification. #### `napi_create_symbol` @@ -2627,7 +2627,7 @@ Returns `napi_ok` if the API succeeded. This API creates a JavaScript `symbol` value from a UTF8-encoded C string. -The JavaScript `symbol` type is described in [Section 19.4][] +The JavaScript `symbol` type is described in [Section symbol type][] of the ECMAScript Language Specification. #### `node_api_symbol_for` @@ -2659,7 +2659,7 @@ This API searches in the global registry for an existing symbol with the given description. If the symbol already exists it will be returned, otherwise a new symbol will be created in the registry. -The JavaScript `symbol` type is described in [Section 19.4][] of the ECMAScript +The JavaScript `symbol` type is described in [Section symbol type][] of the ECMAScript Language Specification. 
#### `napi_create_typedarray` @@ -2698,7 +2698,7 @@ be <= the size in bytes of the array passed in. If not, a `RangeError` exception is raised. JavaScript `TypedArray` objects are described in -[Section 22.2][] of the ECMAScript Language Specification. +[Section TypedArray objects][] of the ECMAScript Language Specification. #### `node_api_create_buffer_from_arraybuffer` @@ -2763,7 +2763,7 @@ size in bytes of the array passed in. If not, a `RangeError` exception is raised. JavaScript `DataView` objects are described in -[Section 24.3][] of the ECMAScript Language Specification. +[Section DataView objects][] of the ECMAScript Language Specification. ### Functions to convert from C types to Node-API @@ -2788,7 +2788,7 @@ This API is used to convert from the C `int32_t` type to the JavaScript `number` type. The JavaScript `number` type is described in -[Section 6.1.6][] of the ECMAScript Language Specification. +[Section number type][] of the ECMAScript Language Specification. #### `napi_create_uint32` @@ -2811,7 +2811,7 @@ This API is used to convert from the C `uint32_t` type to the JavaScript `number` type. The JavaScript `number` type is described in -[Section 6.1.6][] of the ECMAScript Language Specification. +[Section number type][] of the ECMAScript Language Specification. #### `napi_create_int64` @@ -2833,7 +2833,7 @@ Returns `napi_ok` if the API succeeded. This API is used to convert from the C `int64_t` type to the JavaScript `number` type. -The JavaScript `number` type is described in [Section 6.1.6][] +The JavaScript `number` type is described in [Section number type][] of the ECMAScript Language Specification. Note the complete range of `int64_t` cannot be represented with full precision in JavaScript. Integer values outside the range of [`Number.MIN_SAFE_INTEGER`][] `-(2**53 - 1)` - @@ -2860,7 +2860,7 @@ This API is used to convert from the C `double` type to the JavaScript `number` type. The JavaScript `number` type is described in -[Section 6.1.6][] of the ECMAScript Language Specification. +[Section number type][] of the ECMAScript Language Specification. #### `napi_create_bigint_int64` @@ -2960,7 +2960,7 @@ This API creates a JavaScript `string` value from an ISO-8859-1-encoded C string. The native string is copied. The JavaScript `string` type is described in -[Section 6.1.4][] of the ECMAScript Language Specification. +[Section string type][] of the ECMAScript Language Specification. #### `node_api_create_external_string_latin1` @@ -3011,7 +3011,7 @@ string. The native string may not be copied and must thus exist for the entire life cycle of the JavaScript value. The JavaScript `string` type is described in -[Section 6.1.4][] of the ECMAScript Language Specification. +[Section string type][] of the ECMAScript Language Specification. #### `napi_create_string_utf16` @@ -3039,7 +3039,7 @@ This API creates a JavaScript `string` value from a UTF16-LE-encoded C string. The native string is copied. The JavaScript `string` type is described in -[Section 6.1.4][] of the ECMAScript Language Specification. +[Section string type][] of the ECMAScript Language Specification. #### `node_api_create_external_string_utf16` @@ -3090,7 +3090,7 @@ The native string may not be copied and must thus exist for the entire life cycle of the JavaScript value. The JavaScript `string` type is described in -[Section 6.1.4][] of the ECMAScript Language Specification. +[Section string type][] of the ECMAScript Language Specification. 
#### `napi_create_string_utf8` @@ -3118,7 +3118,7 @@ This API creates a JavaScript `string` value from a UTF8-encoded C string. The native string is copied. The JavaScript `string` type is described in -[Section 6.1.4][] of the ECMAScript Language Specification. +[Section string type][] of the ECMAScript Language Specification. ### Functions to create optimized property keys @@ -3165,7 +3165,7 @@ subsequent calls to this function with the same `str` pointer may benefit from a in the creation of the requested `napi_value`, depending on the engine. The JavaScript `string` type is described in -[Section 6.1.4][] of the ECMAScript Language Specification. +[Section string type][] of the ECMAScript Language Specification. #### `node_api_create_property_key_utf16` @@ -3197,7 +3197,7 @@ a UTF16-LE-encoded C string to be used as a property key for objects. The native string is copied. The JavaScript `string` type is described in -[Section 6.1.4][] of the ECMAScript Language Specification. +[Section string type][] of the ECMAScript Language Specification. #### `node_api_create_property_key_utf8` @@ -3227,7 +3227,7 @@ a UTF8-encoded C string to be used as a property key for objects. The native string is copied. The JavaScript `string` type is described in -[Section 6.1.4][] of the ECMAScript Language Specification. +[Section string type][] of the ECMAScript Language Specification. ### Functions to convert from Node-API to C types @@ -3253,7 +3253,7 @@ Returns `napi_ok` if the API succeeded. This API returns the length of an array. -`Array` length is described in [Section 22.1.4.1][] of the ECMAScript Language +`Array` length is described in [Section Array instance length][] of the ECMAScript Language Specification. #### `napi_get_arraybuffer_info` @@ -3862,8 +3862,7 @@ This API returns the Undefined object. ## Working with JavaScript values and abstract operations Node-API exposes a set of APIs to perform some abstract operations on JavaScript -values. Some of these operations are documented under [Section 7][] -of the [ECMAScript Language Specification][]. +values. These APIs support doing one of the following: @@ -3892,7 +3891,7 @@ napi_status napi_coerce_to_bool(napi_env env, Returns `napi_ok` if the API succeeded. This API implements the abstract operation `ToBoolean()` as defined in -[Section 7.1.2][] of the ECMAScript Language Specification. +[Section ToBoolean][] of the ECMAScript Language Specification. ### `napi_coerce_to_number` @@ -3914,7 +3913,7 @@ napi_status napi_coerce_to_number(napi_env env, Returns `napi_ok` if the API succeeded. This API implements the abstract operation `ToNumber()` as defined in -[Section 7.1.3][] of the ECMAScript Language Specification. +[Section ToNumber][] of the ECMAScript Language Specification. This function potentially runs JS code if the passed-in value is an object. @@ -3938,7 +3937,7 @@ napi_status napi_coerce_to_object(napi_env env, Returns `napi_ok` if the API succeeded. This API implements the abstract operation `ToObject()` as defined in -[Section 7.1.13][] of the ECMAScript Language Specification. +[Section ToObject][] of the ECMAScript Language Specification. ### `napi_coerce_to_string` @@ -3960,7 +3959,7 @@ napi_status napi_coerce_to_string(napi_env env, Returns `napi_ok` if the API succeeded. This API implements the abstract operation `ToString()` as defined in -[Section 7.1.13][] of the ECMAScript Language Specification. +[Section ToString][] of the ECMAScript Language Specification. 
This function potentially runs JS code if the passed-in value is an object. @@ -3985,7 +3984,7 @@ Returns `napi_ok` if the API succeeded. `value` is not an External value. This API represents behavior similar to invoking the `typeof` Operator on -the object as defined in [Section 12.5.5][] of the ECMAScript Language +the object as defined in [Section typeof operator][] of the ECMAScript Language Specification. However, there are some differences: 1. It has support for detecting an External value. @@ -4018,7 +4017,7 @@ napi_status napi_instanceof(napi_env env, Returns `napi_ok` if the API succeeded. This API represents invoking the `instanceof` Operator on the object as -defined in [Section 12.10.4][] of the ECMAScript Language Specification. +defined in [Section instanceof operator][] of the ECMAScript Language Specification. ### `napi_is_array` @@ -4038,7 +4037,7 @@ napi_status napi_is_array(napi_env env, napi_value value, bool* result) Returns `napi_ok` if the API succeeded. This API represents invoking the `IsArray` operation on the object -as defined in [Section 7.2.2][] of the ECMAScript Language Specification. +as defined in [Section IsArray][] of the ECMAScript Language Specification. ### `napi_is_arraybuffer` @@ -4182,7 +4181,7 @@ napi_status napi_strict_equals(napi_env env, Returns `napi_ok` if the API succeeded. This API represents the invocation of the Strict Equality algorithm as -defined in [Section 7.2.14][] of the ECMAScript Language Specification. +defined in [Section IsStrctEqual][] of the ECMAScript Language Specification. ### `napi_detach_arraybuffer` @@ -4211,7 +4210,7 @@ detachable. For example, V8 requires that the `ArrayBuffer` be external, that is, created with [`napi_create_external_arraybuffer`][]. This API represents the invocation of the `ArrayBuffer` detach operation as -defined in [Section 24.1.1.3][] of the ECMAScript Language Specification. +defined in [Section detachArrayBuffer][] of the ECMAScript Language Specification. ### `napi_is_detached_arraybuffer` @@ -4238,14 +4237,13 @@ Returns `napi_ok` if the API succeeded. The `ArrayBuffer` is considered detached if its internal data is `null`. This API represents the invocation of the `ArrayBuffer` `IsDetachedBuffer` -operation as defined in [Section 24.1.1.2][] of the ECMAScript Language +operation as defined in [Section isDetachedBuffer][] of the ECMAScript Language Specification. ## Working with JavaScript properties Node-API exposes a set of APIs to get and set properties on JavaScript -objects. Some of these types are documented under [Section 7][] of the -[ECMAScript Language Specification][]. +objects. Properties in JavaScript are represented as a tuple of a key and a value. Fundamentally, all property keys in Node-API can be represented in one of the @@ -4415,7 +4413,7 @@ typedef enum { `napi_property_attributes` are flags used to control the behavior of properties set on a JavaScript object. Other than `napi_static` they correspond to the -attributes listed in [Section 6.1.7.1][] +attributes listed in [Section property attributes][] of the [ECMAScript Language Specification][]. They can be one or more of the following bitflags: @@ -4424,7 +4422,7 @@ They can be one or more of the following bitflags: * `napi_writable`: The property is writable. * `napi_enumerable`: The property is enumerable. * `napi_configurable`: The property is configurable as defined in - [Section 6.1.7.1][] of the [ECMAScript Language Specification][]. + [Section property attributes][] of the [ECMAScript Language Specification][]. 
* `napi_static`: The property will be defined as a static property on a class as opposed to an instance property, which is the default. This is used only by [`napi_define_class`][]. It is ignored by `napi_define_properties`. @@ -4850,7 +4848,7 @@ This method allows the efficient definition of multiple properties on a given object. The properties are defined using property descriptors (see [`napi_property_descriptor`][]). Given an array of such property descriptors, this API will set the properties on the object one at a time, as defined by -`DefineOwnProperty()` (described in [Section 9.1.6][] of the ECMA-262 +`DefineOwnProperty()` (described in [Section DefineOwnProperty][] of the ECMA-262 specification). #### `napi_object_freeze` @@ -5078,7 +5076,7 @@ be associated with the resulting JavaScript function (which is returned in the `result` parameter) and freed whenever the function is garbage-collected by passing both the JavaScript function and the data to [`napi_add_finalizer`][]. -JavaScript `Function`s are described in [Section 19.2][] of the ECMAScript +JavaScript `Function`s are described in [Section Function objects][] of the ECMAScript Language Specification. ### `napi_get_cb_info` @@ -6148,7 +6146,7 @@ increased the external memory. ## Promises Node-API provides facilities for creating `Promise` objects as described in -[Section 25.4][] of the ECMA specification. It implements promises as a pair of +[Section Promise objects][] of the ECMA specification. It implements promises as a pair of objects. When a promise is created by `napi_create_promise()`, a "deferred" object is created and returned alongside the `Promise`. The deferred object is bound to the created `Promise` and is the only means to resolve or reject the @@ -6734,7 +6732,7 @@ the add-on's file name during loading. [C++ Addons]: addons.md [CMake]: https://cmake.org [CMake.js]: https://github.com/cmake-js/cmake-js -[ECMAScript Language Specification]: https://tc39.github.io/ecma262/ +[ECMAScript Language Specification]: https://tc39.es/ecma262/ [Error handling]: #error-handling [GCC]: https://gcc.gnu.org [GYP]: https://gyp.gsrc.io @@ -6744,40 +6742,39 @@ the add-on's file name during loading. 
[Node-API Media]: https://github.com/nodejs/abi-stable-node/blob/HEAD/node-api-media.md [Object lifetime management]: #object-lifetime-management [Object wrap]: #object-wrap -[Section 12.10.4]: https://tc39.github.io/ecma262/#sec-instanceofoperator -[Section 12.5.5]: https://tc39.github.io/ecma262/#sec-typeof-operator -[Section 19.2]: https://tc39.github.io/ecma262/#sec-function-objects -[Section 19.4]: https://tc39.github.io/ecma262/#sec-symbol-objects -[Section 20.3]: https://tc39.github.io/ecma262/#sec-date-objects -[Section 22.1]: https://tc39.github.io/ecma262/#sec-array-objects -[Section 22.1.4.1]: https://tc39.github.io/ecma262/#sec-properties-of-array-instances-length -[Section 22.2]: https://tc39.github.io/ecma262/#sec-typedarray-objects -[Section 24.1]: https://tc39.github.io/ecma262/#sec-arraybuffer-objects -[Section 24.1.1.2]: https://tc39.es/ecma262/#sec-isdetachedbuffer -[Section 24.1.1.3]: https://tc39.es/ecma262/#sec-detacharraybuffer -[Section 24.3]: https://tc39.github.io/ecma262/#sec-dataview-objects -[Section 25.4]: https://tc39.github.io/ecma262/#sec-promise-objects -[Section 6]: https://tc39.github.io/ecma262/#sec-ecmascript-data-types-and-values -[Section 6.1]: https://tc39.github.io/ecma262/#sec-ecmascript-language-types -[Section 6.1.4]: https://tc39.github.io/ecma262/#sec-ecmascript-language-types-string-type -[Section 6.1.6]: https://tc39.github.io/ecma262/#sec-ecmascript-language-types-number-type -[Section 6.1.7]: https://tc39.github.io/ecma262/#sec-object-type -[Section 6.1.7.1]: https://tc39.github.io/ecma262/#table-2 -[Section 7]: https://tc39.github.io/ecma262/#sec-abstract-operations -[Section 7.1.13]: https://tc39.github.io/ecma262/#sec-toobject -[Section 7.1.2]: https://tc39.github.io/ecma262/#sec-toboolean -[Section 7.1.3]: https://tc39.github.io/ecma262/#sec-tonumber -[Section 7.2.14]: https://tc39.github.io/ecma262/#sec-strict-equality-comparison -[Section 7.2.2]: https://tc39.github.io/ecma262/#sec-isarray -[Section 8.7]: https://tc39.es/ecma262/#sec-agents -[Section 9.1.6]: https://tc39.github.io/ecma262/#sec-ordinary-object-internal-methods-and-internal-slots-defineownproperty-p-desc +[Section Agents]: https://tc39.es/ecma262/#sec-agents +[Section Array instance length]: https://tc39.es/ecma262/#sec-properties-of-array-instances-length +[Section Array objects]: https://tc39.es/ecma262/#sec-array-objects +[Section ArrayBuffer objects]: https://tc39.es/ecma262/#sec-arraybuffer-objects +[Section DataView objects]: https://tc39.es/ecma262/#sec-dataview-objects +[Section Date objects]: https://tc39.es/ecma262/#sec-date-objects +[Section DefineOwnProperty]: https://tc39.es/ecma262/#sec-ordinary-object-internal-methods-and-internal-slots-defineownproperty-p-desc +[Section Function objects]: https://tc39.es/ecma262/#sec-function-objects +[Section IsArray]: https://tc39.es/ecma262/#sec-isarray +[Section IsStrctEqual]: https://tc39.es/ecma262/#sec-strict-equality-comparison +[Section Promise objects]: https://tc39.es/ecma262/#sec-promise-objects +[Section ToBoolean]: https://tc39.es/ecma262/#sec-toboolean +[Section ToNumber]: https://tc39.es/ecma262/#sec-tonumber +[Section ToObject]: https://tc39.es/ecma262/#sec-toobject +[Section ToString]: https://tc39.es/ecma262/#sec-tostring +[Section TypedArray objects]: https://tc39.es/ecma262/#sec-typedarray-objects +[Section detachArrayBuffer]: https://tc39.es/ecma262/#sec-detacharraybuffer +[Section instanceof operator]: https://tc39.es/ecma262/#sec-instanceofoperator +[Section isDetachedBuffer]: 
https://tc39.es/ecma262/#sec-isdetachedbuffer +[Section language types]: https://tc39.es/ecma262/#sec-ecmascript-data-types-and-values +[Section number type]: https://tc39.es/ecma262/#sec-ecmascript-language-types-number-type +[Section object type]: https://tc39.es/ecma262/#sec-object-type +[Section property attributes]: https://tc39.es/ecma262/#sec-property-attributes +[Section string type]: https://tc39.es/ecma262/#sec-ecmascript-language-types-string-type +[Section symbol type]: https://tc39.es/ecma262/#sec-ecmascript-language-types-symbol-type +[Section typeof operator]: https://tc39.es/ecma262/#sec-typeof-operator [Travis CI]: https://travis-ci.org [Visual Studio]: https://visualstudio.microsoft.com [Working with JavaScript properties]: #working-with-javascript-properties [Xcode]: https://developer.apple.com/xcode/ [`'uncaughtException'`]: process.md#event-uncaughtexception -[`Number.MAX_SAFE_INTEGER`]: https://tc39.github.io/ecma262/#sec-number.max_safe_integer -[`Number.MIN_SAFE_INTEGER`]: https://tc39.github.io/ecma262/#sec-number.min_safe_integer +[`Number.MAX_SAFE_INTEGER`]: https://tc39.es/ecma262/#sec-number.max_safe_integer +[`Number.MIN_SAFE_INTEGER`]: https://tc39.es/ecma262/#sec-number.min_safe_integer [`Worker`]: worker_threads.md#class-worker [`async_hooks.executionAsyncResource()`]: async_hooks.md#async_hooksexecutionasyncresource [`build_with_cmake`]: https://github.com/nodejs/node-addon-examples/tree/main/src/8-tooling/build_with_cmake diff --git a/doc/api/net.md b/doc/api/net.md index 5384ad7a34c42c..5385856ae8c83b 100644 --- a/doc/api/net.md +++ b/doc/api/net.md @@ -179,6 +179,38 @@ added: v22.13.0 * `value` {any} Any JS value * Returns `true` if the `value` is a `net.BlockList`. +### `blockList.fromJSON(value)` + +> Stability: 1 - Experimental + + + +```js +const blockList = new net.BlockList(); +const data = [ + 'Subnet: IPv4 192.168.1.0/24', + 'Address: IPv4 10.0.0.5', + 'Range: IPv4 192.168.2.1-192.168.2.10', + 'Range: IPv4 10.0.0.1-10.0.0.10', +]; +blockList.fromJSON(data); +blockList.fromJSON(JSON.stringify(data)); +``` + +* `value` Blocklist.rules + +### `blockList.toJSON()` + +> Stability: 1 - Experimental + + + +* Returns Blocklist.rules + ## Class: `net.SocketAddress` -* Type {string} +* Type: {string} ### `socketaddress.family` @@ -222,7 +254,7 @@ added: - v14.18.0 --> -* Type {string} Either `'ipv4'` or `'ipv6'`. +* Type: {string} Either `'ipv4'` or `'ipv6'`. ### `socketaddress.flowlabel` @@ -232,7 +264,7 @@ added: - v14.18.0 --> -* Type {number} +* Type: {number} ### `socketaddress.port` @@ -242,7 +274,7 @@ added: - v14.18.0 --> -* Type {number} +* Type: {number} ### `SocketAddress.parse(input)` @@ -290,7 +322,7 @@ event is not emitted until all connections are ended. added: v0.1.90 --> -* {net.Socket} The connection object +* Type: {net.Socket} The connection object Emitted when a new connection is made. `socket` is an instance of `net.Socket`. @@ -301,7 +333,7 @@ Emitted when a new connection is made. `socket` is an instance of added: v0.1.90 --> -* {Error} +* Type: {Error} Emitted when an error occurs. Unlike [`net.Socket`][], the [`'close'`][] event will **not** be emitted directly following this event unless @@ -616,7 +648,7 @@ may cause the `net.Server` to also listen on the [unspecified IPv4 address][] added: v5.7.0 --> -* {boolean} Indicates whether or not the server is listening for connections. +* Type: {boolean} Indicates whether or not the server is listening for connections. 
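A brief sketch of how the flag changes around `listen()`:

```js
const net = require('node:net');

const server = net.createServer();
console.log(server.listening); // false: listen() has not completed yet

server.listen(0, () => {
  console.log(server.listening); // true once the server is bound
  server.close();
});
```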
### `server.maxConnections` @@ -629,7 +661,7 @@ changes: connections. Previously, it was interpreted as `Infinity`. --> -* {integer} +* Type: {integer} When the number of connections reaches the `server.maxConnections` threshold: @@ -646,7 +678,7 @@ with [`child_process.fork()`][]. added: v22.12.0 --> -* {boolean} +* Type: {boolean} Set this property to `true` to begin closing connections once the number of connections reaches the \[`server.maxConnections`]\[] threshold. This setting is only effective in cluster mode. @@ -814,7 +846,7 @@ in [`socket.connect(options)`][]. added: v0.1.90 --> -* {Buffer|string} +* Type: {Buffer|string} Emitted when data is received. The argument `data` will be a `Buffer` or `String`. Encoding of data is set by [`socket.setEncoding()`][]. @@ -855,7 +887,7 @@ FIN packet back). added: v0.1.90 --> -* {Error} +* Type: {Error} Emitted when an error occurs. The `'close'` event will be called directly following this event. @@ -926,7 +958,7 @@ added: - v18.18.0 --> -* {string\[]} +* Type: {string\[]} This property is only present if the family autoselection algorithm is enabled in [`socket.connect(options)`][] and it is an array of the addresses that have been attempted. @@ -944,7 +976,7 @@ deprecated: > Stability: 0 - Deprecated: Use [`writable.writableLength`][] instead. -* {integer} +* Type: {integer} This property shows the number of characters buffered for writing. The buffer may contain strings whose length after encoding is not yet known. So this number @@ -967,7 +999,7 @@ Users who experience large or growing `bufferSize` should attempt to added: v0.5.3 --> -* {integer} +* Type: {integer} The amount of received bytes. @@ -977,7 +1009,7 @@ The amount of received bytes. added: v0.5.3 --> -* {integer} +* Type: {integer} The amount of bytes sent. @@ -1136,7 +1168,7 @@ called with `{port: port, host: host}` as `options`. added: v6.1.0 --> -* {boolean} +* Type: {boolean} If `true`, [`socket.connect(options[, connectListener])`][`socket.connect(options)`] was @@ -1162,7 +1194,7 @@ See [`writable.destroy()`][] for further details. ### `socket.destroyed` -* {boolean} Indicates if the connection is destroyed or not. Once a +* Type: {boolean} Indicates if the connection is destroyed or not. Once a connection is destroyed no further data can be transferred using it. See [`writable.destroyed`][] for further details. @@ -1199,7 +1231,7 @@ See [`writable.end()`][] for further details. added: v0.9.6 --> -* {string} +* Type: {string} The string representation of the local IP address the remote client is connecting on. For example, in a server listening on `'0.0.0.0'`, if a client @@ -1212,7 +1244,7 @@ connects on `'192.168.1.1'`, the value of `socket.localAddress` would be added: v0.9.6 --> -* {integer} +* Type: {integer} The numeric representation of the local port. For example, `80` or `21`. @@ -1224,7 +1256,7 @@ added: - v16.18.0 --> -* {string} +* Type: {string} The string representation of the local IP family. `'IPv4'` or `'IPv6'`. @@ -1243,7 +1275,7 @@ added: - v10.16.0 --> -* {boolean} +* Type: {boolean} This is `true` if the socket is not connected yet, either because `.connect()` has not yet been called or because it is still in the process of connecting @@ -1267,7 +1299,7 @@ If the socket is `ref`ed calling `ref` again will have no effect. added: v0.5.10 --> -* {string} +* Type: {string} The string representation of the remote IP address. For example, `'74.125.127.100'` or `'2001:4860:a005::68'`. 
Value may be `undefined` if @@ -1279,7 +1311,7 @@ the socket is destroyed (for example, if the client disconnected). added: v0.11.14 --> -* {string} +* Type: {string} The string representation of the remote IP family. `'IPv4'` or `'IPv6'`. Value may be `undefined` if the socket is destroyed (for example, if the client disconnected). @@ -1290,7 +1322,7 @@ the socket is destroyed (for example, if the client disconnected). added: v0.5.10 --> -* {integer} +* Type: {integer} The numeric representation of the remote port. For example, `80` or `21`. Value may be `undefined` if the socket is destroyed (for example, if the client disconnected). @@ -1421,7 +1453,7 @@ The optional `callback` parameter will be added as a one-time listener for the added: v10.7.0 --> -* {number|undefined} +* Type: {number|undefined} The socket timeout in milliseconds as set by [`socket.setTimeout()`][]. It is `undefined` if a timeout has not been set. @@ -1468,7 +1500,7 @@ information. added: v0.5.0 --> -* {string} +* Type: {string} This property represents the state of the connection as a string. diff --git a/doc/api/os.md b/doc/api/os.md index a367a8c4aa0234..882deab45042d8 100644 --- a/doc/api/os.md +++ b/doc/api/os.md @@ -23,7 +23,7 @@ const os = require('node:os'); added: v0.7.8 --> -* {string} +* Type: {string} The operating system-specific end-of-line marker. @@ -66,7 +66,7 @@ The return value is equivalent to [`process.arch`][]. added: v6.3.0 --> -* {Object} +* Type: {Object} Contains commonly used operating system-specific constants for error codes, process signals, and so on. The specific constants defined are described in @@ -161,7 +161,7 @@ added: - v14.18.0 --> -* {string} +* Type: {string} The platform-specific file path of the null device. diff --git a/doc/api/packages.md b/doc/api/packages.md index 08fd12a4e19c87..af25beb82a7fe8 100644 --- a/doc/api/packages.md +++ b/doc/api/packages.md @@ -1103,7 +1103,7 @@ changes: description: Implement conditional exports. --> -* Type: {Object} | {string} | {string\[]} +* Type: {Object|string|string\[]} ```json { diff --git a/doc/api/path.md b/doc/api/path.md index e26506a8a47b33..9111684633eedd 100644 --- a/doc/api/path.md +++ b/doc/api/path.md @@ -114,7 +114,7 @@ and is not a string. added: v0.9.3 --> -* {string} +* Type: {string} Provides the platform-specific path delimiter: @@ -499,7 +499,7 @@ changes: description: Exposed as `require('path/posix')`. --> -* {Object} +* Type: {Object} The `path.posix` property provides access to POSIX specific implementations of the `path` methods. @@ -593,7 +593,7 @@ A [`TypeError`][] is thrown if any of the arguments is not a string. added: v0.7.9 --> -* {string} +* Type: {string} Provides the platform-specific path segment separator: @@ -644,7 +644,7 @@ changes: description: Exposed as `require('path/win32')`. --> -* {Object} +* Type: {Object} The `path.win32` property provides access to Windows-specific implementations of the `path` methods. diff --git a/doc/api/perf_hooks.md b/doc/api/perf_hooks.md index fccaac5e730ca6..585f42053369dc 100644 --- a/doc/api/perf_hooks.md +++ b/doc/api/perf_hooks.md @@ -369,7 +369,7 @@ Performance Timeline manually with `performance.clearMeasures`. added: v8.5.0 --> -* {PerformanceNodeTiming} +* Type: {PerformanceNodeTiming} _This property is an extension by Node.js. It is not available in Web browsers._ @@ -414,7 +414,7 @@ By default the max buffer size is set to 250. 
added: v8.5.0 --> -* {number} +* Type: {number} The [`timeOrigin`][] specifies the high resolution millisecond timestamp at which the current `node` process began, measured in Unix time. @@ -541,7 +541,7 @@ changes: `PerformanceEntry` object as the receiver. --> -* {number} +* Type: {number} The total number of milliseconds elapsed for this entry. This value will not be meaningful for all Performance Entry types. @@ -557,7 +557,7 @@ changes: `PerformanceEntry` object as the receiver. --> -* {string} +* Type: {string} The type of the performance entry. It may be one of: @@ -583,7 +583,7 @@ changes: `PerformanceEntry` object as the receiver. --> -* {string} +* Type: {string} The name of the performance entry. @@ -598,7 +598,7 @@ changes: `PerformanceEntry` object as the receiver. --> -* {number} +* Type: {number} The high resolution millisecond timestamp marking the starting time of the Performance Entry. @@ -626,7 +626,7 @@ changes: `PerformanceMark` object as the receiver. --> -* {any} +* Type: {any} Additional detail specified when creating with `Performance.mark()` method. @@ -655,7 +655,7 @@ changes: `PerformanceMeasure` object as the receiver. --> -* {any} +* Type: {any} Additional detail specified when creating with `Performance.measure()` method. @@ -684,7 +684,7 @@ changes: `PerformanceNodeEntry` object as the receiver. --> -* {any} +* Type: {any} Additional detail specific to the `entryType`. @@ -703,7 +703,7 @@ changes: > Stability: 0 - Deprecated: Use `performanceNodeEntry.detail` instead. -* {number} +* Type: {number} When `performanceEntry.entryType` is equal to `'gc'`, the `performance.flags` property contains additional information about garbage collection operation. @@ -730,7 +730,7 @@ changes: > Stability: 0 - Deprecated: Use `performanceNodeEntry.detail` instead. -* {number} +* Type: {number} When `performanceEntry.entryType` is equal to `'gc'`, the `performance.kind` property identifies the type of garbage collection operation that occurred. @@ -871,7 +871,7 @@ is not exposed to users. added: v8.5.0 --> -* {number} +* Type: {number} The high resolution millisecond timestamp at which the Node.js process completed bootstrapping. If bootstrapping has not yet finished, the property @@ -883,7 +883,7 @@ has the value of -1. added: v8.5.0 --> -* {number} +* Type: {number} The high resolution millisecond timestamp at which the Node.js environment was initialized. @@ -896,7 +896,7 @@ added: - v12.19.0 --> -* {number} +* Type: {number} The high resolution millisecond timestamp of the amount of time the event loop has been idle within the event loop's event provider (e.g. `epoll_wait`). This @@ -910,7 +910,7 @@ value of 0. added: v8.5.0 --> -* {number} +* Type: {number} The high resolution millisecond timestamp at which the Node.js event loop exited. If the event loop has not yet exited, the property has the value of -1. @@ -922,7 +922,7 @@ It can only have a value of not -1 in a handler of the [`'exit'`][] event. added: v8.5.0 --> -* {number} +* Type: {number} The high resolution millisecond timestamp at which the Node.js event loop started. If the event loop has not yet started (e.g., in the first tick of the @@ -934,7 +934,7 @@ main script), the property has the value of -1. added: v8.5.0 --> -* {number} +* Type: {number} The high resolution millisecond timestamp at which the Node.js process was initialized. @@ -979,7 +979,7 @@ setImmediate(() => { added: v8.5.0 --> -* {number} +* Type: {number} The high resolution millisecond timestamp at which the V8 platform was initialized. 
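To make the `detail` properties described above more concrete, here is a small sketch of attaching custom detail to a mark and reading it back through a `PerformanceObserver` (standard `node:perf_hooks` usage):

```js
const { performance, PerformanceObserver } = require('node:perf_hooks');

const obs = new PerformanceObserver((list) => {
  for (const entry of list.getEntries()) {
    // `entry.detail` carries whatever was passed to performance.mark().
    console.log(entry.name, entry.detail);
  }
  obs.disconnect();
});
obs.observe({ entryTypes: ['mark'] });

performance.mark('db-query-start', { detail: { table: 'users' } });
```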
@@ -1012,7 +1012,7 @@ changes: `PerformanceResourceTiming` object as the receiver. --> -* {number} +* Type: {number} The high resolution millisecond timestamp at immediately before dispatching the `fetch` request. If the resource is not intercepted by a worker the property @@ -1031,7 +1031,7 @@ changes: `PerformanceResourceTiming` object as the receiver. --> -* {number} +* Type: {number} The high resolution millisecond timestamp that represents the start time of the fetch which initiates the redirect. @@ -1049,7 +1049,7 @@ changes: `PerformanceResourceTiming` object as the receiver. --> -* {number} +* Type: {number} The high resolution millisecond timestamp that will be created immediately after receiving the last byte of the response of the last redirect. @@ -1067,7 +1067,7 @@ changes: `PerformanceResourceTiming` object as the receiver. --> -* {number} +* Type: {number} The high resolution millisecond timestamp immediately before the Node.js starts to fetch the resource. @@ -1085,7 +1085,7 @@ changes: `PerformanceResourceTiming` object as the receiver. --> -* {number} +* Type: {number} The high resolution millisecond timestamp immediately before the Node.js starts the domain name lookup for the resource. @@ -1103,7 +1103,7 @@ changes: `PerformanceResourceTiming` object as the receiver. --> -* {number} +* Type: {number} The high resolution millisecond timestamp representing the time immediately after the Node.js finished the domain name lookup for the resource. @@ -1121,7 +1121,7 @@ changes: `PerformanceResourceTiming` object as the receiver. --> -* {number} +* Type: {number} The high resolution millisecond timestamp representing the time immediately before Node.js starts to establish the connection to the server to retrieve @@ -1140,7 +1140,7 @@ changes: `PerformanceResourceTiming` object as the receiver. --> -* {number} +* Type: {number} The high resolution millisecond timestamp representing the time immediately after Node.js finishes establishing the connection to the server to retrieve @@ -1159,7 +1159,7 @@ changes: `PerformanceResourceTiming` object as the receiver. --> -* {number} +* Type: {number} The high resolution millisecond timestamp representing the time immediately before Node.js starts the handshake process to secure the current connection. @@ -1177,7 +1177,7 @@ changes: `PerformanceResourceTiming` object as the receiver. --> -* {number} +* Type: {number} The high resolution millisecond timestamp representing the time immediately before Node.js receives the first byte of the response from the server. @@ -1195,7 +1195,7 @@ changes: `PerformanceResourceTiming` object as the receiver. --> -* {number} +* Type: {number} The high resolution millisecond timestamp representing the time immediately after Node.js receives the last byte of the resource or immediately before @@ -1214,7 +1214,7 @@ changes: `PerformanceResourceTiming` object as the receiver. --> -* {number} +* Type: {number} A number representing the size (in octets) of the fetched resource. The size includes the response header fields plus the response payload body. @@ -1232,7 +1232,7 @@ changes: `PerformanceResourceTiming` object as the receiver. --> -* {number} +* Type: {number} A number representing the size (in octets) received from the fetch (HTTP or cache), of the payload body, before removing any applied @@ -1251,7 +1251,7 @@ changes: `PerformanceResourceTiming` object as the receiver. 
--> -* {number} +* Type: {number} A number representing the size (in octets) received from the fetch (HTTP or cache), of the message body, after removing any applied @@ -1285,7 +1285,7 @@ added: v8.5.0 added: v16.0.0 --> -* {string\[]} +* Type: {string\[]} Get supported types. @@ -1787,7 +1787,7 @@ added: - v16.14.0 --> -* {number} +* Type: {number} The number of samples recorded by the histogram. @@ -1799,7 +1799,7 @@ added: - v16.14.0 --> -* {bigint} +* Type: {bigint} The number of samples recorded by the histogram. @@ -1809,7 +1809,7 @@ The number of samples recorded by the histogram. added: v11.10.0 --> -* {number} +* Type: {number} The number of times the event loop delay exceeded the maximum 1 hour event loop delay threshold. @@ -1822,7 +1822,7 @@ added: - v16.14.0 --> -* {bigint} +* Type: {bigint} The number of times the event loop delay exceeded the maximum 1 hour event loop delay threshold. @@ -1833,7 +1833,7 @@ loop delay threshold. added: v11.10.0 --> -* {number} +* Type: {number} The maximum recorded event loop delay. @@ -1845,7 +1845,7 @@ added: - v16.14.0 --> -* {bigint} +* Type: {bigint} The maximum recorded event loop delay. @@ -1855,7 +1855,7 @@ The maximum recorded event loop delay. added: v11.10.0 --> -* {number} +* Type: {number} The mean of the recorded event loop delays. @@ -1865,7 +1865,7 @@ The mean of the recorded event loop delays. added: v11.10.0 --> -* {number} +* Type: {number} The minimum recorded event loop delay. @@ -1877,7 +1877,7 @@ added: - v16.14.0 --> -* {bigint} +* Type: {bigint} The minimum recorded event loop delay. @@ -1911,7 +1911,7 @@ Returns the value at the given percentile. added: v11.10.0 --> -* {Map} +* Type: {Map} Returns a `Map` object detailing the accumulated percentile distribution. @@ -1923,7 +1923,7 @@ added: - v16.14.0 --> -* {Map} +* Type: {Map} Returns a `Map` object detailing the accumulated percentile distribution. @@ -1941,7 +1941,7 @@ Resets the collected histogram data. added: v11.10.0 --> -* {number} +* Type: {number} The standard deviation of the recorded event loop delays. diff --git a/doc/api/process.md b/doc/api/process.md index 144187a24b7cc3..eb7b9b3265d529 100644 --- a/doc/api/process.md +++ b/doc/api/process.md @@ -825,7 +825,7 @@ This feature is not available in [`Worker`][] threads. added: v10.10.0 --> -* {Set} +* Type: {Set} The `process.allowedNodeEnvironmentFlags` property is a special, read-only `Set` of flags allowable within the [`NODE_OPTIONS`][] @@ -888,7 +888,7 @@ contain what _would have_ been allowable. added: v0.5.0 --> -* {string} +* Type: {string} The operating system CPU architecture for which the Node.js binary was compiled. Possible values are: `'arm'`, `'arm64'`, `'ia32'`, `'loong64'`, `'mips'`, @@ -912,7 +912,7 @@ console.log(`This processor architecture is ${arch}`); added: v0.1.27 --> -* {string\[]} +* Type: {string\[]} The `process.argv` property returns an array containing the command-line arguments passed when the Node.js process was launched. The first element will @@ -963,7 +963,7 @@ Would generate the output: added: v6.4.0 --> -* {string} +* Type: {string} The `process.argv0` property stores a read-only copy of the original value of `argv[0]` passed when Node.js starts. @@ -986,7 +986,7 @@ changes: description: Change stability index for this feature from Experimental to Stable. --> -* {number} +* Type: {number} Gets the amount of free memory that is still available to the process (in bytes). 
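The event loop delay histogram properties listed above (`count`, `max`, `mean`, `min`, `stddev`, `percentile()`) are easiest to see through `perf_hooks.monitorEventLoopDelay()`; a minimal sketch:

```js
const { monitorEventLoopDelay } = require('node:perf_hooks');

const histogram = monitorEventLoopDelay({ resolution: 20 });
histogram.enable();

setTimeout(() => {
  histogram.disable();
  // Values are reported in nanoseconds.
  console.log('samples:', histogram.count);
  console.log('mean:', histogram.mean);
  console.log('p99:', histogram.percentile(99));
  console.log('max:', histogram.max);
}, 1000);
```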
@@ -1004,7 +1004,7 @@ changes: description: The object no longer accidentally exposes native C++ bindings. --> -* {Object} +* Type: {Object} If the Node.js process was spawned with an IPC channel (see the [Child Process][] documentation), the `process.channel` @@ -1088,7 +1088,7 @@ changes: description: Modifying process.config has been deprecated. --> -* {Object} +* Type: {Object} The `process.config` property returns a frozen `Object` containing the JavaScript representation of the configure options used to compile the current @@ -1131,7 +1131,7 @@ An example of the possible output looks like: added: v0.7.2 --> -* {boolean} +* Type: {boolean} If the Node.js process is spawned with an IPC channel (see the [Child Process][] and [Cluster][] documentation), the `process.connected` property will return @@ -1156,7 +1156,7 @@ changes: description: Aligned return value with `uv_get_constrained_memory`. --> -* {number} +* Type: {number} Gets the amount of memory available to the process (in bytes) based on limits imposed by the OS. If there is no such constraint, or the constraint @@ -1243,7 +1243,7 @@ console.log(`Current directory: ${cwd()}`); added: v0.7.2 --> -* {number} +* Type: {number} The port used by the Node.js debugger when enabled. @@ -1591,7 +1591,7 @@ changes: description: Implicit conversion of variable value to string is deprecated. --> -* {Object} +* Type: {Object} The `process.env` property returns an object containing the user environment. See environ(7). @@ -1719,7 +1719,7 @@ unlike the main thread. added: v0.7.7 --> -* {string\[]} +* Type: {string\[]} The `process.execArgv` property returns the set of Node.js-specific command-line options passed when the Node.js process was launched. These options do not @@ -1755,7 +1755,7 @@ threads with this property. added: v0.1.100 --> -* {string} +* Type: {string} The `process.execPath` property returns the absolute pathname of the executable that started the Node.js process. Symbolic links, if any, are resolved. @@ -1912,7 +1912,7 @@ changes: represents an integer. --> -* {integer|string|null|undefined} The exit code. For string type, only +* Type: {integer|string|null|undefined} The exit code. For string type, only integer strings (e.g.,'1') are allowed. **Default:** `undefined`. A number which will be the process exit code, when the process either @@ -1948,7 +1948,7 @@ $ node --input-type=module -e 'process.exitCode = 9; await new Promise(() => {}) added: v12.0.0 --> -* {boolean} +* Type: {boolean} A boolean value that is `true` if the current Node.js build is caching builtin modules. @@ -1958,7 +1958,7 @@ A boolean value that is `true` if the current Node.js build is caching builtin m added: v0.5.5 --> -* {boolean} +* Type: {boolean} A boolean value that is `true` if the current Node.js build is a debug build. @@ -1968,7 +1968,7 @@ A boolean value that is `true` if the current Node.js build is a debug build. added: v11.10.0 --> -* {boolean} +* Type: {boolean} A boolean value that is `true` if the current Node.js build includes the inspector. @@ -1982,7 +1982,7 @@ deprecated: v22.13.0 > Stability: 0 - Deprecated. This property is always true, and any checks based on it are > redundant. -* {boolean} +* Type: {boolean} A boolean value that is `true` if the current Node.js build includes support for IPv6. @@ -1994,7 +1994,7 @@ Since all Node.js builds have IPv6 support, this value is always `true`. 
added: v22.10.0 --> -* {boolean} +* Type: {boolean} A boolean value that is `true` if the current Node.js build supports [loading ECMAScript modules using `require()`][]. @@ -2005,7 +2005,7 @@ A boolean value that is `true` if the current Node.js build supports added: v0.5.3 --> -* {boolean} +* Type: {boolean} A boolean value that is `true` if the current Node.js build includes support for TLS. @@ -2018,7 +2018,7 @@ deprecated: v22.13.0 > Stability: 0 - Deprecated. Use `process.features.tls` instead. -* {boolean} +* Type: {boolean} A boolean value that is `true` if the current Node.js build includes support for ALPN in TLS. @@ -2034,7 +2034,7 @@ deprecated: v22.13.0 > Stability: 0 - Deprecated. Use `process.features.tls` instead. -* {boolean} +* Type: {boolean} A boolean value that is `true` if the current Node.js build includes support for OCSP in TLS. @@ -2050,7 +2050,7 @@ deprecated: v22.13.0 > Stability: 0 - Deprecated. Use `process.features.tls` instead. -* {boolean} +* Type: {boolean} A boolean value that is `true` if the current Node.js build includes support for SNI in TLS. @@ -2065,7 +2065,7 @@ added: v22.10.0 > Stability: 1.1 - Active development -* {boolean|string} +* Type: {boolean|string} A value that is `"strip"` by default, `"transform"` if Node.js is run with `--experimental-transform-types`, and `false` if @@ -2081,7 +2081,7 @@ deprecated: v22.13.0 > Stability: 0 - Deprecated. This property is always true, and any checks based on it are > redundant. -* {boolean} +* Type: {boolean} A boolean value that is `true` if the current Node.js build includes support for libuv. @@ -2778,7 +2778,7 @@ deprecated: v14.0.0 > Stability: 0 - Deprecated: Use [`require.main`][] instead. -* {Object} +* Type: {Object} The `process.mainModule` property provides an alternative way of retrieving [`require.main`][]. The difference is that if the main module changes at @@ -2861,6 +2861,13 @@ The `process.memoryUsage()` method iterates over each page to gather information about memory usage which might be slow depending on the program memory allocations. +### A note on process memoryUsage + +On Linux or other systems where glibc is commonly used, an application may have sustained +`rss` growth despite stable `heapTotal` due to fragmentation caused by the glibc `malloc` +implementation. See [nodejs/node#21973][] on how to switch to an alternative `malloc` +implementation to address the performance issue. + ## `process.memoryUsage.rss()` -* {boolean} +* Type: {boolean} The `process.noDeprecation` property indicates whether the `--no-deprecation` flag is set on the current Node.js process. See the documentation for @@ -3151,7 +3158,7 @@ flag's behavior. added: v20.0.0 --> -* {Object} +* Type: {Object} This API is available through the [`--permission`][] flag. @@ -3198,7 +3205,7 @@ process.permission.has('fs.read'); added: v0.1.15 --> -* {integer} +* Type: {integer} The `process.pid` property returns the PID of the process. @@ -3220,7 +3227,7 @@ console.log(`This process is pid ${pid}`); added: v0.1.16 --> -* {string} +* Type: {string} The `process.platform` property returns a string identifying the operating system platform for which the Node.js binary was compiled. @@ -3260,7 +3267,7 @@ added: - v6.13.0 --> -* {integer} +* Type: {integer} The `process.ppid` property returns the PID of the parent of the current process. @@ -3306,7 +3313,7 @@ changes: description: The `lts` property is now supported. 
--> -* {Object} +* Type: {Object} The `process.release` property returns an `Object` containing metadata related to the current release, including URLs for the source tarball and headers-only @@ -3362,7 +3369,7 @@ changes: description: This API is no longer experimental. --> -* {Object} +* Type: {Object} `process.report` is an object whose methods are used to generate diagnostic reports for the current process. Additional documentation is available in the @@ -3376,7 +3383,7 @@ added: - v12.17.0 --> -* {boolean} +* Type: {boolean} Write reports in a compact format, single-line JSON, more easily consumable by log processing systems than the default multi-line format designed for @@ -3406,7 +3413,7 @@ changes: description: This API is no longer experimental. --> -* {string} +* Type: {string} Directory where the report is written. The default value is the empty string, indicating that reports are written to the current working directory of the @@ -3436,7 +3443,7 @@ changes: description: This API is no longer experimental. --> -* {string} +* Type: {string} Filename where the report is written. If set to the empty string, the output filename will be comprised of a timestamp, PID, and sequence number. The default @@ -3514,7 +3521,7 @@ changes: description: This API is no longer experimental. --> -* {boolean} +* Type: {boolean} If `true`, a diagnostic report is generated on fatal errors, such as out of memory errors or failed C++ assertions. @@ -3543,7 +3550,7 @@ changes: description: This API is no longer experimental. --> -* {boolean} +* Type: {boolean} If `true`, a diagnostic report is generated when the process receives the signal specified by `process.report.signal`. @@ -3572,7 +3579,7 @@ changes: description: This API is no longer experimental. --> -* {boolean} +* Type: {boolean} If `true`, a diagnostic report is generated on uncaught exception. @@ -3594,7 +3601,7 @@ console.log(`Report on exception: ${report.reportOnUncaughtException}`); added: v22.13.0 --> -* {boolean} +* Type: {boolean} If `true`, a diagnostic report is generated without the environment variables. @@ -3610,7 +3617,7 @@ changes: description: This API is no longer experimental. --> -* {string} +* Type: {string} The signal used to trigger the creation of a diagnostic report. Defaults to `'SIGUSR2'`. @@ -4082,14 +4089,14 @@ added: > Stability: 1 - Experimental: Use [`module.getSourceMapsSupport()`][] instead. -* {boolean} +* Type: {boolean} The `process.sourceMapsEnabled` property returns whether the [Source Map][] support for stack traces is enabled. ## `process.stderr` -* {Stream} +* Type: {Stream} The `process.stderr` property returns a stream connected to `stderr` (fd `2`). It is a [`net.Socket`][] (which is a [Duplex][] @@ -4101,7 +4108,7 @@ a [Writable][] stream. ### `process.stderr.fd` -* {number} +* Type: {number} This property refers to the value of underlying file descriptor of `process.stderr`. The value is fixed at `2`. In [`Worker`][] threads, @@ -4109,7 +4116,7 @@ this field does not exist. ## `process.stdin` -* {Stream} +* Type: {Stream} The `process.stdin` property returns a stream connected to `stdin` (fd `0`). It is a [`net.Socket`][] (which is a [Duplex][] @@ -4128,7 +4135,7 @@ must call `process.stdin.resume()` to read from it. Note also that calling ### `process.stdin.fd` -* {number} +* Type: {number} This property refers to the value of underlying file descriptor of `process.stdin`. The value is fixed at `0`. In [`Worker`][] threads, @@ -4136,7 +4143,7 @@ this field does not exist. 
## `process.stdout` -* {Stream} +* Type: {Stream} The `process.stdout` property returns a stream connected to `stdout` (fd `1`). It is a [`net.Socket`][] (which is a [Duplex][] @@ -4162,7 +4169,7 @@ stdin.pipe(stdout); ### `process.stdout.fd` -* {number} +* Type: {number} This property refers to the value of underlying file descriptor of `process.stdout`. The value is fixed at `1`. In [`Worker`][] threads, @@ -4222,7 +4229,7 @@ See the [TTY][] documentation for more information. added: v0.9.12 --> -* {boolean} +* Type: {boolean} The initial value of `process.throwDeprecation` indicates whether the `--throw-deprecation` flag is set on the current Node.js process. @@ -4247,13 +4254,32 @@ Thrown: [DeprecationWarning: test] { name: 'DeprecationWarning' } ``` +## `process.threadCpuUsage([previousValue])` + + + +* `previousValue` {Object} A previous return value from calling + `process.cpuUsage()` +* Returns: {Object} + * `user` {integer} + * `system` {integer} + +The `process.threadCpuUsage()` method returns the user and system CPU time usage of +the current worker thread, in an object with properties `user` and `system`, whose +values are microsecond values (millionth of a second). + +The result of a previous call to `process.threadCpuUsage()` can be passed as the +argument to the function, to get a diff reading. + ## `process.title` -* {string} +* Type: {string} The `process.title` property returns the current process title (i.e. returns the current value of `ps`). Assigning a new value to `process.title` modifies @@ -4278,7 +4304,7 @@ Services Manager. added: v0.8.0 --> -* {boolean} +* Type: {boolean} The `process.traceDeprecation` property indicates whether the `--trace-deprecation` flag is set on the current Node.js process. See the @@ -4378,7 +4404,7 @@ seconds. added: v0.1.3 --> -* {string} +* Type: {string} The `process.version` property contains the Node.js version string. @@ -4412,7 +4438,7 @@ changes: description: The `icu` property is now supported. --> -* {Object} +* Type: {Object} The `process.versions` property returns an object listing the version strings of Node.js and its dependencies. `process.versions.modules` indicates the current @@ -4575,6 +4601,7 @@ cases: [debugger]: debugger.md [deprecation code]: deprecations.md [loading ECMAScript modules using `require()`]: modules.md#loading-ecmascript-modules-using-require +[nodejs/node#21973]: https://github.com/nodejs/node/issues/21973 [note on process I/O]: #a-note-on-process-io [process.cpuUsage]: #processcpuusagepreviousvalue [process_emit_warning]: #processemitwarningwarning-type-code-ctor diff --git a/doc/api/punycode.md b/doc/api/punycode.md index f2e9bc09fbbb00..31d331926b7758 100644 --- a/doc/api/punycode.md +++ b/doc/api/punycode.md @@ -155,7 +155,7 @@ punycode.ucs2.encode([0x1D306]); // '\uD834\uDF06' added: v0.6.1 --> -* {string} +* Type: {string} Returns a string identifying the current [Punycode.js][] version number. diff --git a/doc/api/readline.md b/doc/api/readline.md index 05f9e169219743..cc91846f609a49 100644 --- a/doc/api/readline.md +++ b/doc/api/readline.md @@ -435,7 +435,7 @@ changes: description: Value will always be a string, never undefined. --> -* {string} +* Type: {string} The current input data being processed by node. @@ -471,7 +471,7 @@ process.stdin.on('keypress', (c, k) => { added: v0.1.98 --> -* {number|undefined} +* Type: {number|undefined} The cursor position relative to `rl.line`. 
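For the new `process.threadCpuUsage()` entry above, a minimal sketch of the diff-reading pattern it describes, mirroring how `process.cpuUsage()` is typically used (the printed numbers are illustrative only):

```js
const startUsage = process.threadCpuUsage();

// Burn some CPU on this thread for roughly half a second.
const stop = Date.now() + 500;
while (Date.now() < stop);

// Passing the earlier reading returns the delta since that call.
const delta = process.threadCpuUsage(startUsage);
console.log(delta); // e.g. { user: 500134, system: 312 } (microseconds)
```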
diff --git a/doc/api/repl.md b/doc/api/repl.md index fae055a656fad3..ea585f261992e0 100644 --- a/doc/api/repl.md +++ b/doc/api/repl.md @@ -671,7 +671,7 @@ deprecated: v22.16.0 > Stability: 0 - Deprecated. Use [`module.builtinModules`][] instead. -* {string\[]} +* Type: {string\[]} A list of the names of some Node.js modules, e.g., `'http'`. diff --git a/doc/api/sqlite.md b/doc/api/sqlite.md index 61ceab3a22b739..754242ffa6deaa 100644 --- a/doc/api/sqlite.md +++ b/doc/api/sqlite.md @@ -308,7 +308,7 @@ wrapper around [`sqlite3_create_function_v2()`][]. added: v22.15.0 --> -* {boolean} Whether the database is currently open or not. +* Type: {boolean} Whether the database is currently open or not. ### `database.isTransaction` @@ -316,7 +316,7 @@ added: v22.15.0 added: v22.16.0 --> -* {boolean} Whether the database is currently within a transaction. This method +* Type: {boolean} Whether the database is currently within a transaction. This method is a wrapper around [`sqlite3_get_autocommit()`][]. ### `database.open()` @@ -502,19 +502,19 @@ added: v22.16.0 * Returns: {Array} An array of objects. Each object corresponds to a column in the prepared statement, and contains the following properties: - * `column`: {string|null} The unaliased name of the column in the origin + * `column` {string|null} The unaliased name of the column in the origin table, or `null` if the column is the result of an expression or subquery. This property is the result of [`sqlite3_column_origin_name()`][]. - * `database`: {string|null} The unaliased name of the origin database, or + * `database` {string|null} The unaliased name of the origin database, or `null` if the column is the result of an expression or subquery. This property is the result of [`sqlite3_column_database_name()`][]. - * `name`: {string} The name assigned to the column in the result set of a + * `name` {string} The name assigned to the column in the result set of a `SELECT` statement. This property is the result of [`sqlite3_column_name()`][]. - * `table`: {string|null} The unaliased name of the origin table, or `null` if + * `table` {string|null} The unaliased name of the origin table, or `null` if the column is the result of an expression or subquery. This property is the result of [`sqlite3_column_table_name()`][]. - * `type`: {string|null} The declared data type of the column, or `null` if the + * `type` {string|null} The declared data type of the column, or `null` if the column is the result of an expression or subquery. This property is the result of [`sqlite3_column_decltype()`][]. @@ -527,7 +527,7 @@ prepared statement. added: v22.5.0 --> -* {string} The source SQL expanded to include parameter values. +* Type: {string} The source SQL expanded to include parameter values. The source SQL text of the prepared statement with parameter placeholders replaced by the values that were used during the most recent @@ -596,12 +596,12 @@ changes: * `...anonymousParameters` {null|number|bigint|string|Buffer|TypedArray|DataView} Zero or more values to bind to anonymous parameters. * Returns: {Object} - * `changes`: {number|bigint} The number of rows modified, inserted, or deleted + * `changes` {number|bigint} The number of rows modified, inserted, or deleted by the most recently completed `INSERT`, `UPDATE`, or `DELETE` statement. This field is either a number or a `BigInt` depending on the prepared statement's configuration. This property is the result of [`sqlite3_changes64()`][]. - * `lastInsertRowid`: {number|bigint} The most recently inserted rowid. 
This + * `lastInsertRowid` {number|bigint} The most recently inserted rowid. This field is either a number or a `BigInt` depending on the prepared statement's configuration. This property is the result of [`sqlite3_last_insert_rowid()`][]. @@ -646,6 +646,17 @@ added: v22.15.0 By default, if an unknown name is encountered while binding parameters, an exception is thrown. This method allows unknown named parameters to be ignored. +### `statement.setReturnArrays(enabled)` + + + +* `enabled` {boolean} Enables or disables the return of query results as arrays. + +When enabled, query results returned by the `all()`, `get()`, and `iterate()` methods will be returned as arrays instead +of objects. + ### `statement.setReadBigInts(enabled)` -* {string} The source SQL used to create this prepared statement. +* Type: {string} The source SQL used to create this prepared statement. The source SQL text of the prepared statement. This property is a wrapper around [`sqlite3_sql()`][]. @@ -752,7 +763,7 @@ console.log('Backup completed', totalPagesTransferred); added: v22.13.0 --> -* {Object} +* Type: {Object} An object containing commonly used constants for SQLite operations. diff --git a/doc/api/stream.md b/doc/api/stream.md index 7bc15ae9a27cbb..2ca927466affd6 100644 --- a/doc/api/stream.md +++ b/doc/api/stream.md @@ -62,10 +62,10 @@ functions for streams that return `Promise` objects rather than using callbacks. The API is accessible via `require('node:stream/promises')` or `require('node:stream').promises`. -### `stream.pipeline(source[, ...transforms], destination[, options])` - ### `stream.pipeline(streams[, options])` +### `stream.pipeline(source[, ...transforms], destination[, options])` + -* {Error} +* Type: {Error} The `'error'` event is emitted if an error occurred while writing or piping data. The listener callback is passed a single `Error` argument when called. @@ -732,7 +732,7 @@ but instead implement [`writable._destroy()`][writable-_destroy]. added: v18.0.0 --> -* {boolean} +* Type: {boolean} Is `true` after `'close'` has been emitted. @@ -742,7 +742,7 @@ Is `true` after `'close'` has been emitted. added: v8.0.0 --> -* {boolean} +* Type: {boolean} Is `true` after [`writable.destroy()`][writable-destroy] has been called. @@ -864,7 +864,7 @@ See also: [`writable.cork()`][]. added: v11.4.0 --> -* {boolean} +* Type: {boolean} Is `true` if it is safe to call [`writable.write()`][stream-write], which means the stream has not been destroyed, errored, or ended. @@ -881,7 +881,7 @@ changes: description: Marking the API stable. --> -* {boolean} +* Type: {boolean} Returns whether the stream was destroyed or errored before emitting `'finish'`. @@ -891,7 +891,7 @@ Returns whether the stream was destroyed or errored before emitting `'finish'`. added: v12.9.0 --> -* {boolean} +* Type: {boolean} Is `true` after [`writable.end()`][] has been called. This property does not indicate whether the data has been flushed, for this use @@ -905,7 +905,7 @@ added: - v12.16.0 --> -* {integer} +* Type: {integer} Number of times [`writable.uncork()`][stream-uncork] needs to be called in order to fully uncork the stream. @@ -917,7 +917,7 @@ added: v18.0.0 --> -* {Error} +* Type: {Error} Returns error if the stream has been destroyed with an error. @@ -927,7 +927,7 @@ Returns error if the stream has been destroyed with an error. added: v12.6.0 --> -* {boolean} +* Type: {boolean} Is set to `true` immediately before the [`'finish'`][] event is emitted. 
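Tying together the `statement.run()` return value and the new `statement.setReturnArrays()` entry above, a small sketch assuming the synchronous `DatabaseSync` class from `node:sqlite`:

```js
const { DatabaseSync } = require('node:sqlite');

const db = new DatabaseSync(':memory:');
db.exec('CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT)');

const insert = db.prepare('INSERT INTO users (name) VALUES (?)');
const { changes, lastInsertRowid } = insert.run('Ada');
console.log(changes, lastInsertRowid); // 1 1

const query = db.prepare('SELECT id, name FROM users');
console.log(query.all()); // one row object with `id` and `name` properties

// With array results enabled, each row comes back as an array of column values.
query.setReturnArrays(true);
console.log(query.all()); // one row as [ 1, 'Ada' ]
```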
@@ -937,7 +937,7 @@ Is set to `true` immediately before the [`'finish'`][] event is emitted. added: v9.3.0 --> -* {number} +* Type: {number} Return the value of `highWaterMark` passed when creating this `Writable`. @@ -947,7 +947,7 @@ Return the value of `highWaterMark` passed when creating this `Writable`. added: v9.4.0 --> -* {number} +* Type: {number} This property contains the number of bytes (or objects) in the queue ready to be written. The value provides introspection data regarding @@ -961,7 +961,7 @@ added: - v14.17.0 --> -* {boolean} +* Type: {boolean} Is `true` if the stream's buffer has been full and stream will emit `'drain'`. @@ -971,7 +971,7 @@ Is `true` if the stream's buffer has been full and stream will emit `'drain'`. added: v12.3.0 --> -* {boolean} +* Type: {boolean} Getter for the property `objectMode` of a given `Writable` stream. @@ -1279,7 +1279,7 @@ readable.on('end', () => { added: v0.9.4 --> -* {Error} +* Type: {Error} The `'error'` event may be emitted by a `Readable` implementation at any time. Typically, this may occur if the underlying stream is unable to generate data @@ -1408,7 +1408,7 @@ Implementors should not override this method, but instead implement added: v18.0.0 --> -* {boolean} +* Type: {boolean} Is `true` after `'close'` has been emitted. @@ -1418,7 +1418,7 @@ Is `true` after `'close'` has been emitted. added: v8.0.0 --> -* {boolean} +* Type: {boolean} Is `true` after [`readable.destroy()`][readable-destroy] has been called. @@ -1628,7 +1628,7 @@ been emitted will return `null`. No runtime error will be raised. added: v11.4.0 --> -* {boolean} +* Type: {boolean} Is `true` if it is safe to call [`readable.read()`][stream-read], which means the stream has not been destroyed or emitted `'error'` or `'end'`. @@ -1643,7 +1643,7 @@ changes: description: Marking the API stable. --> -* {boolean} +* Type: {boolean} Returns whether the stream was destroyed or errored before emitting `'end'`. @@ -1659,7 +1659,7 @@ changes: description: Marking the API stable. --> -* {boolean} +* Type: {boolean} Returns whether `'data'` has been emitted. @@ -1669,7 +1669,7 @@ Returns whether `'data'` has been emitted. added: v12.7.0 --> -* {null|string} +* Type: {null|string} Getter for the property `encoding` of a given `Readable` stream. The `encoding` property can be set using the [`readable.setEncoding()`][] method. @@ -1680,7 +1680,7 @@ property can be set using the [`readable.setEncoding()`][] method. added: v12.9.0 --> -* {boolean} +* Type: {boolean} Becomes `true` when [`'end'`][] event is emitted. @@ -1691,7 +1691,7 @@ added: v18.0.0 --> -* {Error} +* Type: {Error} Returns error if the stream has been destroyed with an error. @@ -1701,7 +1701,7 @@ Returns error if the stream has been destroyed with an error. added: v9.4.0 --> -* {boolean} +* Type: {boolean} This property reflects the current state of a `Readable` stream as described in the [Three states][] section. @@ -1712,7 +1712,7 @@ in the [Three states][] section. added: v9.3.0 --> -* {number} +* Type: {number} Returns the value of `highWaterMark` passed when creating this `Readable`. @@ -1722,7 +1722,7 @@ Returns the value of `highWaterMark` passed when creating this `Readable`. added: v9.4.0 --> -* {number} +* Type: {number} This property contains the number of bytes (or objects) in the queue ready to be read. The value provides introspection data regarding @@ -1734,7 +1734,7 @@ the status of the `highWaterMark`. 
added: v12.3.0 --> -* {boolean} +* Type: {boolean} Getter for the property `objectMode` of a given `Readable` stream. @@ -2656,7 +2656,7 @@ Examples of `Duplex` streams include: added: v0.9.4 --> -* {boolean} +* Type: {boolean} If `false` then the stream will automatically end the writable side when the readable side ends. Set initially by the `allowHalfOpen` constructor option, diff --git a/doc/api/test.md b/doc/api/test.md index e6b44518823ebb..9a141297473229 100644 --- a/doc/api/test.md +++ b/doc/api/test.md @@ -1287,7 +1287,7 @@ changes: * `forceExit`: {boolean} Configures the test runner to exit the process once all known tests have finished executing even if the event loop would otherwise remain active. **Default:** `false`. - * `globPatterns`: {Array} An array containing the list of glob patterns to + * `globPatterns` {Array} An array containing the list of glob patterns to match test files. This option cannot be used together with `files`. **Default:** matching files from [test runner execution model][]. * `inspectPort` {number|Function} Sets inspector port of test child process. @@ -1300,7 +1300,7 @@ changes: `'process'`, each test file is run in a separate child process. If set to `'none'`, all test files run in the current process. **Default:** `'process'`. - * `only`: {boolean} If truthy, the test context will only run tests that + * `only` {boolean} If truthy, the test context will only run tests that have the `only` option set * `setup` {Function} A function that accepts the `TestsStream` instance and can be used to setup listeners before any tests are run. @@ -1816,7 +1816,7 @@ added: - v18.13.0 --> -* {Array} +* Type: {Array} A getter that returns a copy of the internal array used to track calls to the mock. Each entry in the array is an object with the following properties. diff --git a/doc/api/tls.md b/doc/api/tls.md index bff3e464232084..3c95d01564bf74 100644 --- a/doc/api/tls.md +++ b/doc/api/tls.md @@ -193,7 +193,7 @@ the selected cipher's digest). It will be called first on the client: -* hint: {string} optional message sent from the server to help the client +* `hint` {string} optional message sent from the server to help the client decide which identity to use during negotiation. Always `null` if TLS 1.3 is used. * Returns: {Object} in the form @@ -201,8 +201,8 @@ It will be called first on the client: Then on the server: -* socket: {tls.TLSSocket} the server socket instance, equivalent to `this`. -* identity: {string} identity parameter sent from the client. +* `socket` {tls.TLSSocket} the server socket instance, equivalent to `this`. +* `identity` {string} identity parameter sent from the client. * Returns: {Buffer|TypedArray|DataView} the PSK (or `null`). A return value of `null` stops the negotiation process and sends an @@ -1070,7 +1070,7 @@ property is set only when `tlsSocket.authorized === false`. added: v0.11.4 --> -* {boolean} +* Type: {boolean} This property is `true` if the peer certificate was signed by one of the CAs specified when creating the `tls.TLSSocket` instance, otherwise `false`. @@ -1421,7 +1421,7 @@ See the OpenSSL [`SSL_get_version`][] documentation for more information. added: v0.11.4 --> -* {Buffer} +* Type: {Buffer} Returns the TLS session data or `undefined` if no session was negotiated. On the client, the data can be provided to the `session` option of @@ -1452,7 +1452,7 @@ for more information. added: v0.11.4 --> -* {Buffer} +* Type: {Buffer} For a client, returns the TLS session ticket if one is available, or `undefined`. 
For a server, always returns `undefined`. @@ -1490,7 +1490,7 @@ See [Session Resumption][] for more information. added: v0.11.4 --> -* {string} +* Type: {string} Returns the string representation of the local IP address. @@ -1500,7 +1500,7 @@ Returns the string representation of the local IP address. added: v0.11.4 --> -* {integer} +* Type: {integer} Returns the numeric representation of the local port. @@ -1510,7 +1510,7 @@ Returns the numeric representation of the local port. added: v0.11.4 --> -* {string} +* Type: {string} Returns the string representation of the remote IP address. For example, `'74.125.127.100'` or `'2001:4860:a005::68'`. @@ -1521,7 +1521,7 @@ Returns the string representation of the remote IP address. For example, added: v0.11.4 --> -* {string} +* Type: {string} Returns the string representation of the remote IP family. `'IPv4'` or `'IPv6'`. @@ -1531,7 +1531,7 @@ Returns the string representation of the remote IP family. `'IPv4'` or `'IPv6'`. added: v0.11.4 --> -* {integer} +* Type: {integer} Returns the numeric representation of the remote port. For example, `443`. @@ -1729,7 +1729,7 @@ changes: verification fails; `err.code` contains the OpenSSL error code. **Default:** `true`. * `pskCallback` {Function} For TLS-PSK negotiation, see [Pre-shared keys][]. - * `ALPNProtocols`: {string\[]|Buffer\[]|TypedArray\[]|DataView\[]|Buffer| + * `ALPNProtocols` {string\[]|Buffer\[]|TypedArray\[]|DataView\[]|Buffer| TypedArray|DataView} An array of strings, `Buffer`s, `TypedArray`s, or `DataView`s, or a single `Buffer`, `TypedArray`, or `DataView` containing the supported ALPN @@ -1738,7 +1738,7 @@ changes: next protocol name. Passing an array is usually much simpler, e.g. `['http/1.1', 'http/1.0']`. Protocols earlier in the list have higher preference than those later. - * `servername`: {string} Server name for the SNI (Server Name Indication) TLS + * `servername` {string} Server name for the SNI (Server Name Indication) TLS extension. It is the name of the host being connected to, and must be a host name, and not an IP address. It can be used by a multi-homed server to choose the correct certificate to present to the client, see the @@ -1754,7 +1754,7 @@ changes: TLS connection. When a server offers a DH parameter with a size less than `minDHSize`, the TLS connection is destroyed and an error is thrown. **Default:** `1024`. - * `highWaterMark`: {number} Consistent with the readable stream `highWaterMark` parameter. + * `highWaterMark` {number} Consistent with the readable stream `highWaterMark` parameter. **Default:** `16 * 1024`. * `secureContext`: TLS context object created with [`tls.createSecureContext()`][]. If a `secureContext` is _not_ provided, one @@ -2071,7 +2071,7 @@ changes: **Default:** none, see `minVersion`. * `sessionIdContext` {string} Opaque identifier used by servers to ensure session state is not shared between applications. Unused by clients. - * `ticketKeys`: {Buffer} 48-bytes of cryptographically strong pseudorandom + * `ticketKeys` {Buffer} 48-bytes of cryptographically strong pseudorandom data. See [Session Resumption][] for more information. * `sessionTimeout` {number} The number of seconds after which a TLS session created by the server will no longer be resumable. 
See @@ -2202,7 +2202,7 @@ changes: --> * `options` {Object} - * `ALPNProtocols`: {string\[]|Buffer\[]|TypedArray\[]|DataView\[]|Buffer| + * `ALPNProtocols` {string\[]|Buffer\[]|TypedArray\[]|DataView\[]|Buffer| TypedArray|DataView} An array of strings, `Buffer`s, `TypedArray`s, or `DataView`s, or a single `Buffer`, `TypedArray`, or `DataView` containing the supported ALPN @@ -2210,7 +2210,7 @@ changes: e.g. `0x05hello0x05world`, where the first byte is the length of the next protocol name. Passing an array is usually much simpler, e.g. `['hello', 'world']`. (Protocols should be ordered by their priority.) - * `ALPNCallback`: {Function} If set, this will be called when a + * `ALPNCallback` {Function} If set, this will be called when a client opens a connection using the ALPN extension. One argument will be passed to the callback: an object containing `servername` and `protocols` fields, respectively containing the server name from @@ -2249,7 +2249,7 @@ changes: If `callback` is called with a falsy `ctx` argument, the default secure context of the server will be used. If `SNICallback` wasn't provided the default callback with high-level API will be used (see below). - * `ticketKeys`: {Buffer} 48-bytes of cryptographically strong pseudorandom + * `ticketKeys` {Buffer} 48-bytes of cryptographically strong pseudorandom data. See [Session Resumption][] for more information. * `pskCallback` {Function} For TLS-PSK negotiation, see [Pre-shared keys][]. * `pskIdentityHint` {string} optional hint to send to a client to help @@ -2342,6 +2342,54 @@ openssl pkcs12 -certpbe AES-256-CBC -export -out client-cert.pem \ The server can be tested by connecting to it using the example client from [`tls.connect()`][]. +## `tls.setDefaultCACertificates(certs)` + + + +* `certs` {string\[]|ArrayBufferView\[]} An array of CA certificates in PEM format. + +Sets the default CA certificates used by Node.js TLS clients. If the provided +certificates are parsed successfully, they will become the default CA +certificate list returned by [`tls.getCACertificates()`][] and used +by subsequent TLS connections that don't specify their own CA certificates. +The certificates will be deduplicated before being set as the default. + +This function only affects the current Node.js thread. Previous +sessions cached by the HTTPS agent won't be affected by this change, so +this method should be called before any unwanted cachable TLS connections are +made. + +To use system CA certificates as the default: + +```cjs +const tls = require('node:tls'); +tls.setDefaultCACertificates(tls.getCACertificates('system')); +``` + +```mjs +import tls from 'node:tls'; +tls.setDefaultCACertificates(tls.getCACertificates('system')); +``` + +This function completely replaces the default CA certificate list. 
To add additional +certificates to the existing defaults, get the current certificates and append to them: + +```cjs +const tls = require('node:tls'); +const currentCerts = tls.getCACertificates('default'); +const additionalCerts = ['-----BEGIN CERTIFICATE-----\n...']; +tls.setDefaultCACertificates([...currentCerts, ...additionalCerts]); +``` + +```mjs +import tls from 'node:tls'; +const currentCerts = tls.getCACertificates('default'); +const additionalCerts = ['-----BEGIN CERTIFICATE-----\n...']; +tls.setDefaultCACertificates([...currentCerts, ...additionalCerts]); +``` + ## `tls.getCACertificates([type])` -* {string\[]} +* Type: {string\[]} An immutable array of strings representing the root certificates (in PEM format) from the bundled Mozilla CA store as supplied by the current Node.js version. @@ -2432,7 +2480,7 @@ information. added: v11.4.0 --> -* {string} The default value of the `maxVersion` option of +* Type: {string} The default value of the `maxVersion` option of [`tls.createSecureContext()`][]. It can be assigned any of the supported TLS protocol versions, `'TLSv1.3'`, `'TLSv1.2'`, `'TLSv1.1'`, or `'TLSv1'`. **Default:** `'TLSv1.3'`, unless changed using CLI options. Using @@ -2446,7 +2494,7 @@ added: v11.4.0 added: v11.4.0 --> -* {string} The default value of the `minVersion` option of +* Type: {string} The default value of the `minVersion` option of [`tls.createSecureContext()`][]. It can be assigned any of the supported TLS protocol versions, `'TLSv1.3'`, `'TLSv1.2'`, `'TLSv1.1'`, or `'TLSv1'`. Versions before TLSv1.2 may require downgrading the [OpenSSL Security Level][]. @@ -2464,7 +2512,7 @@ added: - v18.16.0 --> -* {string} The default value of the `ciphers` option of +* Type: {string} The default value of the `ciphers` option of [`tls.createSecureContext()`][]. It can be assigned any of the supported OpenSSL ciphers. Defaults to the content of `crypto.constants.defaultCoreCipherList`, unless changed using CLI options diff --git a/doc/api/tracing.md b/doc/api/tracing.md index 1b18243a5ad1d3..61c29dd1763393 100644 --- a/doc/api/tracing.md +++ b/doc/api/tracing.md @@ -147,7 +147,7 @@ set of enabled trace event categories. added: v10.0.0 --> -* {string} +* Type: {string} A comma-separated list of the trace event categories covered by this `Tracing` object. @@ -210,7 +210,7 @@ Enables this `Tracing` object for the set of categories covered by the added: v10.0.0 --> -* {boolean} `true` only if the `Tracing` object has been enabled. +* Type: {boolean} `true` only if the `Tracing` object has been enabled. ### `trace_events.createTracing(options)` diff --git a/doc/api/tty.md b/doc/api/tty.md index 1139355ed0f280..a7c4c3a48d8707 100644 --- a/doc/api/tty.md +++ b/doc/api/tty.md @@ -110,7 +110,7 @@ changes: * `fd` {number} A file descriptor associated with a TTY. * `options` {Object} Options passed to parent `net.Socket`, see `options` of [`net.Socket` constructor][]. -* Returns {tty.ReadStream} +* Returns: {tty.ReadStream} Creates a `ReadStream` for `fd` associated with a TTY. @@ -121,7 +121,7 @@ added: v0.5.8 --> * `fd` {number} A file descriptor associated with a TTY. -* Returns {tty.WriteStream} +* Returns: {tty.WriteStream} Creates a `WriteStream` for `fd` associated with a TTY. 
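As context for the `tracing.categories` and `tracing.enabled` properties above, a minimal `node:trace_events` sketch:

```js
const trace_events = require('node:trace_events');

const tracing = trace_events.createTracing({ categories: ['node.perf'] });
console.log(tracing.categories); // 'node.perf'
console.log(tracing.enabled);    // false

tracing.enable();
console.log(tracing.enabled);    // true

// ... do work that emits trace events in the enabled categories ...

tracing.disable();
```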
diff --git a/doc/api/url.md b/doc/api/url.md index 9d39e3cb9e26d2..ea28c184d6d32b 100644 --- a/doc/api/url.md +++ b/doc/api/url.md @@ -207,7 +207,7 @@ myURL = new URL('foo:Example.com/', 'https://example.org/'); #### `url.hash` -* {string} +* Type: {string} Gets and sets the fragment portion of the URL. @@ -228,7 +228,7 @@ percent-encode may vary somewhat from what the [`url.parse()`][] and #### `url.host` -* {string} +* Type: {string} Gets and sets the host portion of the URL. @@ -246,7 +246,7 @@ Invalid host values assigned to the `host` property are ignored. #### `url.hostname` -* {string} +* Type: {string} Gets and sets the host name portion of the URL. The key difference between `url.host` and `url.hostname` is that `url.hostname` does _not_ include the @@ -272,7 +272,7 @@ Invalid host name values assigned to the `hostname` property are ignored. #### `url.href` -* {string} +* Type: {string} Gets and sets the serialized URL. @@ -306,7 +306,7 @@ changes: returns `'null'` for it. --> -* {string} +* Type: {string} Gets the read-only serialization of the URL's origin. @@ -327,7 +327,7 @@ console.log(idnURL.hostname); #### `url.password` -* {string} +* Type: {string} Gets and sets the password portion of the URL. @@ -348,7 +348,7 @@ percent-encode may vary somewhat from what the [`url.parse()`][] and #### `url.pathname` -* {string} +* Type: {string} Gets and sets the path portion of the URL. @@ -376,7 +376,7 @@ changes: description: The scheme "gopher" is no longer special. --> -* {string} +* Type: {string} Gets and sets the port portion of the URL. @@ -459,7 +459,7 @@ console.log(myURL.port); #### `url.protocol` -* {string} +* Type: {string} Gets and sets the protocol portion of the URL. @@ -524,7 +524,7 @@ According to the WHATWG URL Standard, special protocol schemes are `ftp`, #### `url.search` -* {string} +* Type: {string} Gets and sets the serialized query portion of the URL. @@ -545,7 +545,7 @@ and [`url.format()`][] methods would produce. #### `url.searchParams` -* {URLSearchParams} +* Type: {URLSearchParams} Gets the [`URLSearchParams`][] object representing the query parameters of the URL. This property is read-only but the `URLSearchParams` object it provides @@ -572,7 +572,7 @@ console.log(myURL.search); // prints ?foo=%7Ebar #### `url.username` -* {string} +* Type: {string} Gets and sets the username portion of the URL. @@ -716,7 +716,7 @@ added: v22.1.0 Parses a string as a URL. If `base` is provided, it will be used as the base URL for the purpose of resolving non-absolute `input` URLs. Returns `null` -if `input` is not a valid. +if the parameters can't be resolved to a valid URL. ### Class: `URLSearchParams` diff --git a/doc/api/util.md b/doc/api/util.md index 979b5e9e81f1e9..30ef8da1bb07de 100644 --- a/doc/api/util.md +++ b/doc/api/util.md @@ -185,7 +185,7 @@ let log = debuglog('internals', (debug) => { added: v14.9.0 --> -* {boolean} +* Type: {boolean} The `util.debuglog().enabled` getter is used to create a test that can be used in conditionals based on the existence of the `NODE_DEBUG` environment variable. @@ -338,8 +338,8 @@ added: v22.15.0 * `expected` {Array|string} The second value to compare * Returns: {Array} An array of difference entries. 
Each entry is an array with two elements: - * Index 0: {number} Operation code: `-1` for delete, `0` for no-op/unchanged, `1` for insert - * Index 1: {string} The value associated with the operation + * `0` {number} Operation code: `-1` for delete, `0` for no-op/unchanged, `1` for insert + * `1` {string} The value associated with the operation * Algorithm complexity: O(N\*D), where: @@ -729,6 +729,16 @@ fs.access('file/that/does/not/exist', (err) => { }); ``` +## `util.setTraceSigInt(enable)` + + + +* `enable` {boolean} + +Enable or disable printing a stack trace on `SIGINT`. The API is only available on the main thread. + ## `util.inherits(constructor, superConstructor)` -* {symbol} that can be used to declare custom inspect functions. +* Type: {symbol} that can be used to declare custom inspect functions. In addition to being accessible through `util.inspect.custom`, this symbol is [registered globally][global symbol registry] and can be @@ -1607,7 +1617,7 @@ console.log(String(myMIME)); ### `mime.type` -* {string} +* Type: {string} Gets and sets the type portion of the MIME. @@ -1639,7 +1649,7 @@ console.log(String(myMIME)); ### `mime.subtype` -* {string} +* Type: {string} Gets and sets the subtype portion of the MIME. @@ -1671,7 +1681,7 @@ console.log(String(myMIME)); ### `mime.essence` -* {string} +* Type: {string} Gets the essence of the MIME. This property is read only. Use `mime.type` or `mime.subtype` to alter the MIME. @@ -1704,7 +1714,7 @@ console.log(String(myMIME)); ### `mime.params` -* {MIMEParams} +* Type: {MIMEParams} Gets the [`MIMEParams`][] object representing the parameters of the MIME. This property is read-only. See @@ -1941,10 +1951,12 @@ changes: times. If `true`, all values will be collected in an array. If `false`, values for the option are last-wins. **Default:** `false`. * `short` {string} A single character alias for the option. - * `default` {string | boolean | string\[] | boolean\[]} The default value to - be used if (and only if) the option does not appear in the arguments to be - parsed. It must be of the same type as the `type` property. When `multiple` - is `true`, it must be an array. + * `default` {string | boolean | string\[] | boolean\[]} The value to assign to + the option if it does not appear in the arguments to be parsed. The value + must match the type specified by the `type` property. If `multiple` is + `true`, it must be an array. No default value is applied when the option + does appear in the arguments to be parsed, even if the provided value + is falsy. * `strict` {boolean} Should an error be thrown when unknown arguments are encountered, or when arguments are passed that do not match the `type` configured in `options`. @@ -2351,7 +2363,7 @@ changes: description: This is now defined as a shared symbol. --> -* {symbol} that can be used to declare custom promisified variants of functions, +* Type: {symbol} that can be used to declare custom promisified variants of functions, see [Custom promisified functions][]. In addition to being accessible through `util.promisify.custom`, this @@ -2594,20 +2606,20 @@ If `textDecoder.fatal` is `true`, decoding errors that occur will result in a ### `textDecoder.encoding` -* {string} +* Type: {string} The encoding supported by the `TextDecoder` instance. ### `textDecoder.fatal` -* {boolean} +* Type: {boolean} The value will be `true` if decoding errors result in a `TypeError` being thrown. 
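To illustrate the `fatal` behaviour described above, a minimal sketch: with `fatal: true`, malformed input causes `decode()` to throw a `TypeError` instead of emitting U+FFFD replacement characters (the byte sequence here is deliberately invalid UTF-8).

```js
const decoder = new TextDecoder('utf-8', { fatal: true });

try {
  decoder.decode(new Uint8Array([0xFF])); // 0xFF is never valid in UTF-8
} catch (err) {
  console.log(err instanceof TypeError); // true
  console.log(decoder.fatal);            // true
}
```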
### `textDecoder.ignoreBOM` -* {boolean} +* Type: {boolean} The value will be `true` if the decoding result will include the byte order mark. @@ -2664,7 +2676,7 @@ const { read, written } = encoder.encodeInto(src, dest); ### `textEncoder.encoding` -* {string} +* Type: {string} The encoding supported by the `TextEncoder` instance. Always set to `'utf-8'`. diff --git a/doc/api/vm.md b/doc/api/vm.md index 958e17f299e481..309f24fabdce32 100644 --- a/doc/api/vm.md +++ b/doc/api/vm.md @@ -120,7 +120,7 @@ any global object; rather, it is bound before each run, just for that run. added: v5.7.0 --> -* {boolean|undefined} +* Type: {boolean|undefined} When `cachedData` is supplied to create the `vm.Script`, this value will be set to either `true` or `false` depending on acceptance of the data by V8. @@ -369,7 +369,7 @@ added: - v18.13.0 --> -* {string|undefined} +* Type: {string|undefined} When the script is compiled from a source that contains a source map magic comment, this property will be set to the URL of the source map. @@ -585,7 +585,7 @@ the ECMAScript specification. ### `module.error` -* {any} +* Type: {any} If the `module.status` is `'errored'`, this property contains the exception thrown by the module during evaluation. If the status is anything else, @@ -626,7 +626,7 @@ Record][]s in the ECMAScript specification. ### `module.identifier` -* {string} +* Type: {string} The identifier of the current module, as set in the constructor. @@ -701,7 +701,7 @@ Record][]s in the ECMAScript specification. ### `module.namespace` -* {Object} +* Type: {Object} The namespace object of the module. This is only available after linking (`module.link()`) has completed. @@ -711,7 +711,7 @@ specification. ### `module.status` -* {string} +* Type: {string} The current status of the module. Will be one of: @@ -1063,7 +1063,7 @@ added: - v20.12.0 --> -* {Object} +* Type: {Object} Returns an object containing commonly used constants for VM operations. diff --git a/doc/api/wasi.md b/doc/api/wasi.md index 9b67d9a942b60a..21d0febcb1e1b7 100644 --- a/doc/api/wasi.md +++ b/doc/api/wasi.md @@ -251,7 +251,7 @@ added: - v12.16.0 --> -* {Object} +* Type: {Object} `wasiImport` is an object that implements the WASI system call API. This object should be passed as the `wasi_snapshot_preview1` import during the instantiation diff --git a/doc/api/webcrypto.md b/doc/api/webcrypto.md index 0bf068bcd8157f..91564069d79b14 100644 --- a/doc/api/webcrypto.md +++ b/doc/api/webcrypto.md @@ -553,9 +553,9 @@ added: v15.0.0 added: v15.0.0 --> -* `algorithm`: {RsaOaepParams|AesCtrParams|AesCbcParams|AesGcmParams} -* `key`: {CryptoKey} -* `data`: {ArrayBuffer|TypedArray|DataView|Buffer} +* `algorithm` {RsaOaepParams|AesCtrParams|AesCbcParams|AesGcmParams} +* `key` {CryptoKey} +* `data` {ArrayBuffer|TypedArray|DataView|Buffer} * Returns: {Promise} Fulfills with an {ArrayBuffer} upon success. Using the method and parameters specified in `algorithm` and the keying @@ -588,9 +588,9 @@ changes: -* `algorithm`: {EcdhKeyDeriveParams|HkdfParams|Pbkdf2Params} -* `baseKey`: {CryptoKey} -* `length`: {number|null} **Default:** `null` +* `algorithm` {EcdhKeyDeriveParams|HkdfParams|Pbkdf2Params} +* `baseKey` {CryptoKey} +* `length` {number|null} **Default:** `null` * Returns: {Promise} Fulfills with an {ArrayBuffer} upon success. 
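A minimal end-to-end sketch of `subtle.deriveBits()` with PBKDF2; the password, salt size, and iteration count are illustrative only, not recommendations.

```mjs
const { subtle } = globalThis.crypto;

const password = new TextEncoder().encode('correct horse battery staple');
const salt = globalThis.crypto.getRandomValues(new Uint8Array(16));

// Import the raw password, then derive 256 bits from it.
const baseKey = await subtle.importKey('raw', password, 'PBKDF2', false, ['deriveBits']);
const bits = await subtle.deriveBits(
  { name: 'PBKDF2', hash: 'SHA-256', salt, iterations: 100_000 },
  baseKey,
  256,
);

console.log(Buffer.from(bits).toString('hex')); // 64 hex characters
```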
@@ -628,11 +628,11 @@ changes: -* `algorithm`: {EcdhKeyDeriveParams|HkdfParams|Pbkdf2Params} -* `baseKey`: {CryptoKey} -* `derivedKeyAlgorithm`: {string|Algorithm|HmacImportParams|AesDerivedKeyParams} -* `extractable`: {boolean} -* `keyUsages`: {string\[]} See [Key usages][]. +* `algorithm` {EcdhKeyDeriveParams|HkdfParams|Pbkdf2Params} +* `baseKey` {CryptoKey} +* `derivedKeyAlgorithm` {string|Algorithm|HmacImportParams|AesDerivedKeyParams} +* `extractable` {boolean} +* `keyUsages` {string\[]} See [Key usages][]. * Returns: {Promise} Fulfills with a {CryptoKey} upon success. @@ -660,8 +660,8 @@ The algorithms currently supported include: added: v15.0.0 --> -* `algorithm`: {string|Algorithm} -* `data`: {ArrayBuffer|TypedArray|DataView|Buffer} +* `algorithm` {string|Algorithm} +* `data` {ArrayBuffer|TypedArray|DataView|Buffer} * Returns: {Promise} Fulfills with an {ArrayBuffer} upon success. Using the method identified by `algorithm`, `subtle.digest()` attempts to @@ -684,9 +684,9 @@ whose value is one of the above. added: v15.0.0 --> -* `algorithm`: {RsaOaepParams|AesCtrParams|AesCbcParams|AesGcmParams} -* `key`: {CryptoKey} -* `data`: {ArrayBuffer|TypedArray|DataView|Buffer} +* `algorithm` {RsaOaepParams|AesCtrParams|AesCbcParams|AesGcmParams} +* `key` {CryptoKey} +* `data` {ArrayBuffer|TypedArray|DataView|Buffer} * Returns: {Promise} Fulfills with an {ArrayBuffer} upon success. Using the method and parameters specified by `algorithm` and the keying @@ -717,8 +717,8 @@ changes: description: Removed `'NODE-DSA'` JWK export. --> -* `format`: {string} Must be one of `'raw'`, `'pkcs8'`, `'spki'`, or `'jwk'`. -* `key`: {CryptoKey} +* `format` {string} Must be one of `'raw'`, `'pkcs8'`, `'spki'`, or `'jwk'`. +* `key` {CryptoKey} * Returns: {Promise} Fulfills with an {ArrayBuffer|Object} upon success. Exports the given key into the specified format, if supported. @@ -756,12 +756,12 @@ added: v15.0.0 -* `algorithm`: {string|Algorithm|RsaHashedKeyGenParams|EcKeyGenParams|HmacKeyGenParams|AesKeyGenParams} +* `algorithm` {string|Algorithm|RsaHashedKeyGenParams|EcKeyGenParams|HmacKeyGenParams|AesKeyGenParams} -* `extractable`: {boolean} -* `keyUsages`: {string\[]} See [Key usages][]. +* `extractable` {boolean} +* `keyUsages` {string\[]} See [Key usages][]. * Returns: {Promise} Fulfills with a {CryptoKey|CryptoKeyPair} upon success. Using the method and parameters provided in `algorithm`, `subtle.generateKey()` @@ -805,17 +805,17 @@ changes: description: Removed `'NODE-DSA'` JWK import. --> -* `format`: {string} Must be one of `'raw'`, `'pkcs8'`, `'spki'`, or `'jwk'`. -* `keyData`: {ArrayBuffer|TypedArray|DataView|Buffer|Object} +* `format` {string} Must be one of `'raw'`, `'pkcs8'`, `'spki'`, or `'jwk'`. +* `keyData` {ArrayBuffer|TypedArray|DataView|Buffer|Object} -* `algorithm`: {string|Algorithm|RsaHashedImportParams|EcKeyImportParams|HmacImportParams} +* `algorithm` {string|Algorithm|RsaHashedImportParams|EcKeyImportParams|HmacImportParams} -* `extractable`: {boolean} -* `keyUsages`: {string\[]} See [Key usages][]. +* `extractable` {boolean} +* `keyUsages` {string\[]} See [Key usages][]. * Returns: {Promise} Fulfills with a {CryptoKey} upon success. 
The `subtle.importKey()` method attempts to interpret the provided `keyData` @@ -860,9 +860,9 @@ changes: -* `algorithm`: {string|Algorithm|RsaPssParams|EcdsaParams|Ed448Params} -* `key`: {CryptoKey} -* `data`: {ArrayBuffer|TypedArray|DataView|Buffer} +* `algorithm` {string|Algorithm|RsaPssParams|EcdsaParams|Ed448Params} +* `key` {CryptoKey} +* `data` {ArrayBuffer|TypedArray|DataView|Buffer} * Returns: {Promise} Fulfills with an {ArrayBuffer} upon success. @@ -887,19 +887,19 @@ The algorithms currently supported include: added: v15.0.0 --> -* `format`: {string} Must be one of `'raw'`, `'pkcs8'`, `'spki'`, or `'jwk'`. -* `wrappedKey`: {ArrayBuffer|TypedArray|DataView|Buffer} -* `unwrappingKey`: {CryptoKey} +* `format` {string} Must be one of `'raw'`, `'pkcs8'`, `'spki'`, or `'jwk'`. +* `wrappedKey` {ArrayBuffer|TypedArray|DataView|Buffer} +* `unwrappingKey` {CryptoKey} -* `unwrapAlgo`: {string|Algorithm|RsaOaepParams|AesCtrParams|AesCbcParams|AesGcmParams} -* `unwrappedKeyAlgo`: {string|Algorithm|RsaHashedImportParams|EcKeyImportParams|HmacImportParams} +* `unwrapAlgo` {string|Algorithm|RsaOaepParams|AesCtrParams|AesCbcParams|AesGcmParams} +* `unwrappedKeyAlgo` {string|Algorithm|RsaHashedImportParams|EcKeyImportParams|HmacImportParams} -* `extractable`: {boolean} -* `keyUsages`: {string\[]} See [Key usages][]. +* `extractable` {boolean} +* `keyUsages` {string\[]} See [Key usages][]. * Returns: {Promise} Fulfills with a {CryptoKey} upon success. In cryptography, "wrapping a key" refers to exporting and then encrypting the @@ -950,10 +950,10 @@ changes: -* `algorithm`: {string|Algorithm|RsaPssParams|EcdsaParams|Ed448Params} -* `key`: {CryptoKey} -* `signature`: {ArrayBuffer|TypedArray|DataView|Buffer} -* `data`: {ArrayBuffer|TypedArray|DataView|Buffer} +* `algorithm` {string|Algorithm|RsaPssParams|EcdsaParams|Ed448Params} +* `key` {CryptoKey} +* `signature` {ArrayBuffer|TypedArray|DataView|Buffer} +* `data` {ArrayBuffer|TypedArray|DataView|Buffer} * Returns: {Promise} Fulfills with a {boolean} upon success. @@ -980,10 +980,10 @@ added: v15.0.0 -* `format`: {string} Must be one of `'raw'`, `'pkcs8'`, `'spki'`, or `'jwk'`. -* `key`: {CryptoKey} -* `wrappingKey`: {CryptoKey} -* `wrapAlgo`: {string|Algorithm|RsaOaepParams|AesCtrParams|AesCbcParams|AesGcmParams} +* `format` {string} Must be one of `'raw'`, `'pkcs8'`, `'spki'`, or `'jwk'`. +* `key` {CryptoKey} +* `wrappingKey` {CryptoKey} +* `wrapAlgo` {string|Algorithm|RsaOaepParams|AesCtrParams|AesCbcParams|AesGcmParams} * Returns: {Promise} Fulfills with an {ArrayBuffer} upon success. @@ -1567,7 +1567,7 @@ added: v15.0.0 added: v15.0.0 --> -#### `pbkdb2Params.hash` +#### `pbkdf2Params.hash` -* `chunk`: {Buffer|TypedArray|DataView} +* `chunk` {Buffer|TypedArray|DataView} Appends a new chunk of data to the {ReadableStream}'s queue. @@ -1075,7 +1075,7 @@ Releases this writer's lock on the underlying {ReadableStream}. added: v16.5.0 --> -* `chunk`: {any} +* `chunk` {any} * Returns: A promise fulfilled with `undefined`. Appends a new chunk of data to the {WritableStream}'s queue. diff --git a/doc/api/worker_threads.md b/doc/api/worker_threads.md index 5afeaaa9c17fd7..e307bfe9398604 100644 --- a/doc/api/worker_threads.md +++ b/doc/api/worker_threads.md @@ -164,7 +164,7 @@ if (isMainThread) { added: v22.14.0 --> -* {boolean} +* Type: {boolean} Is `true` if this code is running inside of an internal [`Worker`][] thread (e.g the loader thread). 
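Returning to the `SubtleCrypto` signing methods above, a compact `subtle.sign()`/`subtle.verify()` round trip; HMAC is used only to keep the sketch short, and the other listed algorithms follow the same call shape.

```mjs
const { subtle } = globalThis.crypto;

const key = await subtle.generateKey({ name: 'HMAC', hash: 'SHA-256' }, false, ['sign', 'verify']);
const data = new TextEncoder().encode('payload to authenticate');

const signature = await subtle.sign('HMAC', key, data);
console.log(await subtle.verify('HMAC', key, signature, data)); // true
```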
@@ -206,7 +206,7 @@ console.log(isInternalThread); // false added: v10.5.0 --> -* {boolean} +* Type: {boolean} Is `true` if this code is not running inside of a [`Worker`][] thread. @@ -427,7 +427,7 @@ events using it. added: v10.5.0 --> -* {null|MessagePort} +* Type: {null|MessagePort} If this thread is a [`Worker`][], this is a [`MessagePort`][] allowing communication with the parent thread. Messages sent using @@ -636,7 +636,7 @@ added: - v12.16.0 --> -* {Object} +* Type: {Object} * `maxYoungGenerationSizeMb` {number} * `maxOldGenerationSizeMb` {number} * `codeRangeSizeMb` {number} @@ -654,7 +654,7 @@ If this is used in the main thread, its value is an empty object. added: v11.14.0 --> -* {symbol} +* Type: {symbol} A special value that can be passed as the `env` option of the [`Worker`][] constructor, to indicate that the current thread and the Worker thread should @@ -709,7 +709,7 @@ instances spawned from the current context. added: v10.5.0 --> -* {integer} +* Type: {integer} An integer identifier for the current thread. On the corresponding worker object (if there is any), it is available as [`worker.threadId`][]. @@ -1596,6 +1596,19 @@ added: v10.5.0 The `'online'` event is emitted when the worker thread has started executing JavaScript code. +### `worker.cpuUsage([prev])` + + + +* Returns: {Promise} + +This method returns a `Promise` that will resolve to an object identical to [`process.threadCpuUsage()`][], +or reject with an [`ERR_WORKER_NOT_RUNNING`][] error if the worker is no longer running. +This methods allows the statistics to be observed from outside the actual thread. + ### `worker.getHeapSnapshot([options])` -* {Object} +* Type: {Object} * `maxYoungGenerationSizeMb` {number} * `maxOldGenerationSizeMb` {number} * `codeRangeSizeMb` {number} @@ -1773,7 +1786,7 @@ If the worker has stopped, the return value is an empty object. added: v10.5.0 --> -* {stream.Readable} +* Type: {stream.Readable} This is a readable stream which contains data written to [`process.stderr`][] inside the worker thread. If `stderr: true` was not passed to the @@ -1786,7 +1799,7 @@ inside the worker thread. If `stderr: true` was not passed to the added: v10.5.0 --> -* {null|stream.Writable} +* Type: {null|stream.Writable} If `stdin: true` was passed to the [`Worker`][] constructor, this is a writable stream. The data written to this stream will be made available in @@ -1798,7 +1811,7 @@ the worker thread as [`process.stdin`][]. added: v10.5.0 --> -* {stream.Readable} +* Type: {stream.Readable} This is a readable stream which contains data written to [`process.stdout`][] inside the worker thread. If `stdout: true` was not passed to the @@ -1830,7 +1843,7 @@ Returns a Promise for the exit code that is fulfilled when the added: v10.5.0 --> -* {integer} +* Type: {integer} An integer identifier for the referenced thread. Inside the worker thread, it is available as [`require('node:worker_threads').threadId`][]. @@ -1949,6 +1962,7 @@ thread spawned will spawn another until the application crashes. 
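A small sketch of the new `worker.cpuUsage()` method documented above, sampled from the parent thread. The `{ user, system }` shape (in microseconds) is assumed to mirror `process.cpuUsage()`, since the entry states the result is identical to `process.threadCpuUsage()`.

```js
const { Worker } = require('node:worker_threads');

const worker = new Worker('setInterval(() => {}, 1000);', { eval: true });

worker.once('online', async () => {
  // Observe the worker's CPU usage without entering the worker itself.
  console.log(await worker.cpuUsage()); // e.g. { user: 1234, system: 567 }
  await worker.terminate();
});
```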
[`process.stderr`]: process.md#processstderr [`process.stdin`]: process.md#processstdin [`process.stdout`]: process.md#processstdout +[`process.threadCpuUsage()`]: process.md#processthreadcpuusagepreviousvalue [`process.title`]: process.md#processtitle [`require('node:worker_threads').isMainThread`]: #workerismainthread [`require('node:worker_threads').parentPort.on('message')`]: #event-message diff --git a/doc/api/zlib.md b/doc/api/zlib.md index ded1e96d474bc9..c493bcbe7d4d59 100644 --- a/doc/api/zlib.md +++ b/doc/api/zlib.md @@ -74,7 +74,6 @@ import { createReadStream, createWriteStream, } from 'node:fs'; -import process from 'node:process'; import { createGzip } from 'node:zlib'; import { pipeline } from 'node:stream/promises'; @@ -1005,7 +1004,7 @@ expose values under these names. added: v10.0.0 --> -* {number} +* Type: {number} The `zlib.bytesWritten` property specifies the number of bytes written to the engine, before the bytes are processed (compressed or decompressed, @@ -1082,6 +1081,9 @@ Each Zstd-based class takes an `options` object. All options are optional. * `maxOutputLength` {integer} Limits output size when using [convenience methods][]. **Default:** [`buffer.kMaxLength`][] * `info` {boolean} If `true`, returns an object with `buffer` and `engine`. **Default:** `false` +* `dictionary` {Buffer} Optional dictionary used to + improve compression efficiency when compressing or decompressing data that + shares common patterns with the dictionary. For example: diff --git a/doc/api_assets/style.css b/doc/api_assets/style.css index c23e486ef80b82..d314f3d990631b 100644 --- a/doc/api_assets/style.css +++ b/doc/api_assets/style.css @@ -373,6 +373,11 @@ p { padding-bottom: 2rem; } +/* prevent the module-level sticky stability header from overlapping the section headers when clicked */ +#apicontent:has(> .api_stability) a { + scroll-margin-top: 50px; +} + table { border-collapse: collapse; margin: 0 0 1.5rem; @@ -837,6 +842,9 @@ kbd { .api_stability { top: 0; } + #apicontent a { + scroll-margin-top: 0; + } } @media not screen, (max-height: 1000px) { diff --git a/doc/changelogs/CHANGELOG_V22.md b/doc/changelogs/CHANGELOG_V22.md index 2624c69ecccdc3..358b4d9318c9a4 100644 --- a/doc/changelogs/CHANGELOG_V22.md +++ b/doc/changelogs/CHANGELOG_V22.md @@ -9,6 +9,7 @@ +22.19.0
22.18.0
22.17.1
22.17.0
@@ -63,6 +64,160 @@ * [io.js](CHANGELOG_IOJS.md) * [Archive](CHANGELOG_ARCHIVE.md) + + +## 2025-08-28, Version 22.19.0 'Jod' (LTS), @aduh95 + +### Notable Changes + +* \[[`8e2076a24f`](https://github.com/nodejs/node/commit/8e2076a24f)] - **(SEMVER-MINOR)** **cli**: add NODE\_USE\_SYSTEM\_CA=1 (Joyee Cheung) [#59276](https://github.com/nodejs/node/pull/59276) +* \[[`e592d739c2`](https://github.com/nodejs/node/commit/e592d739c2)] - **(SEMVER-MINOR)** **cli**: support `${pid}` placeholder in --cpu-prof-name (Haram Jeong) [#59072](https://github.com/nodejs/node/pull/59072) +* \[[`cda1dab6e2`](https://github.com/nodejs/node/commit/cda1dab6e2)] - **(SEMVER-MINOR)** **crypto**: add tls.setDefaultCACertificates() (Joyee Cheung) [#58822](https://github.com/nodejs/node/pull/58822) +* \[[`1f184513e9`](https://github.com/nodejs/node/commit/1f184513e9)] - **(SEMVER-MINOR)** **dns**: support max timeout (theanarkh) [#58440](https://github.com/nodejs/node/pull/58440) +* \[[`bace73a173`](https://github.com/nodejs/node/commit/bace73a173)] - **doc**: update the instruction on how to verify releases (Antoine du Hamel) [#59113](https://github.com/nodejs/node/pull/59113) +* \[[`fa9a9e9c69`](https://github.com/nodejs/node/commit/fa9a9e9c69)] - **(SEMVER-MINOR)** **esm**: unflag --experimental-wasm-modules (Guy Bedford) [#57038](https://github.com/nodejs/node/pull/57038) +* \[[`390a9dc20b`](https://github.com/nodejs/node/commit/390a9dc20b)] - **(SEMVER-MINOR)** **http**: add server.keepAliveTimeoutBuffer option (Haram Jeong) [#59243](https://github.com/nodejs/node/pull/59243) +* \[[`c12c5343ad`](https://github.com/nodejs/node/commit/c12c5343ad)] - **lib**: docs deprecate \_http\_\* (Sebastian Beltran) [#59293](https://github.com/nodejs/node/pull/59293) +* \[[`f57ee3d71f`](https://github.com/nodejs/node/commit/f57ee3d71f)] - **(SEMVER-MINOR)** **net**: update net.blocklist to allow file save and file management (alphaleadership) [#58087](https://github.com/nodejs/node/pull/58087) +* \[[`035da74c31`](https://github.com/nodejs/node/commit/035da74c31)] - **(SEMVER-MINOR)** **process**: add threadCpuUsage (Paolo Insogna) [#56467](https://github.com/nodejs/node/pull/56467) +* \[[`8e697d1884`](https://github.com/nodejs/node/commit/8e697d1884)] - **(SEMVER-MINOR)** **zlib**: add dictionary support to zstdCompress and zstdDecompress (lluisemper) [#59240](https://github.com/nodejs/node/pull/59240) + +### Commits + +* \[[`73aa0ae37f`](https://github.com/nodejs/node/commit/73aa0ae37f)] - **assert**: change utils to use index instead of for...of (방진혁) [#59278](https://github.com/nodejs/node/pull/59278) +* \[[`dfe3a11eed`](https://github.com/nodejs/node/commit/dfe3a11eed)] - **benchmark**: remove deprecated \_extend from benchmark (Rafael Gonzaga) [#59228](https://github.com/nodejs/node/pull/59228) +* \[[`9b9d30042a`](https://github.com/nodejs/node/commit/9b9d30042a)] - **benchmark**: add fs warmup to writefile-promises (Bruno Rodrigues) [#59215](https://github.com/nodejs/node/pull/59215) +* \[[`a663f7f954`](https://github.com/nodejs/node/commit/a663f7f954)] - **benchmark**: add calibrate-n script (Rafael Gonzaga) [#59186](https://github.com/nodejs/node/pull/59186) +* \[[`1b9b5bddd6`](https://github.com/nodejs/node/commit/1b9b5bddd6)] - **benchmark**: adjust configuration for string-decoder bench (Rafael Gonzaga) [#59187](https://github.com/nodejs/node/pull/59187) +* \[[`d0ac3319f9`](https://github.com/nodejs/node/commit/d0ac3319f9)] - **benchmark**: add --track to benchmark (Rafael Gonzaga) 
[#59174](https://github.com/nodejs/node/pull/59174) +* \[[`2044968b86`](https://github.com/nodejs/node/commit/2044968b86)] - **benchmark**: small lint fix on \_cli.js (Rafael Gonzaga) [#59172](https://github.com/nodejs/node/pull/59172) +* \[[`4e519934cb`](https://github.com/nodejs/node/commit/4e519934cb)] - **benchmark**: drop misc/punycode benchmark (Rafael Gonzaga) [#59171](https://github.com/nodejs/node/pull/59171) +* \[[`07e173d969`](https://github.com/nodejs/node/commit/07e173d969)] - **benchmark**: fix sqlite-is-transaction (Rafael Gonzaga) [#59170](https://github.com/nodejs/node/pull/59170) +* \[[`8440b6177f`](https://github.com/nodejs/node/commit/8440b6177f)] - **benchmark**: reduce N for diagnostics\_channel subscribe benchmark (Arthur Angelo) [#59116](https://github.com/nodejs/node/pull/59116) +* \[[`8615ea6db0`](https://github.com/nodejs/node/commit/8615ea6db0)] - **buffer**: cache Environment::GetCurrent to avoid repeated calls (Mert Can Altin) [#59043](https://github.com/nodejs/node/pull/59043) +* \[[`3deb5361d2`](https://github.com/nodejs/node/commit/3deb5361d2)] - **build**: fix node\_use\_sqlite for GN builds (Shelley Vohr) [#59017](https://github.com/nodejs/node/pull/59017) +* \[[`0f0ce63116`](https://github.com/nodejs/node/commit/0f0ce63116)] - **build**: remove suppressions.supp (Rafael Gonzaga) [#59079](https://github.com/nodejs/node/pull/59079) +* \[[`b30a2117dc`](https://github.com/nodejs/node/commit/b30a2117dc)] - **build,deps,tools**: prepare to update to OpenSSL 3.5 (Richard Lau) [#58100](https://github.com/nodejs/node/pull/58100) +* \[[`8e2076a24f`](https://github.com/nodejs/node/commit/8e2076a24f)] - **(SEMVER-MINOR)** **cli**: add NODE\_USE\_SYSTEM\_CA=1 (Joyee Cheung) [#59276](https://github.com/nodejs/node/pull/59276) +* \[[`e592d739c2`](https://github.com/nodejs/node/commit/e592d739c2)] - **(SEMVER-MINOR)** **cli**: support `${pid}` placeholder in --cpu-prof-name (Haram Jeong) [#59072](https://github.com/nodejs/node/pull/59072) +* \[[`b5571047ed`](https://github.com/nodejs/node/commit/b5571047ed)] - **crypto**: prepare webcrypto key import/export for modern algorithms (Filip Skokan) [#59284](https://github.com/nodejs/node/pull/59284) +* \[[`cda1dab6e2`](https://github.com/nodejs/node/commit/cda1dab6e2)] - **(SEMVER-MINOR)** **crypto**: add tls.setDefaultCACertificates() (Joyee Cheung) [#58822](https://github.com/nodejs/node/pull/58822) +* \[[`76dab34fb7`](https://github.com/nodejs/node/commit/76dab34fb7)] - **deps**: support madvise(3C) across ALL illumos revisions (Dan McDonald) [#58237](https://github.com/nodejs/node/pull/58237) +* \[[`19d3ed64b6`](https://github.com/nodejs/node/commit/19d3ed64b6)] - **deps**: update sqlite to 3.50.4 (Node.js GitHub Bot) [#59337](https://github.com/nodejs/node/pull/59337) +* \[[`38bafc59e0`](https://github.com/nodejs/node/commit/38bafc59e0)] - **deps**: V8: backport 493cb53691be (Chengzhong Wu) [#59238](https://github.com/nodejs/node/pull/59238) +* \[[`e8da171cc3`](https://github.com/nodejs/node/commit/e8da171cc3)] - **deps**: update sqlite to 3.50.3 (Node.js GitHub Bot) [#59132](https://github.com/nodejs/node/pull/59132) +* \[[`fd4ba38ab6`](https://github.com/nodejs/node/commit/fd4ba38ab6)] - **deps**: update googletest to 7e17b15 (Node.js GitHub Bot) [#59131](https://github.com/nodejs/node/pull/59131) +* \[[`f71f427b95`](https://github.com/nodejs/node/commit/f71f427b95)] - **deps**: update archs files for openssl-3.0.17 (Node.js GitHub Bot) [#59134](https://github.com/nodejs/node/pull/59134) +* 
\[[`79c5a8f4d2`](https://github.com/nodejs/node/commit/79c5a8f4d2)] - **deps**: upgrade openssl sources to openssl-3.0.17 (Node.js GitHub Bot) [#59134](https://github.com/nodejs/node/pull/59134) +* \[[`0dcc84cf53`](https://github.com/nodejs/node/commit/0dcc84cf53)] - **deps**: update corepack to 0.34.0 (Node.js GitHub Bot) [#59133](https://github.com/nodejs/node/pull/59133) +* \[[`1f184513e9`](https://github.com/nodejs/node/commit/1f184513e9)] - **(SEMVER-MINOR)** **dns**: support max timeout (theanarkh) [#58440](https://github.com/nodejs/node/pull/58440) +* \[[`f64f5df80e`](https://github.com/nodejs/node/commit/f64f5df80e)] - **doc**: fix `--use-system-ca` history (Joyee Cheung) [#59411](https://github.com/nodejs/node/pull/59411) +* \[[`e22aeaa38f`](https://github.com/nodejs/node/commit/e22aeaa38f)] - **doc**: add missing section for `setReturnArrays` in `sqlite.md` (Edy Silva) [#59074](https://github.com/nodejs/node/pull/59074) +* \[[`e44ef07235`](https://github.com/nodejs/node/commit/e44ef07235)] - **doc**: rename x509.extKeyUsage to x509.keyUsage (Filip Skokan) [#59332](https://github.com/nodejs/node/pull/59332) +* \[[`2c5d0aac5e`](https://github.com/nodejs/node/commit/2c5d0aac5e)] - **doc**: fix Pbkdf2Params hash attribute heading (Filip Skokan) [#59395](https://github.com/nodejs/node/pull/59395) +* \[[`fde94346e5`](https://github.com/nodejs/node/commit/fde94346e5)] - **doc**: fix missing reference links for server.keepAliveTimeoutBuffer (Lee Jiho) [#59356](https://github.com/nodejs/node/pull/59356) +* \[[`9af8bcea58`](https://github.com/nodejs/node/commit/9af8bcea58)] - **doc**: fix grammar in global dispatcher usage (Eng Zer Jun) [#59344](https://github.com/nodejs/node/pull/59344) +* \[[`0edf17198f`](https://github.com/nodejs/node/commit/0edf17198f)] - **doc**: run license-builder (github-actions\[bot]) [#59343](https://github.com/nodejs/node/pull/59343) +* \[[`7f767a2e38`](https://github.com/nodejs/node/commit/7f767a2e38)] - **doc**: correct orthography `eg.` → `e.g.` (Jacob Smith) [#59329](https://github.com/nodejs/node/pull/59329) +* \[[`a46ed50350`](https://github.com/nodejs/node/commit/a46ed50350)] - **doc**: clarify the need of compiler compatible with c++20 (Rafael Gonzaga) [#59297](https://github.com/nodejs/node/pull/59297) +* \[[`212263a305`](https://github.com/nodejs/node/commit/212263a305)] - **doc**: clarify release candidate stability index (Filip Skokan) [#59295](https://github.com/nodejs/node/pull/59295) +* \[[`ce93b8b556`](https://github.com/nodejs/node/commit/ce93b8b556)] - **doc**: add WDYT to glossary (btea) [#59280](https://github.com/nodejs/node/pull/59280) +* \[[`ebaaf2c67f`](https://github.com/nodejs/node/commit/ebaaf2c67f)] - **doc**: add manpage entry for --use-system-ca (Joyee Cheung) [#59273](https://github.com/nodejs/node/pull/59273) +* \[[`43b5a21916`](https://github.com/nodejs/node/commit/43b5a21916)] - **doc**: add path.join and path.normalize clarification (Rafael Gonzaga) [#59262](https://github.com/nodejs/node/pull/59262) +* \[[`409c66d328`](https://github.com/nodejs/node/commit/409c66d328)] - **doc**: fix typo in `test/common/README.md` (Yoo) [#59180](https://github.com/nodejs/node/pull/59180) +* \[[`cbb0a8eb13`](https://github.com/nodejs/node/commit/cbb0a8eb13)] - **doc**: add note on process memoryUsage (fengmk2) [#59026](https://github.com/nodejs/node/pull/59026) +* \[[`9892b15d81`](https://github.com/nodejs/node/commit/9892b15d81)] - **doc**: format safely for `doc-kit` (Aviv Keller) [#59229](https://github.com/nodejs/node/pull/59229) +* 
\[[`bace73a173`](https://github.com/nodejs/node/commit/bace73a173)] - **doc**: update the instruction on how to verify releases (Antoine du Hamel) [#59113](https://github.com/nodejs/node/pull/59113) +* \[[`b549deac02`](https://github.com/nodejs/node/commit/b549deac02)] - **doc**: copyedit SECURITY.md (Rich Trott) [#59190](https://github.com/nodejs/node/pull/59190) +* \[[`ef1bc3f344`](https://github.com/nodejs/node/commit/ef1bc3f344)] - **doc**: fix broken sentence in `URL.parse` (Superchupu) [#59164](https://github.com/nodejs/node/pull/59164) +* \[[`3c6639e8ec`](https://github.com/nodejs/node/commit/3c6639e8ec)] - **doc**: improve onboarding instructions (Joyee Cheung) [#59159](https://github.com/nodejs/node/pull/59159) +* \[[`6ffaac66bc`](https://github.com/nodejs/node/commit/6ffaac66bc)] - **doc**: add constraints for mem leak to threat model (Rafael Gonzaga) [#58917](https://github.com/nodejs/node/pull/58917) +* \[[`e419d20144`](https://github.com/nodejs/node/commit/e419d20144)] - **doc**: add Aditi-1400 to collaborators (Aditi Singh) [#59157](https://github.com/nodejs/node/pull/59157) +* \[[`ba380f7bf3`](https://github.com/nodejs/node/commit/ba380f7bf3)] - **doc**: avoid suggesting testing fast api with intense loop (Chengzhong Wu) [#59111](https://github.com/nodejs/node/pull/59111) +* \[[`fa1a532f2b`](https://github.com/nodejs/node/commit/fa1a532f2b)] - **doc**: fix typo in writing-test.md (SeokHun) [#59123](https://github.com/nodejs/node/pull/59123) +* \[[`0b93ca3d19`](https://github.com/nodejs/node/commit/0b93ca3d19)] - **doc**: add RafaelGSS as steward July 25 (Rafael Gonzaga) [#59078](https://github.com/nodejs/node/pull/59078) +* \[[`7d747aeac8`](https://github.com/nodejs/node/commit/7d747aeac8)] - **doc**: clarify ERR\_FS\_FILE\_TOO\_LARGE to reflect fs.readFile() I/O limit (Haram Jeong) [#59050](https://github.com/nodejs/node/pull/59050) +* \[[`0b5613f9fe`](https://github.com/nodejs/node/commit/0b5613f9fe)] - **doc**: run license-builder (github-actions\[bot]) [#59056](https://github.com/nodejs/node/pull/59056) +* \[[`1b6b5e72d3`](https://github.com/nodejs/node/commit/1b6b5e72d3)] - **doc**: fix typed list formatting (Aviv Keller) [#59019](https://github.com/nodejs/node/pull/59019) +* \[[`756c7dd639`](https://github.com/nodejs/node/commit/756c7dd639)] - **doc**: refine `util.parseArgs` `default` definition (Slayer95) [#58958](https://github.com/nodejs/node/pull/58958) +* \[[`0b840523a2`](https://github.com/nodejs/node/commit/0b840523a2)] - **doc**: remove unused import in `zlib.md` (coderaiser) [#59041](https://github.com/nodejs/node/pull/59041) +* \[[`3e9ed4b080`](https://github.com/nodejs/node/commit/3e9ed4b080)] - **doc**: add stability index to the `--watch-kill-signal` flag (Dario Piotrowicz) [#58997](https://github.com/nodejs/node/pull/58997) +* \[[`cb08a5d43f`](https://github.com/nodejs/node/commit/cb08a5d43f)] - **doc**: add missing `` blocks (Antoine du Hamel) [#58995](https://github.com/nodejs/node/pull/58995) +* \[[`4a42360fe5`](https://github.com/nodejs/node/commit/4a42360fe5)] - **doc**: add scroll margin to links (Roman Reiss) [#58982](https://github.com/nodejs/node/pull/58982) +* \[[`9d073f32da`](https://github.com/nodejs/node/commit/9d073f32da)] - **doc**: add sponsorship link to RafaelGSS (Rafael Gonzaga) [#58983](https://github.com/nodejs/node/pull/58983) +* \[[`3cc11fc9ac`](https://github.com/nodejs/node/commit/3cc11fc9ac)] - **domain**: remove deprecated API call (Alex Yang) [#59339](https://github.com/nodejs/node/pull/59339) +* 
\[[`fa9a9e9c69`](https://github.com/nodejs/node/commit/fa9a9e9c69)] - **(SEMVER-MINOR)** **esm**: unflag --experimental-wasm-modules (Guy Bedford) [#57038](https://github.com/nodejs/node/pull/57038) +* \[[`177ed3b3dd`](https://github.com/nodejs/node/commit/177ed3b3dd)] - **esm**: js-string Wasm builtins in ESM Integration (Guy Bedford) [#59020](https://github.com/nodejs/node/pull/59020) +* \[[`4619fe0e04`](https://github.com/nodejs/node/commit/4619fe0e04)] - **fs**: fix glob TypeError on restricted dirs (Sylphy-0xd3ac) [#58674](https://github.com/nodejs/node/pull/58674) +* \[[`ad2089e32d`](https://github.com/nodejs/node/commit/ad2089e32d)] - **fs**: correct error message when FileHandle is transferred (Alex Yang) [#59156](https://github.com/nodejs/node/pull/59156) +* \[[`390a9dc20b`](https://github.com/nodejs/node/commit/390a9dc20b)] - **(SEMVER-MINOR)** **http**: add server.keepAliveTimeoutBuffer option (Haram Jeong) [#59243](https://github.com/nodejs/node/pull/59243) +* \[[`659002359d`](https://github.com/nodejs/node/commit/659002359d)] - **http2**: set Http2Stream#sentHeaders for raw headers (Darshan Sen) [#59244](https://github.com/nodejs/node/pull/59244) +* \[[`d02831ef73`](https://github.com/nodejs/node/commit/d02831ef73)] - **inspector**: initial support for Network.loadNetworkResource (Shima Ryuhei) [#58077](https://github.com/nodejs/node/pull/58077) +* \[[`264a838779`](https://github.com/nodejs/node/commit/264a838779)] - **lib**: add trace-sigint APIs (theanarkh) [#59040](https://github.com/nodejs/node/pull/59040) +* \[[`d22d2fa6d4`](https://github.com/nodejs/node/commit/d22d2fa6d4)] - **lib**: optimize writable stream buffer clearing (Yoo) [#59406](https://github.com/nodejs/node/pull/59406) +* \[[`a5e9759409`](https://github.com/nodejs/node/commit/a5e9759409)] - **lib**: do not modify prototype deprecated asyncResource (RafaelGSS) [#59195](https://github.com/nodejs/node/pull/59195) +* \[[`9254257fc0`](https://github.com/nodejs/node/commit/9254257fc0)] - **lib**: restructure assert to become a class (Miguel Marcondes Filho) [#58253](https://github.com/nodejs/node/pull/58253) +* \[[`946eab8d77`](https://github.com/nodejs/node/commit/946eab8d77)] - **lib**: handle superscript variants on windows device (Rafael Gonzaga) [#59261](https://github.com/nodejs/node/pull/59261) +* \[[`cd857a97b5`](https://github.com/nodejs/node/commit/cd857a97b5)] - **lib**: use validateString (hotpineapple) [#59296](https://github.com/nodejs/node/pull/59296) +* \[[`c12c5343ad`](https://github.com/nodejs/node/commit/c12c5343ad)] - **lib**: docs deprecate \_http\_\* (Sebastian Beltran) [#59293](https://github.com/nodejs/node/pull/59293) +* \[[`a28e5f0938`](https://github.com/nodejs/node/commit/a28e5f0938)] - **lib**: add type names in source mapped stack traces (Chengzhong Wu) [#58976](https://github.com/nodejs/node/pull/58976) +* \[[`6aec5aee7c`](https://github.com/nodejs/node/commit/6aec5aee7c)] - **lib**: prefer AsyncIteratorPrototype primordial (René) [#59097](https://github.com/nodejs/node/pull/59097) +* \[[`e704349858`](https://github.com/nodejs/node/commit/e704349858)] - **lib**: fix incorrect `ArrayBufferPrototypeGetDetached` primordial type (Dario Piotrowicz) [#58978](https://github.com/nodejs/node/pull/58978) +* \[[`2fc25fd400`](https://github.com/nodejs/node/commit/2fc25fd400)] - **lib**: flag to conditionally modify proto on deprecate (Rafael Gonzaga) [#58928](https://github.com/nodejs/node/pull/58928) +* \[[`446ee98e00`](https://github.com/nodejs/node/commit/446ee98e00)] - **meta**: clarify pr 
objection process further (James M Snell) [#59096](https://github.com/nodejs/node/pull/59096) +* \[[`46c339e4f3`](https://github.com/nodejs/node/commit/46c339e4f3)] - **meta**: add mailmap entry for aditi-1400 (Aditi) [#59316](https://github.com/nodejs/node/pull/59316) +* \[[`70a586261f`](https://github.com/nodejs/node/commit/70a586261f)] - **meta**: add tsc and build team as codeowners building.md (Rafael Gonzaga) [#59298](https://github.com/nodejs/node/pull/59298) +* \[[`e666e06781`](https://github.com/nodejs/node/commit/e666e06781)] - **meta**: add nodejs/path to path files (Rafael Gonzaga) [#59289](https://github.com/nodejs/node/pull/59289) +* \[[`251b65dd6c`](https://github.com/nodejs/node/commit/251b65dd6c)] - **meta**: move one or more collaborators to emeritus (Node.js GitHub Bot) [#59140](https://github.com/nodejs/node/pull/59140) +* \[[`c8a7964da8`](https://github.com/nodejs/node/commit/c8a7964da8)] - **meta**: add marco-ippolito to security release stewards (Marco Ippolito) [#58944](https://github.com/nodejs/node/pull/58944) +* \[[`0eec5cc492`](https://github.com/nodejs/node/commit/0eec5cc492)] - **module**: fix conditions override in synchronous resolve hooks (Joyee Cheung) [#59011](https://github.com/nodejs/node/pull/59011) +* \[[`4acf7cd6d3`](https://github.com/nodejs/node/commit/4acf7cd6d3)] - **module**: throw error when re-runing errored module jobs (Joyee Cheung) [#58957](https://github.com/nodejs/node/pull/58957) +* \[[`f57ee3d71f`](https://github.com/nodejs/node/commit/f57ee3d71f)] - **(SEMVER-MINOR)** **net**: update net.blocklist to allow file save and file management (alphaleadership) [#58087](https://github.com/nodejs/node/pull/58087) +* \[[`4aefcfc318`](https://github.com/nodejs/node/commit/4aefcfc318)] - **node-api**: reword "implementation in an alternative VM" as implementable (Chengzhong Wu) [#59036](https://github.com/nodejs/node/pull/59036) +* \[[`ff6be2ed5d`](https://github.com/nodejs/node/commit/ff6be2ed5d)] - **node-api,doc**: update links to ecma262 with section names (Chengzhong Wu) [#59087](https://github.com/nodejs/node/pull/59087) +* \[[`8d60602677`](https://github.com/nodejs/node/commit/8d60602677)] - **perf\_hooks**: do not expose SafeMap via Histogram wrapper (René) [#59094](https://github.com/nodejs/node/pull/59094) +* \[[`035da74c31`](https://github.com/nodejs/node/commit/035da74c31)] - **(SEMVER-MINOR)** **process**: add threadCpuUsage (Paolo Insogna) [#56467](https://github.com/nodejs/node/pull/56467) +* \[[`74e1aa4d06`](https://github.com/nodejs/node/commit/74e1aa4d06)] - **process**: make execve's args argument optional (Allon Murienik) [#58412](https://github.com/nodejs/node/pull/58412) +* \[[`3366e60bd9`](https://github.com/nodejs/node/commit/3366e60bd9)] - **src**: use simdjson to parse SEA configuration (Joyee Cheung) [#59323](https://github.com/nodejs/node/pull/59323) +* \[[`63cc06977a`](https://github.com/nodejs/node/commit/63cc06977a)] - **src**: mark realm leaf classes final (Anna Henningsen) [#59355](https://github.com/nodejs/node/pull/59355) +* \[[`133d410cd9`](https://github.com/nodejs/node/commit/133d410cd9)] - **src**: use C++20 `contains()` method (iknoom) [#59304](https://github.com/nodejs/node/pull/59304) +* \[[`57fe96fe49`](https://github.com/nodejs/node/commit/57fe96fe49)] - **src**: added CHECK\_NOT\_NULL check for multiple eq\_wrap\_async (F3lixTheCat) [#59267](https://github.com/nodejs/node/pull/59267) +* \[[`a8f381a6c5`](https://github.com/nodejs/node/commit/a8f381a6c5)] - **src**: add nullptr checks in `StreamPipe::New` 
(Burkov Egor) [#57613](https://github.com/nodejs/node/pull/57613) +* \[[`0769e5a0dc`](https://github.com/nodejs/node/commit/0769e5a0dc)] - **src**: call unmask after install signal handler (theanarkh) [#59059](https://github.com/nodejs/node/pull/59059) +* \[[`1e7639e9e1`](https://github.com/nodejs/node/commit/1e7639e9e1)] - **src**: use `FastStringKey` for `TrackV8FastApiCall` (Anna Henningsen) [#59148](https://github.com/nodejs/node/pull/59148) +* \[[`9075a1a4bf`](https://github.com/nodejs/node/commit/9075a1a4bf)] - **src**: use C++20 `consteval` for `FastStringKey` (Anna Henningsen) [#59148](https://github.com/nodejs/node/pull/59148) +* \[[`5a0fd5689b`](https://github.com/nodejs/node/commit/5a0fd5689b)] - **src**: remove declarations of removed BaseObject static fns (Anna Henningsen) [#59093](https://github.com/nodejs/node/pull/59093) +* \[[`c637a2c41d`](https://github.com/nodejs/node/commit/c637a2c41d)] - **src**: add cache to nearest parent package json (Ilyas Shabi) [#59086](https://github.com/nodejs/node/pull/59086) +* \[[`3375a6cfee`](https://github.com/nodejs/node/commit/3375a6cfee)] - **test**: deflake sequential/test-tls-session-timeout (Joyee Cheung) [#59423](https://github.com/nodejs/node/pull/59423) +* \[[`438cb11a15`](https://github.com/nodejs/node/commit/438cb11a15)] - **test**: update WPT resources,WebCryptoAPI,webstorage (Filip Skokan) [#59311](https://github.com/nodejs/node/pull/59311) +* \[[`68bec19f76`](https://github.com/nodejs/node/commit/68bec19f76)] - **test**: add known issue test for fs.cpSync dereference bug (James M Snell) [#58941](https://github.com/nodejs/node/pull/58941) +* \[[`a100cce379`](https://github.com/nodejs/node/commit/a100cce379)] - **test**: deflake stream-readable-to-web test (Ethan Arrowood) [#58948](https://github.com/nodejs/node/pull/58948) +* \[[`b7577d853b`](https://github.com/nodejs/node/commit/b7577d853b)] - **test**: make test-inspector-network-resource sequential (Shima Ryuhei) [#59104](https://github.com/nodejs/node/pull/59104) +* \[[`667ee82443`](https://github.com/nodejs/node/commit/667ee82443)] - **test**: don't use expose internals in test-http-outgoing-buffer.js (Meghan Denny) [#59219](https://github.com/nodejs/node/pull/59219) +* \[[`feec26d3bb`](https://github.com/nodejs/node/commit/feec26d3bb)] - **test**: use mustSucceed in test-fs-read (Sungwon) [#59204](https://github.com/nodejs/node/pull/59204) +* \[[`d7e23769ab`](https://github.com/nodejs/node/commit/d7e23769ab)] - **test**: prepare test-crypto-rsa-dsa for newer OpenSSL (Richard Lau) [#58100](https://github.com/nodejs/node/pull/58100) +* \[[`3a9aca91c6`](https://github.com/nodejs/node/commit/3a9aca91c6)] - **test**: fix flaky test-worker-message-port-transfer-filehandle test (Alex Yang) [#59158](https://github.com/nodejs/node/pull/59158) +* \[[`3aee7625b9`](https://github.com/nodejs/node/commit/3aee7625b9)] - **test**: expand linting rules around `assert` w literal messages (Anna Henningsen) [#59147](https://github.com/nodejs/node/pull/59147) +* \[[`667c2ced38`](https://github.com/nodejs/node/commit/667c2ced38)] - **test**: update WPT for WebCryptoAPI to ab08796857 (Node.js GitHub Bot) [#59129](https://github.com/nodejs/node/pull/59129) +* \[[`89ac344393`](https://github.com/nodejs/node/commit/89ac344393)] - **test**: update WPT for WebCryptoAPI to 19d82c57ab (Node.js GitHub Bot) [#59129](https://github.com/nodejs/node/pull/59129) +* \[[`d332957ac6`](https://github.com/nodejs/node/commit/d332957ac6)] - **test**: skip tests that cause timeouts on IBM i (Abdirahim Musse) 
[#59014](https://github.com/nodejs/node/pull/59014) +* \[[`a23562ff72`](https://github.com/nodejs/node/commit/a23562ff72)] - **test**: update `startCLI` to set `--port=0` by default (Dario Piotrowicz) [#59042](https://github.com/nodejs/node/pull/59042) +* \[[`4a12f5d83b`](https://github.com/nodejs/node/commit/4a12f5d83b)] - **test**: mark test-inspector-network-fetch as flaky on Windows (Joyee Cheung) [#59091](https://github.com/nodejs/node/pull/59091) +* \[[`ac4f7aa69c`](https://github.com/nodejs/node/commit/ac4f7aa69c)] - **test**: add missing port=0 arg in test-debugger-extract-function-name (Dario Piotrowicz) [#58977](https://github.com/nodejs/node/pull/58977) +* \[[`8dd09267e3`](https://github.com/nodejs/node/commit/8dd09267e3)] - **test,crypto**: skip unsupported ciphers (Shelley Vohr) [#59388](https://github.com/nodejs/node/pull/59388) +* \[[`45200b43ea`](https://github.com/nodejs/node/commit/45200b43ea)] - **tools**: update coverage GitHub Actions to fixed version (Rich Trott) [#59512](https://github.com/nodejs/node/pull/59512) +* \[[`8f2b8b3dc4`](https://github.com/nodejs/node/commit/8f2b8b3dc4)] - **tools**: allow selecting test subsystems with numbers in their names (Darshan Sen) [#59242](https://github.com/nodejs/node/pull/59242) +* \[[`f9bc2573dd`](https://github.com/nodejs/node/commit/f9bc2573dd)] - **tools**: clarify README linter error message (Joyee Cheung) [#59160](https://github.com/nodejs/node/pull/59160) +* \[[`cba0de128d`](https://github.com/nodejs/node/commit/cba0de128d)] - **tools**: add support for URLs to PR commits in `merge.sh` (Antoine du Hamel) [#59162](https://github.com/nodejs/node/pull/59162) +* \[[`039949ef5b`](https://github.com/nodejs/node/commit/039949ef5b)] - **tools**: bump @eslint/plugin-kit from 0.3.1 to 0.3.3 in /tools/eslint (dependabot\[bot]) [#59119](https://github.com/nodejs/node/pull/59119) +* \[[`6a8a73aa35`](https://github.com/nodejs/node/commit/6a8a73aa35)] - **tools**: ignore CVE mention when linting release proposals (Antoine du Hamel) [#59037](https://github.com/nodejs/node/pull/59037) +* \[[`d0f40f3a3a`](https://github.com/nodejs/node/commit/d0f40f3a3a)] - **tools,test**: enforce best practices to detect never settling promises (Antoine du Hamel) [#58992](https://github.com/nodejs/node/pull/58992) +* \[[`9d801a3f00`](https://github.com/nodejs/node/commit/9d801a3f00)] - **typings**: improve internal binding types (Nam Yooseong) [#59351](https://github.com/nodejs/node/pull/59351) +* \[[`6dbda6cb25`](https://github.com/nodejs/node/commit/6dbda6cb25)] - **typings**: improve internal binding types (Michaël Zasso) [#59176](https://github.com/nodejs/node/pull/59176) +* \[[`e22dddf859`](https://github.com/nodejs/node/commit/e22dddf859)] - **util**: respect nested formats in styleText (Alex Yang) [#59098](https://github.com/nodejs/node/pull/59098) +* \[[`491f390515`](https://github.com/nodejs/node/commit/491f390515)] - **worker**: add cpuUsage for worker (theanarkh) [#59177](https://github.com/nodejs/node/pull/59177) +* \[[`8e697d1884`](https://github.com/nodejs/node/commit/8e697d1884)] - **(SEMVER-MINOR)** **zlib**: add dictionary support to zstdCompress and zstdDecompress (lluisemper) [#59240](https://github.com/nodejs/node/pull/59240) + ## 2025-07-31, Version 22.18.0 'Jod' (LTS), @aduh95 diff --git a/doc/contributing/adding-new-napi-api.md b/doc/contributing/adding-new-napi-api.md index 196075be20aade..b17abb03ad7a5c 100644 --- a/doc/contributing/adding-new-napi-api.md +++ b/doc/contributing/adding-new-napi-api.md @@ -37,7 +37,7 @@ please 
follow these principles and guidelines: 2. **Review and approval** * A new API addition **must** be signed off by at least two Node-API team members. - * **Should** be implemented in at least one other VM implementation of Node.js. + * **Should** be implemented in terms of available VM APIs in at least one other VM implementation of Node.js. 3. **Experimental phase** * New APIs **must** be marked as experimental for at least one minor Node.js release before promotion. @@ -49,4 +49,4 @@ please follow these principles and guidelines: * Approval by the Node-API team. * Availability of a down-level implementation if backporting is needed. * Usage by a published real-world module. - * Implementation in an alternative VM. + * Implementable in an alternative VM. diff --git a/doc/contributing/adding-v8-fast-api.md b/doc/contributing/adding-v8-fast-api.md index 1869c12731f7f3..e26bc841121e66 100644 --- a/doc/contributing/adding-v8-fast-api.md +++ b/doc/contributing/adding-v8-fast-api.md @@ -26,15 +26,8 @@ for example, they may not trigger garbage collection. in a snapshot (either the built-in or a user-land one). Please refer to the [binding functions documentation](../../src/README.md#binding-functions) for more information. -* To test fast APIs, make sure to run the tests in a loop with a decent - iterations count to trigger relevant optimizations that prefer the fast API - over the slow one. -* In debug mode (`--debug` or `--debug-node` flags), the fast API calls can be - tracked using the `TRACK_V8_FAST_API_CALL("key")` macro. This can be used to - count how many times fast paths are taken during tests. The key is a global - identifier and should be unique across the codebase. - Use `"binding_name.function_name"` or `"binding_name.function_name.suffix"` to - ensure uniqueness. +* Fast API functions must be tested following the example in + [Test with Fast API path](#test-with-fast-api-path). * The fast callback must be idempotent up to the point where error and fallback conditions are checked, because otherwise executing the slow callback might produce visible side effects twice. @@ -175,40 +168,47 @@ A typical function that communicates between JavaScript and C++ is as follows. v8::FastApiCallbackOptions& options); ``` -* In the unit tests: - - Since the fast API function uses `TRACK_V8_FAST_API_CALL`, we can ensure that - the fast paths are taken and test them by writing tests that force - V8 optimizations and check the counters. - - ```js - // Flags: --expose-internals --no-warnings --allow-natives-syntax - 'use strict'; - const common = require('../common'); - - const { internalBinding } = require('internal/test/binding'); - // We could also require a function that uses the internal binding internally. - const { divide } = internalBinding('custom_namespace'); - - // The function that will be optimized. It has to be a function written in - // JavaScript. Since `divide` comes from the C++ side, we need to wrap it. - function testFastPath(a, b) { - return divide(a, b); - } - - eval('%PrepareFunctionForOptimization(testFastPath)'); - // This call will let V8 know about the argument types that the function expects. 
- assert.strictEqual(testFastPath(6, 3), 2); - - eval('%OptimizeFunctionOnNextCall(testFastPath)'); - assert.strictEqual(testFastPath(8, 2), 4); - assert.throws(() => testFastPath(1, 0), { - code: 'ERR_INVALID_STATE', - }); - - if (common.isDebug) { - const { getV8FastApiCallCount } = internalBinding('debug'); - assert.strictEqual(getV8FastApiCallCount('custom_namespace.divide.ok'), 1); - assert.strictEqual(getV8FastApiCallCount('custom_namespace.divide.error'), 1); - } - ``` +### Test with Fast API path + +In debug mode (`./configure --debug` or `./configure --debug-node` flags), the +fast API calls can be tracked using the `TRACK_V8_FAST_API_CALL("key")` macro. +This can be used to count how many times fast paths are taken during tests. The +key is a global identifier and should be unique across the codebase. +Use `"binding_name.function_name"` or `"binding_name.function_name.suffix"` to +ensure uniqueness. + +In the unit tests, since the fast API function uses `TRACK_V8_FAST_API_CALL`, +we can ensure that the fast paths are taken and test them by writing tests that +force V8 optimizations and check the counters. + +```js +// Flags: --expose-internals --no-warnings --allow-natives-syntax +'use strict'; +const common = require('../common'); + +const { internalBinding } = require('internal/test/binding'); +// We could also require a function that uses the internal binding internally. +const { divide } = internalBinding('custom_namespace'); + +// The function that will be optimized. It has to be a function written in +// JavaScript. Since `divide` comes from the C++ side, we need to wrap it. +function testFastPath(a, b) { + return divide(a, b); +} + +eval('%PrepareFunctionForOptimization(testFastPath)'); +// This call will let V8 know about the argument types that the function expects. +assert.strictEqual(testFastPath(6, 3), 2); + +eval('%OptimizeFunctionOnNextCall(testFastPath)'); +assert.strictEqual(testFastPath(8, 2), 4); +assert.throws(() => testFastPath(1, 0), { + code: 'ERR_INVALID_STATE', +}); + +if (common.isDebug) { + const { getV8FastApiCallCount } = internalBinding('debug'); + assert.strictEqual(getV8FastApiCallCount('custom_namespace.divide.ok'), 1); + assert.strictEqual(getV8FastApiCallCount('custom_namespace.divide.error'), 1); +} +``` diff --git a/doc/contributing/collaborator-guide.md b/doc/contributing/collaborator-guide.md index d883533d26e218..3d77e1e0fe7463 100644 --- a/doc/contributing/collaborator-guide.md +++ b/doc/contributing/collaborator-guide.md @@ -154,10 +154,13 @@ requirements. If a pull request meets all requirements except the Collaborators can object to a pull request by using the "Request Changes" GitHub feature. Dissent comments alone don't constitute an -objection. Any pull request objection must include a clear reason for that -objection, and the objector must remain responsive for further discussion -towards consensus about the direction of the pull request. Where possible, -provide a set of actionable steps alongside the objection. +objection, nor do dissenting comments made in any associated issue. +A blocking objection to a change must be made in the pull request that +specifically proposes that change. Any pull request objection must include +a clear reason for that objection, and the objector must remain responsive +for further discussion towards consensus about the direction of the pull +request. Where possible, provide a set of actionable steps alongside the +objection. 
If the objection is not clear to others, another collaborator can ask an objecting collaborator to explain their objection or to provide actionable @@ -168,7 +171,22 @@ dismiss the objection. Pull requests with outstanding objections must remain open until all objections are satisfied. If reaching consensus is not possible, a collaborator can escalate the issue to the TSC by pinging `@nodejs/tsc` and -adding the `tsc-agenda` label to the issue. +adding the `tsc-agenda` label to the issue. The change cannot proceed without +either reaching consensus or a TSC decision to dismiss the objection(s). If the +TSC does choose to dismiss any objections, a clear explanation of the reasoning +or a link to the public vote must be given in the pull request before it lands. + +All Collaborator objections are considered equal. There is no greater weight given to +objections from TSC members than from any other Collaborator. + +Mistakes do happen. If a pull request is merged with an unresolved objection, +submit a fix. Simple issues may be fixed with a follow-up PR that addresses +the concern. More difficult issues may require a full revert. Most corrections +can be fast-tracked. If deemed necessary take a slower route to ensure stability +and consensus. + +Collaborators objecting to a pull request can best ensure their objections +are addressed by remaining engaged and responsive in the discussion. #### Helpful resources diff --git a/doc/contributing/investigating-native-memory-leaks.md b/doc/contributing/investigating-native-memory-leaks.md index 4d565fe031244f..6639617b54c53c 100644 --- a/doc/contributing/investigating-native-memory-leaks.md +++ b/doc/contributing/investigating-native-memory-leaks.md @@ -396,7 +396,7 @@ call: } ``` -Create a file (eg. `node-12.14.1.supp`) with the contents of the suppression +Create a file (e.g. 
`node-12.14.1.supp`) with the contents of the suppression records, and run Valgrind with the suppression file previously created: ```bash diff --git a/doc/contributing/security-release-process.md b/doc/contributing/security-release-process.md index 6c2f8a3a6e2400..676d8fd408909a 100644 --- a/doc/contributing/security-release-process.md +++ b/doc/contributing/security-release-process.md @@ -37,6 +37,7 @@ The current security stewards are documented in the main Node.js | NodeSource | Rafael | 2024-Jul-08 | | NodeSource | Rafael | 2025-Jan-21 | | NodeSource | Rafael | 2025-May-14 | +| NodeSource | Rafael | 2025-July-15 | | Datadog | Bryan | | | IBM | Joe | | | Platformatic | Matteo | | diff --git a/doc/contributing/writing-and-running-benchmarks.md b/doc/contributing/writing-and-running-benchmarks.md index 1ff494c7c4d90f..5c9709bab882a3 100644 --- a/doc/contributing/writing-and-running-benchmarks.md +++ b/doc/contributing/writing-and-running-benchmarks.md @@ -9,6 +9,7 @@ * [Benchmark analysis requirements](#benchmark-analysis-requirements) * [Running benchmarks](#running-benchmarks) * [Running individual benchmarks](#running-individual-benchmarks) + * [Calibrating the number of iterations with calibrate-n.js](#calibrating-the-number-of-iterations-with-calibrate-njs) * [Running all benchmarks](#running-all-benchmarks) * [Specifying CPU Cores for Benchmarks with run.js](#specifying-cpu-cores-for-benchmarks-with-runjs) * [Filtering benchmarks](#filtering-benchmarks) @@ -142,6 +143,46 @@ buffers/buffer-tostring.js n=10000000 len=1024 arg=true: 3498295.68561504 buffers/buffer-tostring.js n=10000000 len=1024 arg=false: 3783071.1678948295 ``` +### Calibrating the number of iterations with calibrate-n.js + +Before running benchmarks, it's often useful to determine the optimal number of iterations (`n`) +that provides statistically stable results. The `calibrate-n.js` tool helps find this value by +running a benchmark multiple times with increasing `n` values until the coefficient of variation (CV) +falls below a target threshold. + +```console +$ node benchmark/calibrate-n.js benchmark/buffers/buffer-compare.js + +-------------------------------------------------------- +Benchmark: buffers/buffer-compare.js +-------------------------------------------------------- +What we are trying to find: The optimal number of iterations (n) +that produces consistent benchmark results without wasting time. + +How it works: +1. Run the benchmark multiple times with a specific n value +2. Group results by configuration +3. If overall CV is above 5% or any configuration has CV above 10%, increase n and try again +4. Stop when we have stable results (overall CV < 5% and all configs CV < 10%) or max increases reached + +Configuration: +- Starting n: 10 iterations +- Runs per n value: 30 +- Target CV threshold: 5% (lower CV = more stable results) +- Max increases: 6 +- Increase factor: 10x +``` + +The tool accepts several options: + +* `--runs=N`: Number of runs for each n value (default: 30) +* `--cv-threshold=N`: Target coefficient of variation threshold (default: 0.05) +* `--max-increases=N`: Maximum number of n increases to try (default: 6) +* `--start-n=N`: Initial n value to start with (default: 10) +* `--increase=N`: Factor by which to increase n (default: 10) + +Once you've determined a stable `n` value, you can use it when running your benchmarks. 
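For example, a possible workflow (the calibrated value below is hypothetical, and assumes the target script exposes an `n` configuration, as most benchmarks do):

```console
$ node benchmark/calibrate-n.js --runs=20 --cv-threshold=0.03 benchmark/buffers/buffer-compare.js
...
$ # Calibration settled on n=1000000 in this hypothetical run:
$ node benchmark/buffers/buffer-compare.js n=1000000
```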
+ ### Running all benchmarks Similar to running individual benchmarks, a group of benchmarks can be executed diff --git a/doc/contributing/writing-tests.md b/doc/contributing/writing-tests.md index 8c67ed2c9410c4..7830bb3847184a 100644 --- a/doc/contributing/writing-tests.md +++ b/doc/contributing/writing-tests.md @@ -24,7 +24,7 @@ See [directory structure overview][] for outline of existing test and locations. ## How to write a good test -A good test should be written in a style that is optimial for debugging +A good test should be written in a style that is optimal for debugging when it fails. In principle, when adding a new test, it should be placed in a new file. diff --git a/doc/node-config-schema.json b/doc/node-config-schema.json index 14dfc3b22d0c65..35c3fdd52d6101 100644 --- a/doc/node-config-schema.json +++ b/doc/node-config-schema.json @@ -186,9 +186,6 @@ "experimental-vm-modules": { "type": "boolean" }, - "experimental-wasm-modules": { - "type": "boolean" - }, "experimental-websocket": { "type": "boolean" }, diff --git a/doc/node.1 b/doc/node.1 index f41323c799ad34..6913992a5476d2 100644 --- a/doc/node.1 +++ b/doc/node.1 @@ -234,8 +234,8 @@ Enable experimental ES module support in VM module. Enable experimental WebAssembly System Interface support. This flag is no longer required as WASI is enabled by default. . -.It Fl -experimental-wasm-modules -Enable experimental WebAssembly module support. +.It Fl -experimental-inspector-network-resource +Enable experimental support for inspector network resources. . .It Fl -force-context-aware Disable loading native addons that are not context-aware. @@ -610,6 +610,13 @@ See and .Ev SSL_CERT_FILE . . +.It Fl -use-system-ca +Use the trusted CA certificates present in the system store, in addition to the certificates in the +bundled Mozilla CA store and certificates from `NODE_EXTRA_CA_CERTS`. On Windows and macOS, it uses system APIs to +integrate additional trusted certificates. On other platforms it is similar to `--use-openssl-ca` with the difference +being that it caches the certificates on first load, and the system certificates are added in a complementary manner +without replacing certificates from other sources. This flag supersedes `--use-openssl-ca`. +. .It Fl -use-largepages Ns = Ns Ar mode Re-map the Node.js static code to large memory pages at startup. If supported on the target system, this will cause the Node.js static code to be moved onto 2 @@ -799,6 +806,12 @@ When set to .Ar 0 , TLS certificate validation is disabled. . +.It Ev NODE_USE_SYSTEM_CA +Similar to +.Fl -use-system-ca . +Use the trusted CA certificates present in the system store, in addition to the certificates in the +bundled Mozilla CA store and certificates from `NODE_EXTRA_CA_CERTS`. +. .It Ev NODE_V8_COVERAGE Ar dir When set, Node.js writes JavaScript code coverage information to .Ar dir . diff --git a/glossary.md b/glossary.md index 3c5369d5d9cde3..fe3b8b6e8804fd 100644 --- a/glossary.md +++ b/glossary.md @@ -99,6 +99,7 @@ This file documents various terms and definitions used throughout the Node.js co various aspects of the web ecosystem. * **WASI**: [Web Assembly System Interface][] - Interface for WebAssembly. * **WASM**: Web Assembly - Binary instruction format for a stack-based virtual machine. +* **WDYT**: What Do You Think? * **WG**: Working Group - Autonomous teams in the project with specific focus areas. * **WHATWG**: [Web Hypertext Application Technology Working Group][] - Community developing web standards. 
* **WIP**: Work In Progress - Unfinished work that may be worth an early look. diff --git a/lib/_http_client.js b/lib/_http_client.js index 00b59f357fa45d..0935191b3efc8b 100644 --- a/lib/_http_client.js +++ b/lib/_http_client.js @@ -78,6 +78,7 @@ const { const { validateInteger, validateBoolean, + validateString, } = require('internal/validators'); const { getTimerDuration } = require('internal/timers'); const { @@ -218,12 +219,11 @@ function ClientRequest(input, options, cb) { delete optsWithoutSignal.signal; } let method = options.method; - const methodIsString = (typeof method === 'string'); - if (method !== null && method !== undefined && !methodIsString) { - throw new ERR_INVALID_ARG_TYPE('options.method', 'string', method); + if (method != null) { + validateString(method, 'options.method'); } - if (methodIsString && method) { + if (method) { if (!checkIsHttpToken(method)) { throw new ERR_INVALID_HTTP_TOKEN('Method', method); } diff --git a/lib/_http_server.js b/lib/_http_server.js index b9dbfb537523e5..fa5407df18dba9 100644 --- a/lib/_http_server.js +++ b/lib/_http_server.js @@ -25,6 +25,7 @@ const { ArrayIsArray, Error, MathMin, + NumberIsFinite, ObjectKeys, ObjectSetPrototypeOf, ReflectApply, @@ -184,8 +185,6 @@ const kConnections = Symbol('http.server.connections'); const kConnectionsCheckingInterval = Symbol('http.server.connectionsCheckingInterval'); const HTTP_SERVER_TRACE_EVENT_NAME = 'http.server.request'; -// TODO(jazelly): make this configurable -const HTTP_SERVER_KEEP_ALIVE_TIMEOUT_BUFFER = 1000; class HTTPServerAsyncResource { constructor(type, socket) { @@ -484,6 +483,14 @@ function storeHTTPOptions(options) { this.keepAliveTimeout = 5_000; // 5 seconds; } + const keepAliveTimeoutBuffer = options.keepAliveTimeoutBuffer; + if (keepAliveTimeoutBuffer !== undefined) { + validateInteger(keepAliveTimeoutBuffer, 'keepAliveTimeoutBuffer', 0); + this.keepAliveTimeoutBuffer = keepAliveTimeoutBuffer; + } else { + this.keepAliveTimeoutBuffer = 1000; + } + const connectionsCheckingInterval = options.connectionsCheckingInterval; if (connectionsCheckingInterval !== undefined) { validateInteger(connectionsCheckingInterval, 'connectionsCheckingInterval', 0); @@ -546,6 +553,13 @@ function Server(options, requestListener) { } storeHTTPOptions.call(this, options); + + // Optional buffer added to the keep-alive timeout when setting socket timeouts. + // Helps reduce ECONNRESET errors from clients by extending the internal timeout. + // Default is 1000ms if not specified. + const buf = options.keepAliveTimeoutBuffer; + this.keepAliveTimeoutBuffer = + (typeof buf === 'number' && NumberIsFinite(buf) && buf >= 0) ? buf : 1000; net.Server.call( this, { allowHalfOpen: true, noDelay: options.noDelay ?? true, @@ -1012,9 +1026,10 @@ function resOnFinish(req, res, socket, state, server) { } } else if (state.outgoing.length === 0) { if (server.keepAliveTimeout && typeof socket.setTimeout === 'function') { - // Increase the internal timeout wrt the advertised value to reduce + // Extend the internal timeout by the configured buffer to reduce // the likelihood of ECONNRESET errors. - socket.setTimeout(server.keepAliveTimeout + HTTP_SERVER_KEEP_ALIVE_TIMEOUT_BUFFER); + // This allows fine-tuning beyond the advertised keepAliveTimeout. 
+ socket.setTimeout(server.keepAliveTimeout + server.keepAliveTimeoutBuffer); state.keepAliveTimeoutSet = true; } } else { diff --git a/lib/assert.js b/lib/assert.js index af34cbfd3f4e42..f2ce5e9c0752e8 100644 --- a/lib/assert.js +++ b/lib/assert.js @@ -21,6 +21,7 @@ 'use strict'; const { + ArrayPrototypeForEach, ArrayPrototypeIndexOf, ArrayPrototypeJoin, ArrayPrototypePush, @@ -28,6 +29,7 @@ const { Error, NumberIsNaN, ObjectAssign, + ObjectDefineProperty, ObjectIs, ObjectKeys, ObjectPrototypeIsPrototypeOf, @@ -37,11 +39,13 @@ const { StringPrototypeIndexOf, StringPrototypeSlice, StringPrototypeSplit, + Symbol, } = primordials; const { codes: { ERR_AMBIGUOUS_ARGUMENT, + ERR_CONSTRUCT_CALL_REQUIRED, ERR_INVALID_ARG_TYPE, ERR_INVALID_ARG_VALUE, ERR_INVALID_RETURN_VALUE, @@ -54,14 +58,17 @@ const { isPromise, isRegExp, } = require('internal/util/types'); -const { isError, deprecate } = require('internal/util'); +const { isError, deprecate, setOwnProperty } = require('internal/util'); const { innerOk } = require('internal/assert/utils'); const CallTracker = require('internal/assert/calltracker'); const { validateFunction, + validateOneOf, } = require('internal/validators'); +const kOptions = Symbol('options'); + let isDeepEqual; let isDeepStrictEqual; let isPartialStrictEqual; @@ -83,12 +90,60 @@ const assert = module.exports = ok; const NO_EXCEPTION_SENTINEL = {}; +/** + * Assert options. + * @typedef {object} AssertOptions + * @property {'full'|'simple'} [diff='simple'] - If set to 'full', shows the full diff in assertion errors. + * @property {boolean} [strict=true] - If set to true, non-strict methods behave like their corresponding + * strict methods. + */ + +/** + * @class Assert + * @param {AssertOptions} [options] - Optional configuration for assertions. + * @throws {ERR_CONSTRUCT_CALL_REQUIRED} If not called with `new`. + */ +function Assert(options) { + if (!new.target) { + throw new ERR_CONSTRUCT_CALL_REQUIRED('Assert'); + } + + options = ObjectAssign({ __proto__: null, strict: true }, options); + + const allowedDiffs = ['simple', 'full']; + if (options.diff !== undefined) { + validateOneOf(options.diff, 'options.diff', allowedDiffs); + } + + this.AssertionError = AssertionError; + ObjectDefineProperty(this, kOptions, { + __proto__: null, + value: options, + enumerable: false, + configurable: false, + writable: false, + }); + + if (options.strict) { + this.equal = this.strictEqual; + this.deepEqual = this.deepStrictEqual; + this.notEqual = this.notStrictEqual; + this.notDeepEqual = this.notDeepStrictEqual; + } +} + // All of the following functions must throw an AssertionError // when a corresponding condition is not met, with a message that // may be undefined if not provided. All assertion methods provide // both the actual and expected values to the assertion error for // display purposes. +// DESTRUCTURING WARNING: All Assert.prototype methods use optional chaining +// (this?.[kOptions]) to safely access instance configuration. When methods are +// destructured from an Assert instance (e.g., const {strictEqual} = myAssert), +// they lose their `this` context and will use default behavior instead of the +// instance's custom options. 
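+//
+// For example (illustrative only):
+//
+//   const myAssert = new Assert({ diff: 'full' });
+//   myAssert.strictEqual(actual, expected);   // honors the instance's 'full' diff option
+//   const { strictEqual } = myAssert;
+//   strictEqual(actual, expected);            // `this` is undefined, so the default
+//                                             // 'simple' diff behavior applies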
+ function innerFail(obj) { if (obj.message instanceof Error) throw obj.message; @@ -102,7 +157,7 @@ function innerFail(obj) { * @param {string} [operator] * @param {Function} [stackStartFn] */ -function fail(actual, expected, message, operator, stackStartFn) { +Assert.prototype.fail = function fail(actual, expected, message, operator, stackStartFn) { const argsLen = arguments.length; let internalMessage = false; @@ -128,21 +183,24 @@ function fail(actual, expected, message, operator, stackStartFn) { if (message instanceof Error) throw message; + // IMPORTANT: When adding new references to `this`, ensure they use optional chaining + // (this?.[kOptions]?.diff) to handle cases where the method is destructured from an + // Assert instance and loses its context. Destructured methods will fall back + // to default behavior when `this` is undefined. const errArgs = { actual, expected, operator: operator === undefined ? 'fail' : operator, stackStartFn: stackStartFn || fail, message, + diff: this?.[kOptions]?.diff, }; const err = new AssertionError(errArgs); if (internalMessage) { err.generatedMessage = true; } throw err; -} - -assert.fail = fail; +}; // The AssertionError is defined in internal/error. assert.AssertionError = AssertionError; @@ -156,7 +214,17 @@ assert.AssertionError = AssertionError; function ok(...args) { innerOk(ok, args.length, ...args); } -assert.ok = ok; + +/** + * Pure assertion tests whether a value is truthy, as determined + * by !!value. + * Duplicated as the other `ok` function is supercharged and exposed as default export. + * @param {...any} args + * @returns {void} + */ +Assert.prototype.ok = function ok(...args) { + innerOk(ok, args.length, ...args); +}; /** * The equality assertion tests shallow, coercive equality with ==. 
@@ -165,8 +233,7 @@ assert.ok = ok; * @param {string | Error} [message] * @returns {void} */ -/* eslint-disable no-restricted-properties */ -assert.equal = function equal(actual, expected, message) { +Assert.prototype.equal = function equal(actual, expected, message) { if (arguments.length < 2) { throw new ERR_MISSING_ARGS('actual', 'expected'); } @@ -178,6 +245,7 @@ assert.equal = function equal(actual, expected, message) { message, operator: '==', stackStartFn: equal, + diff: this?.[kOptions]?.diff, }); } }; @@ -190,7 +258,7 @@ assert.equal = function equal(actual, expected, message) { * @param {string | Error} [message] * @returns {void} */ -assert.notEqual = function notEqual(actual, expected, message) { +Assert.prototype.notEqual = function notEqual(actual, expected, message) { if (arguments.length < 2) { throw new ERR_MISSING_ARGS('actual', 'expected'); } @@ -202,6 +270,7 @@ assert.notEqual = function notEqual(actual, expected, message) { message, operator: '!=', stackStartFn: notEqual, + diff: this?.[kOptions]?.diff, }); } }; @@ -213,7 +282,7 @@ assert.notEqual = function notEqual(actual, expected, message) { * @param {string | Error} [message] * @returns {void} */ -assert.deepEqual = function deepEqual(actual, expected, message) { +Assert.prototype.deepEqual = function deepEqual(actual, expected, message) { if (arguments.length < 2) { throw new ERR_MISSING_ARGS('actual', 'expected'); } @@ -225,6 +294,7 @@ assert.deepEqual = function deepEqual(actual, expected, message) { message, operator: 'deepEqual', stackStartFn: deepEqual, + diff: this?.[kOptions]?.diff, }); } }; @@ -236,7 +306,7 @@ assert.deepEqual = function deepEqual(actual, expected, message) { * @param {string | Error} [message] * @returns {void} */ -assert.notDeepEqual = function notDeepEqual(actual, expected, message) { +Assert.prototype.notDeepEqual = function notDeepEqual(actual, expected, message) { if (arguments.length < 2) { throw new ERR_MISSING_ARGS('actual', 'expected'); } @@ -248,10 +318,10 @@ assert.notDeepEqual = function notDeepEqual(actual, expected, message) { message, operator: 'notDeepEqual', stackStartFn: notDeepEqual, + diff: this?.[kOptions]?.diff, }); } }; -/* eslint-enable */ /** * The deep strict equivalence assertion tests a deep strict equality @@ -261,7 +331,7 @@ assert.notDeepEqual = function notDeepEqual(actual, expected, message) { * @param {string | Error} [message] * @returns {void} */ -assert.deepStrictEqual = function deepStrictEqual(actual, expected, message) { +Assert.prototype.deepStrictEqual = function deepStrictEqual(actual, expected, message) { if (arguments.length < 2) { throw new ERR_MISSING_ARGS('actual', 'expected'); } @@ -273,6 +343,7 @@ assert.deepStrictEqual = function deepStrictEqual(actual, expected, message) { message, operator: 'deepStrictEqual', stackStartFn: deepStrictEqual, + diff: this?.[kOptions]?.diff, }); } }; @@ -285,7 +356,7 @@ assert.deepStrictEqual = function deepStrictEqual(actual, expected, message) { * @param {string | Error} [message] * @returns {void} */ -assert.notDeepStrictEqual = notDeepStrictEqual; +Assert.prototype.notDeepStrictEqual = notDeepStrictEqual; function notDeepStrictEqual(actual, expected, message) { if (arguments.length < 2) { throw new ERR_MISSING_ARGS('actual', 'expected'); @@ -298,6 +369,7 @@ function notDeepStrictEqual(actual, expected, message) { message, operator: 'notDeepStrictEqual', stackStartFn: notDeepStrictEqual, + diff: this?.[kOptions]?.diff, }); } } @@ -309,7 +381,7 @@ function notDeepStrictEqual(actual, expected, message) { 
* @param {string | Error} [message] * @returns {void} */ -assert.strictEqual = function strictEqual(actual, expected, message) { +Assert.prototype.strictEqual = function strictEqual(actual, expected, message) { if (arguments.length < 2) { throw new ERR_MISSING_ARGS('actual', 'expected'); } @@ -320,6 +392,7 @@ assert.strictEqual = function strictEqual(actual, expected, message) { message, operator: 'strictEqual', stackStartFn: strictEqual, + diff: this?.[kOptions]?.diff, }); } }; @@ -331,7 +404,7 @@ assert.strictEqual = function strictEqual(actual, expected, message) { * @param {string | Error} [message] * @returns {void} */ -assert.notStrictEqual = function notStrictEqual(actual, expected, message) { +Assert.prototype.notStrictEqual = function notStrictEqual(actual, expected, message) { if (arguments.length < 2) { throw new ERR_MISSING_ARGS('actual', 'expected'); } @@ -342,6 +415,7 @@ assert.notStrictEqual = function notStrictEqual(actual, expected, message) { message, operator: 'notStrictEqual', stackStartFn: notStrictEqual, + diff: this?.[kOptions]?.diff, }); } }; @@ -353,7 +427,7 @@ assert.notStrictEqual = function notStrictEqual(actual, expected, message) { * @param {string | Error} [message] * @returns {void} */ -assert.partialDeepStrictEqual = function partialDeepStrictEqual( +Assert.prototype.partialDeepStrictEqual = function partialDeepStrictEqual( actual, expected, message, @@ -369,6 +443,7 @@ assert.partialDeepStrictEqual = function partialDeepStrictEqual( message, operator: 'partialDeepStrictEqual', stackStartFn: partialDeepStrictEqual, + diff: this?.[kOptions]?.diff, }); } }; @@ -402,6 +477,7 @@ function compareExceptionKey(actual, expected, key, message, keys, fn) { expected: b, operator: 'deepStrictEqual', stackStartFn: fn, + diff: this?.[kOptions]?.diff, }); err.actual = actual; err.expected = expected; @@ -414,6 +490,7 @@ function compareExceptionKey(actual, expected, key, message, keys, fn) { message, operator: fn.name, stackStartFn: fn, + diff: this?.[kOptions]?.diff, }); } } @@ -443,6 +520,7 @@ function expectedException(actual, expected, message, fn) { message, operator: 'deepStrictEqual', stackStartFn: fn, + diff: this?.[kOptions]?.diff, }); err.operator = fn.name; throw err; @@ -518,6 +596,7 @@ function expectedException(actual, expected, message, fn) { message, operator: fn.name, stackStartFn: fn, + diff: this?.[kOptions]?.diff, }); err.generatedMessage = generatedMessage; throw err; @@ -605,20 +684,21 @@ function expectsError(stackStartFn, actual, error, message) { details += ` (${error.name})`; } details += message ? `: ${message}` : '.'; - const fnType = stackStartFn === assert.rejects ? 'rejection' : 'exception'; + const fnType = stackStartFn === Assert.prototype.rejects ? 'rejection' : 'exception'; innerFail({ actual: undefined, expected: error, operator: stackStartFn.name, message: `Missing expected ${fnType}${details}`, stackStartFn, + diff: this?.[kOptions]?.diff, }); } if (!error) return; - expectedException(actual, error, message, stackStartFn); + expectedException.call(this, actual, error, message, stackStartFn); } function hasMatchingError(actual, expected) { @@ -652,7 +732,7 @@ function expectsNoError(stackStartFn, actual, error, message) { if (!error || hasMatchingError(actual, error)) { const details = message ? `: ${message}` : '.'; - const fnType = stackStartFn === assert.doesNotReject ? + const fnType = stackStartFn === Assert.prototype.doesNotReject ? 
'rejection' : 'exception'; innerFail({ actual, @@ -661,6 +741,7 @@ function expectsNoError(stackStartFn, actual, error, message) { message: `Got unwanted ${fnType}${details}\n` + `Actual message: "${actual?.message}"`, stackStartFn, + diff: this?.[kOptions]?.diff, }); } throw actual; @@ -672,7 +753,7 @@ function expectsNoError(stackStartFn, actual, error, message) { * @param {...any} [args] * @returns {void} */ -assert.throws = function throws(promiseFn, ...args) { +Assert.prototype.throws = function throws(promiseFn, ...args) { expectsError(throws, getActual(promiseFn), ...args); }; @@ -682,7 +763,7 @@ assert.throws = function throws(promiseFn, ...args) { * @param {...any} [args] * @returns {Promise} */ -assert.rejects = async function rejects(promiseFn, ...args) { +Assert.prototype.rejects = async function rejects(promiseFn, ...args) { expectsError(rejects, await waitForActual(promiseFn), ...args); }; @@ -692,7 +773,7 @@ assert.rejects = async function rejects(promiseFn, ...args) { * @param {...any} [args] * @returns {void} */ -assert.doesNotThrow = function doesNotThrow(fn, ...args) { +Assert.prototype.doesNotThrow = function doesNotThrow(fn, ...args) { expectsNoError(doesNotThrow, getActual(fn), ...args); }; @@ -702,7 +783,7 @@ assert.doesNotThrow = function doesNotThrow(fn, ...args) { * @param {...any} [args] * @returns {Promise} */ -assert.doesNotReject = async function doesNotReject(fn, ...args) { +Assert.prototype.doesNotReject = async function doesNotReject(fn, ...args) { expectsNoError(doesNotReject, await waitForActual(fn), ...args); }; @@ -711,7 +792,7 @@ assert.doesNotReject = async function doesNotReject(fn, ...args) { * @param {any} err * @returns {void} */ -assert.ifError = function ifError(err) { +Assert.prototype.ifError = function ifError(err) { if (err !== null && err !== undefined) { let message = 'ifError got unwanted exception: '; if (typeof err === 'object' && typeof err.message === 'string') { @@ -730,6 +811,7 @@ assert.ifError = function ifError(err) { operator: 'ifError', message, stackStartFn: ifError, + diff: this?.[kOptions]?.diff, }); // Make sure we actually have a stack trace! @@ -772,7 +854,7 @@ function internalMatch(string, regexp, message, fn) { 'regexp', 'RegExp', regexp, ); } - const match = fn === assert.match; + const match = fn === Assert.prototype.match; if (typeof string !== 'string' || RegExpPrototypeExec(regexp, string) !== null !== match) { if (message instanceof Error) { @@ -795,6 +877,7 @@ function internalMatch(string, regexp, message, fn) { message, operator: fn.name, stackStartFn: fn, + diff: this?.[kOptions]?.diff, }); err.generatedMessage = generatedMessage; throw err; @@ -808,7 +891,7 @@ function internalMatch(string, regexp, message, fn) { * @param {string | Error} [message] * @returns {void} */ -assert.match = function match(string, regexp, message) { +Assert.prototype.match = function match(string, regexp, message) { internalMatch(string, regexp, message, match); }; @@ -819,7 +902,7 @@ assert.match = function match(string, regexp, message) { * @param {string | Error} [message] * @returns {void} */ -assert.doesNotMatch = function doesNotMatch(string, regexp, message) { +Assert.prototype.doesNotMatch = function doesNotMatch(string, regexp, message) { internalMatch(string, regexp, message, doesNotMatch); }; @@ -834,6 +917,17 @@ function strict(...args) { innerOk(strict, args.length, ...args); } +// TODO(aduh95): take `ok` from `Assert.prototype` instead of a self-ref in a next major. 
+assert.ok = assert; +ArrayPrototypeForEach([ + 'fail', 'equal', 'notEqual', 'deepEqual', 'notDeepEqual', + 'deepStrictEqual', 'notDeepStrictEqual', 'strictEqual', + 'notStrictEqual', 'partialDeepStrictEqual', 'match', 'doesNotMatch', + 'throws', 'rejects', 'doesNotThrow', 'doesNotReject', 'ifError', +], (name) => { + setOwnProperty(assert, name, Assert.prototype[name]); +}); + assert.strict = ObjectAssign(strict, assert, { equal: assert.strictEqual, deepEqual: assert.deepStrictEqual, @@ -841,4 +935,7 @@ assert.strict = ObjectAssign(strict, assert, { notDeepEqual: assert.notDeepStrictEqual, }); +assert.strict.Assert = Assert; assert.strict.strict = assert.strict; + +assert.Assert = Assert; diff --git a/lib/async_hooks.js b/lib/async_hooks.js index 7dc7bfab328498..4216ee3f240c48 100644 --- a/lib/async_hooks.js +++ b/lib/async_hooks.js @@ -262,10 +262,10 @@ class AsyncResource { enumerable: true, get: deprecate(function() { return self; - }, 'The asyncResource property on bound functions is deprecated', 'DEP0172'), + }, 'The asyncResource property on bound functions is deprecated', 'DEP0172', false), set: deprecate(function(val) { self = val; - }, 'The asyncResource property on bound functions is deprecated', 'DEP0172'), + }, 'The asyncResource property on bound functions is deprecated', 'DEP0172', false), }, }); return bound; diff --git a/lib/domain.js b/lib/domain.js index 03d240e98d4506..7dd16ee1bf59ef 100644 --- a/lib/domain.js +++ b/lib/domain.js @@ -270,7 +270,7 @@ Domain.prototype._errorHandler = function(er) { // as this would throw an error, make the process exit, and thus // prevent the process 'uncaughtException' event from being emitted // if a listener is set. - if (EventEmitter.listenerCount(this, 'error') > 0) { + if (this.listenerCount('error') > 0) { // Clear the uncaughtExceptionCaptureCallback so that we know that, since // the top-level domain is not active anymore, it would be ok to abort on // an uncaught exception at this point diff --git a/lib/events.js b/lib/events.js index 48269193b37bcf..d1383bf251f1fc 100644 --- a/lib/events.js +++ b/lib/events.js @@ -28,6 +28,7 @@ const { ArrayPrototypeSlice, ArrayPrototypeSplice, ArrayPrototypeUnshift, + AsyncIteratorPrototype, Boolean, Error, ErrorCaptureStackTrace, @@ -1011,9 +1012,6 @@ async function once(emitter, name, options = kEmptyObject) { }); } -const AsyncIteratorPrototype = ObjectGetPrototypeOf( - ObjectGetPrototypeOf(async function* () {}).prototype); - function createIterResult(value, done) { return { value, done }; } diff --git a/lib/inspector.js b/lib/inspector.js index 24a5d62318debc..48daa2ea0c912a 100644 --- a/lib/inspector.js +++ b/lib/inspector.js @@ -39,6 +39,9 @@ const { } = require('internal/validators'); const { isMainThread } = require('worker_threads'); const { _debugEnd } = internalBinding('process_methods'); +const { + put, +} = require('internal/inspector/network_resources'); const { Connection, @@ -221,6 +224,10 @@ const Network = { dataReceived: (params) => broadcastToFrontend('Network.dataReceived', params), }; +const NetworkResources = { + put, +}; + module.exports = { open: inspectorOpen, close: _debugEnd, @@ -229,4 +236,5 @@ module.exports = { console, Session, Network, + NetworkResources, }; diff --git a/lib/internal/assert/assertion_error.js b/lib/internal/assert/assertion_error.js index d654ca5038bbab..5c15b96b12d1ea 100644 --- a/lib/internal/assert/assertion_error.js +++ b/lib/internal/assert/assertion_error.js @@ -178,7 +178,7 @@ function isSimpleDiff(actual, inspectedActual, expected, 
inspectedExpected) { return typeof actual !== 'object' || actual === null || typeof expected !== 'object' || expected === null; } -function createErrDiff(actual, expected, operator, customMessage) { +function createErrDiff(actual, expected, operator, customMessage, diffType = 'simple') { operator = checkOperator(actual, expected, operator); let skipped = false; @@ -202,7 +202,7 @@ function createErrDiff(actual, expected, operator, customMessage) { } else if (inspectedActual === inspectedExpected) { // Handles the case where the objects are structurally the same but different references operator = 'notIdentical'; - if (inspectedSplitActual.length > 50) { + if (inspectedSplitActual.length > 50 && diffType !== 'full') { message = `${ArrayPrototypeJoin(ArrayPrototypeSlice(inspectedSplitActual, 0, 50), '\n')}\n...}`; skipped = true; } else { @@ -252,6 +252,7 @@ class AssertionError extends Error { details, // Compatibility with older versions. stackStartFunction, + diff = 'simple', } = options; let { actual, @@ -263,7 +264,7 @@ class AssertionError extends Error { if (message != null) { if (kMethodsWithCustomMessageDiff.includes(operator)) { - super(createErrDiff(actual, expected, operator, message)); + super(createErrDiff(actual, expected, operator, message, diff)); } else { super(String(message)); } @@ -283,7 +284,7 @@ class AssertionError extends Error { } if (kMethodsWithCustomMessageDiff.includes(operator)) { - super(createErrDiff(actual, expected, operator, message)); + super(createErrDiff(actual, expected, operator, message, diff)); } else if (operator === 'notDeepStrictEqual' || operator === 'notStrictEqual') { // In case the objects are equal but the operator requires unequal, show @@ -300,8 +301,7 @@ class AssertionError extends Error { } // Only remove lines in case it makes sense to collapse those. - // TODO: Accept env to always show the full error. - if (res.length > 50) { + if (res.length > 50 && diff !== 'full') { res[46] = `${colors.blue}...${colors.white}`; while (res.length > 47) { ArrayPrototypePop(res); @@ -320,15 +320,15 @@ class AssertionError extends Error { const knownOperator = kReadableOperator[operator]; if (operator === 'notDeepEqual' && res === other) { res = `${knownOperator}\n\n${res}`; - if (res.length > 1024) { + if (res.length > 1024 && diff !== 'full') { res = `${StringPrototypeSlice(res, 0, 1021)}...`; } super(res); } else { - if (res.length > kMaxLongStringLength) { + if (res.length > kMaxLongStringLength && diff !== 'full') { res = `${StringPrototypeSlice(res, 0, 509)}...`; } - if (other.length > kMaxLongStringLength) { + if (other.length > kMaxLongStringLength && diff !== 'full') { other = `${StringPrototypeSlice(other, 0, 509)}...`; } if (operator === 'deepEqual') { @@ -378,6 +378,7 @@ class AssertionError extends Error { this.stack; // eslint-disable-line no-unused-expressions // Reset the name. 
this.name = 'AssertionError'; + this.diff = diff; } toString() { diff --git a/lib/internal/assert/utils.js b/lib/internal/assert/utils.js index d059fa89baf7d4..13e41d67c635c2 100644 --- a/lib/internal/assert/utils.js +++ b/lib/internal/assert/utils.js @@ -222,7 +222,8 @@ function getErrMessage(message, fn) { } const frames = StringPrototypeSplit(message, '\n'); message = ArrayPrototypeShift(frames); - for (const frame of frames) { + for (let i = 0; i < frames.length; i++) { + const frame = frames[i]; let pos = 0; while (pos < column && (frame[pos] === ' ' || frame[pos] === '\t')) { pos++; diff --git a/lib/internal/blocklist.js b/lib/internal/blocklist.js index a992a6b8f0703c..552819405a1a60 100644 --- a/lib/internal/blocklist.js +++ b/lib/internal/blocklist.js @@ -1,7 +1,10 @@ 'use strict'; const { + ArrayIsArray, Boolean, + JSONParse, + NumberParseInt, ObjectSetPrototypeOf, Symbol, } = primordials; @@ -32,6 +35,7 @@ const { owner_symbol } = internalBinding('symbols'); const { ERR_INVALID_ARG_VALUE, + ERR_INVALID_ARG_TYPE, } = require('internal/errors').codes; const { validateInt32, validateString } = require('internal/validators'); @@ -139,10 +143,130 @@ class BlockList { return Boolean(this[kHandle].check(address[kSocketAddressHandle])); } + /* + * @param {string[]} data + * @example + * const data = [ + * // IPv4 examples + * 'Subnet: IPv4 192.168.1.0/24', + * 'Address: IPv4 10.0.0.5', + * 'Range: IPv4 192.168.2.1-192.168.2.10', + * 'Range: IPv4 10.0.0.1-10.0.0.10', + * + * // IPv6 examples + * 'Subnet: IPv6 2001:0db8:85a3:0000:0000:8a2e:0370:7334/64', + * 'Address: IPv6 2001:0db8:85a3:0000:0000:8a2e:0370:7334', + * 'Range: IPv6 2001:0db8:85a3:0000:0000:8a2e:0370:7334-2001:0db8:85a3:0000:0000:8a2e:0370:7335', + * 'Subnet: IPv6 2001:db8:1234::/48', + * 'Address: IPv6 2001:db8:1234::1', + * 'Range: IPv6 2001:db8:1234::1-2001:db8:1234::10' + * ]; + */ + #parseIPInfo(data) { + for (const item of data) { + if (item.includes('IPv4')) { + const subnetMatch = item.match( + /Subnet: IPv4 (\d{1,3}(?:\.\d{1,3}){3})\/(\d{1,2})/, + ); + if (subnetMatch) { + const { 1: network, 2: prefix } = subnetMatch; + this.addSubnet(network, NumberParseInt(prefix)); + continue; + } + const addressMatch = item.match(/Address: IPv4 (\d{1,3}(?:\.\d{1,3}){3})/); + if (addressMatch) { + const { 1: address } = addressMatch; + this.addAddress(address); + continue; + } + + const rangeMatch = item.match( + /Range: IPv4 (\d{1,3}(?:\.\d{1,3}){3})-(\d{1,3}(?:\.\d{1,3}){3})/, + ); + if (rangeMatch) { + const { 1: start, 2: end } = rangeMatch; + this.addRange(start, end); + continue; + } + } + // IPv6 parsing with support for compressed addresses + if (item.includes('IPv6')) { + // IPv6 subnet pattern: supports both full and compressed formats + // Examples: + // - 2001:0db8:85a3:0000:0000:8a2e:0370:7334/64 (full) + // - 2001:db8:85a3::8a2e:370:7334/64 (compressed) + // - 2001:db8:85a3::192.0.2.128/64 (mixed) + const ipv6SubnetMatch = item.match( + /Subnet: IPv6 ([0-9a-fA-F:]{1,39})\/([0-9]{1,3})/i, + ); + if (ipv6SubnetMatch) { + const { 1: network, 2: prefix } = ipv6SubnetMatch; + this.addSubnet(network, NumberParseInt(prefix), 'ipv6'); + continue; + } + + // IPv6 address pattern: supports both full and compressed formats + // Examples: + // - 2001:0db8:85a3:0000:0000:8a2e:0370:7334 (full) + // - 2001:db8:85a3::8a2e:370:7334 (compressed) + // - 2001:db8:85a3::192.0.2.128 (mixed) + const ipv6AddressMatch = item.match(/Address: IPv6 ([0-9a-fA-F:]{1,39})/i); + if (ipv6AddressMatch) { + const { 1: address } = ipv6AddressMatch; 
+ this.addAddress(address, 'ipv6'); + continue; + } + + // IPv6 range pattern: supports both full and compressed formats + // Examples: + // - 2001:0db8:85a3:0000:0000:8a2e:0370:7334-2001:0db8:85a3:0000:0000:8a2e:0370:7335 (full) + // - 2001:db8:85a3::8a2e:370:7334-2001:db8:85a3::8a2e:370:7335 (compressed) + // - 2001:db8:85a3::192.0.2.128-2001:db8:85a3::192.0.2.129 (mixed) + const ipv6RangeMatch = item.match(/Range: IPv6 ([0-9a-fA-F:]{1,39})-([0-9a-fA-F:]{1,39})/i); + if (ipv6RangeMatch) { + const { 1: start, 2: end } = ipv6RangeMatch; + this.addRange(start, end, 'ipv6'); + continue; + } + } + } + } + + + toJSON() { + return this.rules; + } + + fromJSON(data) { + // The data argument must be a string, or an array of strings that + // is JSON parseable. + if (ArrayIsArray(data)) { + for (const n of data) { + if (typeof n !== 'string') { + throw new ERR_INVALID_ARG_TYPE('data', ['string', 'string[]'], data); + } + } + } else if (typeof data !== 'string') { + throw new ERR_INVALID_ARG_TYPE('data', ['string', 'string[]'], data); + } else { + data = JSONParse(data); + if (!ArrayIsArray(data)) { + throw new ERR_INVALID_ARG_TYPE('data', ['string', 'string[]'], data); + } + for (const n of data) { + if (typeof n !== 'string') { + throw new ERR_INVALID_ARG_TYPE('data', ['string', 'string[]'], data); + } + } + } + + this.#parseIPInfo(data); + } + + get rules() { return this[kHandle].getRules(); } - [kClone]() { const handle = this[kHandle]; return { diff --git a/lib/internal/bootstrap/node.js b/lib/internal/bootstrap/node.js index dd9e3e58d72fb9..1a39b9f15e689e 100644 --- a/lib/internal/bootstrap/node.js +++ b/lib/internal/bootstrap/node.js @@ -173,6 +173,7 @@ const rawMethods = internalBinding('process_methods'); process.loadEnvFile = wrapped.loadEnvFile; process._rawDebug = wrapped._rawDebug; process.cpuUsage = wrapped.cpuUsage; + process.threadCpuUsage = wrapped.threadCpuUsage; process.resourceUsage = wrapped.resourceUsage; process.memoryUsage = wrapped.memoryUsage; process.constrainedMemory = rawMethods.constrainedMemory; diff --git a/lib/internal/crypto/aes.js b/lib/internal/crypto/aes.js index d2f1111a7155bd..b7d1abf4a85daf 100644 --- a/lib/internal/crypto/aes.js +++ b/lib/internal/crypto/aes.js @@ -285,9 +285,7 @@ function aesImportKey( break; } default: - throw lazyDOMException( - `Unable to import AES key with format ${format}`, - 'NotSupportedError'); + return undefined; } if (length === undefined) { diff --git a/lib/internal/crypto/cfrg.js b/lib/internal/crypto/cfrg.js index 34d8e294163fc7..e8af5750a865fd 100644 --- a/lib/internal/crypto/cfrg.js +++ b/lib/internal/crypto/cfrg.js @@ -324,6 +324,8 @@ function cfrgImportKey( keyObject = createCFRGRawKey(name, keyData, true); break; } + default: + return undefined; } if (keyObject.asymmetricKeyType !== name.toLowerCase()) { diff --git a/lib/internal/crypto/ec.js b/lib/internal/crypto/ec.js index 6c73344ef17e1c..f4ea317b86ee73 100644 --- a/lib/internal/crypto/ec.js +++ b/lib/internal/crypto/ec.js @@ -250,6 +250,8 @@ function ecImportKey( keyObject = createECPublicKeyRaw(namedCurve, keyData); break; } + default: + return undefined; } switch (algorithm.name) { diff --git a/lib/internal/crypto/keys.js b/lib/internal/crypto/keys.js index 7529254685fb17..7f21dd671cbc68 100644 --- a/lib/internal/crypto/keys.js +++ b/lib/internal/crypto/keys.js @@ -922,15 +922,11 @@ function importGenericSecretKey( keyObject = createSecretKey(keyData); break; } + default: + return undefined; } - if (keyObject) { - return new InternalCryptoKey(keyObject, { name }, 
keyUsages, false); - } - - throw lazyDOMException( - `Unable to import ${name} key with format ${format}`, - 'NotSupportedError'); + return new InternalCryptoKey(keyObject, { name }, keyUsages, false); } module.exports = { diff --git a/lib/internal/crypto/mac.js b/lib/internal/crypto/mac.js index 3e232fe1ca6491..0f9b1f9618d260 100644 --- a/lib/internal/crypto/mac.js +++ b/lib/internal/crypto/mac.js @@ -143,7 +143,7 @@ function hmacImportKey( break; } default: - throw lazyDOMException(`Unable to import HMAC key with format ${format}`); + return undefined; } const { length } = keyObject[kHandle].keyDetail({}); diff --git a/lib/internal/crypto/rsa.js b/lib/internal/crypto/rsa.js index a3ef89ed6f2e7e..bf6c341c5a723d 100644 --- a/lib/internal/crypto/rsa.js +++ b/lib/internal/crypto/rsa.js @@ -305,9 +305,7 @@ function rsaImportKey( break; } default: - throw lazyDOMException( - `Unable to import RSA key with format ${format}`, - 'NotSupportedError'); + return undefined; } if (keyObject.asymmetricKeyType !== 'rsa') { diff --git a/lib/internal/crypto/util.js b/lib/internal/crypto/util.js index 2eba29333bcba4..c7e0c31a4cc609 100644 --- a/lib/internal/crypto/util.js +++ b/lib/internal/crypto/util.js @@ -189,6 +189,20 @@ const kSupportedAlgorithms = { 'Ed25519': null, 'X25519': null, }, + 'exportKey': { + 'RSASSA-PKCS1-v1_5': null, + 'RSA-PSS': null, + 'RSA-OAEP': null, + 'ECDSA': null, + 'ECDH': null, + 'HMAC': null, + 'AES-CTR': null, + 'AES-CBC': null, + 'AES-GCM': null, + 'AES-KW': null, + 'Ed25519': null, + 'X25519': null, + }, 'sign': { 'RSASSA-PKCS1-v1_5': null, 'RSA-PSS': 'RsaPssParams', @@ -259,12 +273,14 @@ const experimentalAlgorithms = ObjectEntries({ generateKey: null, importKey: null, deriveBits: 'EcdhKeyDeriveParams', + exportKey: null, }, 'Ed448': { generateKey: null, sign: 'Ed448Params', verify: 'Ed448Params', importKey: null, + exportKey: null, }, }); diff --git a/lib/internal/crypto/webcrypto.js b/lib/internal/crypto/webcrypto.js index 04c040afd84629..facffcd741e407 100644 --- a/lib/internal/crypto/webcrypto.js +++ b/lib/internal/crypto/webcrypto.js @@ -327,19 +327,13 @@ async function exportKeySpki(key) { case 'RSA-PSS': // Fall through case 'RSA-OAEP': - if (key.type === 'public') { - return require('internal/crypto/rsa') - .rsaExportKey(key, kWebCryptoKeyFormatSPKI); - } - break; + return require('internal/crypto/rsa') + .rsaExportKey(key, kWebCryptoKeyFormatSPKI); case 'ECDSA': // Fall through case 'ECDH': - if (key.type === 'public') { - return require('internal/crypto/ec') - .ecExportKey(key, kWebCryptoKeyFormatSPKI); - } - break; + return require('internal/crypto/ec') + .ecExportKey(key, kWebCryptoKeyFormatSPKI); case 'Ed25519': // Fall through case 'Ed448': @@ -347,16 +341,11 @@ async function exportKeySpki(key) { case 'X25519': // Fall through case 'X448': - if (key.type === 'public') { - return require('internal/crypto/cfrg') - .cfrgExportKey(key, kWebCryptoKeyFormatSPKI); - } - break; + return require('internal/crypto/cfrg') + .cfrgExportKey(key, kWebCryptoKeyFormatSPKI); + default: + return undefined; } - - throw lazyDOMException( - `Unable to export a raw ${key.algorithm.name} ${key.type} key`, - 'InvalidAccessError'); } async function exportKeyPkcs8(key) { @@ -366,19 +355,13 @@ async function exportKeyPkcs8(key) { case 'RSA-PSS': // Fall through case 'RSA-OAEP': - if (key.type === 'private') { - return require('internal/crypto/rsa') - .rsaExportKey(key, kWebCryptoKeyFormatPKCS8); - } - break; + return require('internal/crypto/rsa') + .rsaExportKey(key, 
kWebCryptoKeyFormatPKCS8); case 'ECDSA': // Fall through case 'ECDH': - if (key.type === 'private') { - return require('internal/crypto/ec') - .ecExportKey(key, kWebCryptoKeyFormatPKCS8); - } - break; + return require('internal/crypto/ec') + .ecExportKey(key, kWebCryptoKeyFormatPKCS8); case 'Ed25519': // Fall through case 'Ed448': @@ -386,28 +369,20 @@ async function exportKeyPkcs8(key) { case 'X25519': // Fall through case 'X448': - if (key.type === 'private') { - return require('internal/crypto/cfrg') - .cfrgExportKey(key, kWebCryptoKeyFormatPKCS8); - } - break; + return require('internal/crypto/cfrg') + .cfrgExportKey(key, kWebCryptoKeyFormatPKCS8); + default: + return undefined; } - - throw lazyDOMException( - `Unable to export a pkcs8 ${key.algorithm.name} ${key.type} key`, - 'InvalidAccessError'); } -async function exportKeyRaw(key) { +async function exportKeyRawPublic(key) { switch (key.algorithm.name) { case 'ECDSA': // Fall through case 'ECDH': - if (key.type === 'public') { - return require('internal/crypto/ec') - .ecExportKey(key, kWebCryptoKeyFormatRaw); - } - break; + return require('internal/crypto/ec') + .ecExportKey(key, kWebCryptoKeyFormatRaw); case 'Ed25519': // Fall through case 'Ed448': @@ -415,11 +390,15 @@ async function exportKeyRaw(key) { case 'X25519': // Fall through case 'X448': - if (key.type === 'public') { - return require('internal/crypto/cfrg') - .cfrgExportKey(key, kWebCryptoKeyFormatRaw); - } - break; + return require('internal/crypto/cfrg') + .cfrgExportKey(key, kWebCryptoKeyFormatRaw); + default: + return undefined; + } +} + +async function exportKeyRawSecret(key) { + switch (key.algorithm.name) { case 'AES-CTR': // Fall through case 'AES-CBC': @@ -429,51 +408,46 @@ async function exportKeyRaw(key) { case 'AES-KW': // Fall through case 'HMAC': - return key[kKeyObject].export().buffer; + return key[kKeyObject][kHandle].export().buffer; + default: + return undefined; } - - throw lazyDOMException( - `Unable to export a raw ${key.algorithm.name} ${key.type} key`, - 'InvalidAccessError'); } async function exportKeyJWK(key) { - const jwk = key[kKeyObject][kHandle].exportJwk({ + const parameters = { key_ops: key.usages, ext: key.extractable, - }, true); + }; switch (key.algorithm.name) { case 'RSASSA-PKCS1-v1_5': - jwk.alg = normalizeHashName( + parameters.alg = normalizeHashName( key.algorithm.hash.name, normalizeHashName.kContextJwkRsa); - return jwk; + break; case 'RSA-PSS': - jwk.alg = normalizeHashName( + parameters.alg = normalizeHashName( key.algorithm.hash.name, normalizeHashName.kContextJwkRsaPss); - return jwk; + break; case 'RSA-OAEP': - jwk.alg = normalizeHashName( + parameters.alg = normalizeHashName( key.algorithm.hash.name, normalizeHashName.kContextJwkRsaOaep); - return jwk; + break; case 'ECDSA': // Fall through case 'ECDH': - jwk.crv ||= key.algorithm.namedCurve; - return jwk; + // Fall through case 'X25519': // Fall through case 'X448': - jwk.crv ||= key.algorithm.name; - return jwk; + break; case 'Ed25519': // Fall through case 'Ed448': - jwk.crv ||= key.algorithm.name; - jwk.alg = key.algorithm.name; - return jwk; + parameters.alg = key.algorithm.name; + break; case 'AES-CTR': // Fall through case 'AES-CBC': @@ -481,19 +455,19 @@ async function exportKeyJWK(key) { case 'AES-GCM': // Fall through case 'AES-KW': - jwk.alg = require('internal/crypto/aes') + parameters.alg = require('internal/crypto/aes') .getAlgorithmName(key.algorithm.name, key.algorithm.length); - return jwk; + break; case 'HMAC': - jwk.alg = normalizeHashName( + 
parameters.alg = normalizeHashName( key.algorithm.hash.name, normalizeHashName.kContextJwkHmac); - return jwk; + break; default: - // Fall through + return undefined; } - throw lazyDOMException('Not yet supported', 'NotSupportedError'); + return key[kKeyObject][kHandle].exportJwk(parameters, true); } async function exportKey(format, key) { @@ -511,17 +485,55 @@ async function exportKey(format, key) { context: '2nd argument', }); + try { + normalizeAlgorithm(key.algorithm, 'exportKey'); + } catch { + throw lazyDOMException( + `${key.algorithm.name} key export is not supported`, 'NotSupportedError'); + } + if (!key.extractable) throw lazyDOMException('key is not extractable', 'InvalidAccessException'); + let result; switch (format) { - case 'spki': return exportKeySpki(key); - case 'pkcs8': return exportKeyPkcs8(key); - case 'jwk': return exportKeyJWK(key); - case 'raw': return exportKeyRaw(key); + case 'spki': { + if (key.type === 'public') { + result = await exportKeySpki(key); + } + break; + } + case 'pkcs8': { + if (key.type === 'private') { + result = await exportKeyPkcs8(key); + } + break; + } + case 'jwk': { + result = await exportKeyJWK(key); + break; + } + case 'raw': { + if (key.type === 'secret') { + result = await exportKeyRawSecret(key); + break; + } + + if (key.type === 'public') { + result = await exportKeyRawPublic(key); + break; + } + break; + } } - throw lazyDOMException( - 'Export format is unsupported', 'NotSupportedError'); + + if (!result) { + throw lazyDOMException( + `Unable to export ${key.algorithm.name} ${key.type} key using ${format} format`, + 'NotSupportedError'); + } + + return result; } async function importKey( @@ -608,8 +620,12 @@ async function importKey( extractable, keyUsages); break; - default: - throw lazyDOMException('Unrecognized algorithm name', 'NotSupportedError'); + } + + if (!result) { + throw lazyDOMException( + `Unable to import ${algorithm.name} using ${format} format`, + 'NotSupportedError'); } if ((result.type === 'secret' || result.type === 'private') && result.usages.length === 0) { diff --git a/lib/internal/dns/utils.js b/lib/internal/dns/utils.js index 85918f0d43d5c6..32f8794d62ad73 100644 --- a/lib/internal/dns/utils.js +++ b/lib/internal/dns/utils.js @@ -25,6 +25,7 @@ const { validateInt32, validateOneOf, validateString, + validateUint32, } = require('internal/validators'); let binding; function lazyBinding() { @@ -49,6 +50,12 @@ function validateTimeout(options) { return timeout; } +function validateMaxTimeout(options) { + const { maxTimeout = 0 } = { ...options }; + validateUint32(maxTimeout, 'options.maxTimeout'); + return maxTimeout; +} + function validateTries(options) { const { tries = 4 } = { ...options }; validateInt32(tries, 'options.tries', 1); @@ -67,17 +74,18 @@ class ResolverBase { constructor(options = undefined) { const timeout = validateTimeout(options); const tries = validateTries(options); + const maxTimeout = validateMaxTimeout(options); // If we are building snapshot, save the states of the resolver along // the way. 
if (isBuildingSnapshot()) { - this[kSnapshotStates] = { timeout, tries }; + this[kSnapshotStates] = { timeout, tries, maxTimeout }; } - this[kInitializeHandle](timeout, tries); + this[kInitializeHandle](timeout, tries, maxTimeout); } - [kInitializeHandle](timeout, tries) { + [kInitializeHandle](timeout, tries, maxTimeout) { const { ChannelWrap } = lazyBinding(); - this._handle = new ChannelWrap(timeout, tries); + this._handle = new ChannelWrap(timeout, tries, maxTimeout); } cancel() { @@ -187,8 +195,8 @@ class ResolverBase { } [kDeserializeResolver]() { - const { timeout, tries, localAddress, servers } = this[kSnapshotStates]; - this[kInitializeHandle](timeout, tries); + const { timeout, tries, maxTimeout, localAddress, servers } = this[kSnapshotStates]; + this[kInitializeHandle](timeout, tries, maxTimeout); if (localAddress) { const { ipv4, ipv6 } = localAddress; this._handle.setLocalAddress(ipv4, ipv6); diff --git a/lib/internal/errors.js b/lib/internal/errors.js index 72e647d018c4e5..234262c8a994e9 100644 --- a/lib/internal/errors.js +++ b/lib/internal/errors.js @@ -1146,6 +1146,7 @@ E('ERR_CHILD_PROCESS_STDIO_MAXBUFFER', '%s maxBuffer length exceeded', RangeError); E('ERR_CONSOLE_WRITABLE_STREAM', 'Console expects a writable stream instance for %s', TypeError); +E('ERR_CONSTRUCT_CALL_REQUIRED', 'Class constructor %s cannot be invoked without `new`', TypeError); E('ERR_CONTEXT_NOT_INITIALIZED', 'context used is not initialized', Error); E('ERR_CRYPTO_CUSTOM_ENGINE_NOT_SUPPORTED', 'Custom engines not supported by this OpenSSL', Error); diff --git a/lib/internal/freeze_intrinsics.js b/lib/internal/freeze_intrinsics.js index ac7723b528e872..1d793d719f5fbf 100644 --- a/lib/internal/freeze_intrinsics.js +++ b/lib/internal/freeze_intrinsics.js @@ -31,6 +31,7 @@ const { ArrayPrototype, ArrayPrototypeForEach, ArrayPrototypePush, + AsyncIteratorPrototype, Atomics, BigInt, BigInt64Array, @@ -212,10 +213,7 @@ module.exports = function() { // 27 Control Abstraction Objects // 27.1 Iteration IteratorPrototype, // 27.1.2 IteratorPrototype - // 27.1.3 AsyncIteratorPrototype - ObjectGetPrototypeOf(ObjectGetPrototypeOf(ObjectGetPrototypeOf( - (async function*() {})(), - ))), + AsyncIteratorPrototype, // 27.1.3 AsyncIteratorPrototype PromisePrototype, // 27.2 // Other APIs / Web Compatibility diff --git a/lib/internal/fs/glob.js b/lib/internal/fs/glob.js index b5966549045b79..cffd67c78aae90 100644 --- a/lib/internal/fs/glob.js +++ b/lib/internal/fs/glob.js @@ -143,7 +143,7 @@ class Cache { if (cached) { return cached; } - const promise = PromisePrototypeThen(readdir(path, { __proto__: null, withFileTypes: true }), null, () => null); + const promise = PromisePrototypeThen(readdir(path, { __proto__: null, withFileTypes: true }), null, () => []); this.#readdirCache.set(path, promise); return promise; } diff --git a/lib/internal/fs/promises.js b/lib/internal/fs/promises.js index 96a60a55d69504..a6d4fb2ecb9e28 100644 --- a/lib/internal/fs/promises.js +++ b/lib/internal/fs/promises.js @@ -111,6 +111,7 @@ const kHandle = Symbol('kHandle'); const kFd = Symbol('kFd'); const kRefs = Symbol('kRefs'); const kClosePromise = Symbol('kClosePromise'); +const kCloseReason = Symbol('kCloseReason'); const kCloseResolve = Symbol('kCloseResolve'); const kCloseReject = Symbol('kCloseReject'); const kRef = Symbol('kRef'); @@ -389,6 +390,7 @@ class FileHandle extends EventEmitter { const handle = this[kHandle]; this[kFd] = -1; + this[kCloseReason] = 'The FileHandle has been transferred'; this[kHandle] = null; this[kRefs] = 0; 
@@ -455,7 +457,7 @@ async function fsCall(fn, handle, ...args) { if (handle.fd === -1) { // eslint-disable-next-line no-restricted-syntax - const err = new Error('file closed'); + const err = new Error(handle[kCloseReason] ?? 'file closed'); err.code = 'EBADF'; err.syscall = fn.name; throw err; diff --git a/lib/internal/histogram.js b/lib/internal/histogram.js index fe0fab0ea5d2f7..f2cf3835b9a62a 100644 --- a/lib/internal/histogram.js +++ b/lib/internal/histogram.js @@ -1,12 +1,13 @@ 'use strict'; const { + Map, + MapPrototypeClear, MapPrototypeEntries, NumberIsNaN, NumberMAX_SAFE_INTEGER, ObjectFromEntries, ReflectConstruct, - SafeMap, Symbol, } = primordials; @@ -216,7 +217,7 @@ class Histogram { get percentiles() { if (!isHistogram(this)) throw new ERR_INVALID_THIS('Histogram'); - this[kMap].clear(); + MapPrototypeClear(this[kMap]); this[kHandle]?.percentiles(this[kMap]); return this[kMap]; } @@ -228,7 +229,7 @@ class Histogram { get percentilesBigInt() { if (!isHistogram(this)) throw new ERR_INVALID_THIS('Histogram'); - this[kMap].clear(); + MapPrototypeClear(this[kMap]); this[kHandle]?.percentilesBigInt(this[kMap]); return this[kMap]; } @@ -331,7 +332,7 @@ function ClonedHistogram(handle) { function() { markTransferMode(this, true, false); this[kHandle] = handle; - this[kMap] = new SafeMap(); + this[kMap] = new Map(); }, [], Histogram); } @@ -342,7 +343,7 @@ function ClonedRecordableHistogram(handle) { markTransferMode(histogram, true, false); histogram[kRecordable] = true; - histogram[kMap] = new SafeMap(); + histogram[kMap] = new Map(); histogram[kHandle] = handle; histogram.constructor = RecordableHistogram; diff --git a/lib/internal/http2/core.js b/lib/internal/http2/core.js index 689481d2cca0c6..39c6409ae2e320 100644 --- a/lib/internal/http2/core.js +++ b/lib/internal/http2/core.js @@ -252,6 +252,7 @@ const kProceed = Symbol('proceed'); const kRemoteSettings = Symbol('remote-settings'); const kRequestAsyncResource = Symbol('requestAsyncResource'); const kSentHeaders = Symbol('sent-headers'); +const kRawHeaders = Symbol('raw-headers'); const kSentTrailers = Symbol('sent-trailers'); const kServer = Symbol('server'); const kState = Symbol('state'); @@ -1813,12 +1814,14 @@ class ClientHttp2Session extends Http2Session { let headersList; let headersObject; + let rawHeaders; let scheme; let authority; let method; if (ArrayIsArray(headersParam)) { ({ + rawHeaders, headersList, scheme, authority, @@ -1865,6 +1868,7 @@ class ClientHttp2Session extends Http2Session { // eslint-disable-next-line no-use-before-define const stream = new ClientHttp2Stream(this, undefined, undefined, {}); stream[kSentHeaders] = headersObject; // N.b. Only set for object headers, not raw headers + stream[kRawHeaders] = rawHeaders; // N.b. 
Only set for raw headers, not object headers stream[kOrigin] = `${scheme}://${authority}`; const reqAsync = new AsyncResource('PendingRequest'); stream[kRequestAsyncResource] = reqAsync; @@ -2134,6 +2138,33 @@ class Http2Stream extends Duplex { } get sentHeaders() { + if (this[kSentHeaders] || !this[kRawHeaders]) { + return this[kSentHeaders]; + } + + const rawHeaders = this[kRawHeaders]; + const headersObject = { __proto__: null }; + + for (let i = 0; i < rawHeaders.length; i += 2) { + const key = rawHeaders[i]; + const value = rawHeaders[i + 1]; + + const existing = headersObject[key]; + if (existing === undefined) { + headersObject[key] = value; + } else if (ArrayIsArray(existing)) { + existing.push(value); + } else { + headersObject[key] = [existing, value]; + } + } + + if (rawHeaders[kSensitiveHeaders] !== undefined) { + headersObject[kSensitiveHeaders] = rawHeaders[kSensitiveHeaders]; + } + + this[kSentHeaders] = headersObject; + return this[kSentHeaders]; } diff --git a/lib/internal/http2/util.js b/lib/internal/http2/util.js index 396623d3b9d06f..c5ad2dd4854f90 100644 --- a/lib/internal/http2/util.js +++ b/lib/internal/http2/util.js @@ -677,15 +677,23 @@ function prepareRequestHeadersArray(headers, session) { throw new ERR_HTTP2_CONNECT_PATH(); } - const headersList = buildNgHeaderString( + const rawHeaders = additionalPsuedoHeaders.length ? additionalPsuedoHeaders.concat(headers) : - headers, + headers; + + if (headers[kSensitiveHeaders] !== undefined) { + rawHeaders[kSensitiveHeaders] = headers[kSensitiveHeaders]; + } + + const headersList = buildNgHeaderString( + rawHeaders, assertValidPseudoHeader, headers[kSensitiveHeaders], ); return { + rawHeaders, headersList, scheme, authority: authority ?? headers[HTTP2_HEADER_HOST], diff --git a/lib/internal/inspector/network_resources.js b/lib/internal/inspector/network_resources.js new file mode 100644 index 00000000000000..166b4222cc297f --- /dev/null +++ b/lib/internal/inspector/network_resources.js @@ -0,0 +1,27 @@ +'use strict'; + +const { getOptionValue } = require('internal/options'); +const { validateString } = require('internal/validators'); +const { putNetworkResource } = internalBinding('inspector'); + +/** + * Registers a resource for the inspector using the internal 'putNetworkResource' binding. + * @param {string} url - The URL of the resource. + * @param {string} data - The content of the resource to provide. + */ +function put(url, data) { + if (!getOptionValue('--experimental-inspector-network-resource')) { + process.emitWarning( + 'The --experimental-inspector-network-resource option is not enabled. 
' + + 'Please enable it to use the putNetworkResource function'); + return; + } + validateString(url, 'url'); + validateString(data, 'data'); + + putNetworkResource(url, data); +} + +module.exports = { + put, +}; diff --git a/lib/internal/modules/cjs/loader.js b/lib/internal/modules/cjs/loader.js index 9b51e5bf4cdfbc..a7d8fa1139c820 100644 --- a/lib/internal/modules/cjs/loader.js +++ b/lib/internal/modules/cjs/loader.js @@ -51,6 +51,7 @@ const { ReflectSet, RegExpPrototypeExec, SafeMap, + SafeSet, String, StringPrototypeCharAt, StringPrototypeCharCodeAt, @@ -154,6 +155,7 @@ const internalFsBinding = internalBinding('fs'); const { safeGetenv } = internalBinding('credentials'); const { getCjsConditions, + getCjsConditionsArray, initializeCjsConditions, loadBuiltinModule, makeRequireFunction, @@ -632,6 +634,8 @@ const EXPORTS_PATTERN = /^((?:@[^/\\%]+\/)?[^./\\%][^/\\%]*)(\/.*)?$/; * Resolves the exports for a given module path and request. * @param {string} nmPath The path to the module. * @param {string} request The request for the module. + * @param {Set} conditions The conditions to use for resolution. + * @returns {undefined|string} */ function resolveExports(nmPath, request, conditions) { // The implementation's behavior is meant to mirror resolution in ESM. @@ -1040,9 +1044,22 @@ function resolveForCJSWithHooks(specifier, parent, isMain) { function defaultResolve(specifier, context) { // TODO(joyeecheung): parent and isMain should be part of context, then we // no longer need to use a different defaultResolve for every resolution. + // In the hooks, context.conditions is passed around as an array, but internally + // the resolution helpers expect a SafeSet. Do the conversion here. + let conditionSet; + const conditions = context.conditions; + if (conditions !== undefined && conditions !== getCjsConditionsArray()) { + if (!ArrayIsArray(conditions)) { + throw new ERR_INVALID_ARG_VALUE('context.conditions', conditions, + 'expected an array'); + } + conditionSet = new SafeSet(conditions); + } else { + conditionSet = getCjsConditions(); + } defaultResolvedFilename = defaultResolveImpl(specifier, parent, isMain, { __proto__: null, - conditions: context.conditions, + conditions: conditionSet, }); defaultResolvedURL = convertCJSFilenameToURL(defaultResolvedFilename); @@ -1050,7 +1067,7 @@ function resolveForCJSWithHooks(specifier, parent, isMain) { } const resolveResult = resolveWithHooks(specifier, parentURL, /* importAttributes */ undefined, - getCjsConditions(), defaultResolve); + getCjsConditionsArray(), defaultResolve); const { url } = resolveResult; format = resolveResult.format; @@ -1126,7 +1143,7 @@ function loadBuiltinWithHooks(id, url, format) { url ??= `node:${id}`; // TODO(joyeecheung): do we really want to invoke the load hook for the builtins? const loadResult = loadWithHooks(url, format || 'builtin', /* importAttributes */ undefined, - getCjsConditions(), getDefaultLoad(url, id)); + getCjsConditionsArray(), getDefaultLoad(url, id)); if (loadResult.format && loadResult.format !== 'builtin') { return undefined; // Format has been overridden, return undefined for the caller to continue loading. } @@ -1277,7 +1294,8 @@ Module._load = function(request, parent, isMain) { * @param {ResolveFilenameOptions} options Options object * @typedef {object} ResolveFilenameOptions * @property {string[]} paths Paths to search for modules in - * @property {string[]} conditions Conditions used for resolution. + * @property {Set?} conditions The conditions to use for resolution. 
+ * @returns {void|string} */ Module._resolveFilename = function(request, parent, isMain, options) { if (BuiltinModule.normalizeRequirableId(request)) { @@ -1720,7 +1738,8 @@ function loadSource(mod, filename, formatFromNode) { mod[kURL] = convertCJSFilenameToURL(filename); } - const loadResult = loadWithHooks(mod[kURL], mod[kFormat], /* importAttributes */ undefined, getCjsConditions(), + const loadResult = loadWithHooks(mod[kURL], mod[kFormat], /* importAttributes */ undefined, + getCjsConditionsArray(), getDefaultLoad(mod[kURL], filename)); // Reset the module properties with load hook results. diff --git a/lib/internal/modules/esm/formats.js b/lib/internal/modules/esm/formats.js index 91e3a906c9a34d..2689674c7a0f94 100644 --- a/lib/internal/modules/esm/formats.js +++ b/lib/internal/modules/esm/formats.js @@ -9,20 +9,15 @@ const { getValidatedPath } = require('internal/fs/utils'); const fsBindings = internalBinding('fs'); const { internal: internalConstants } = internalBinding('constants'); -const experimentalWasmModules = getOptionValue('--experimental-wasm-modules'); - const extensionFormatMap = { '__proto__': null, '.cjs': 'commonjs', '.js': 'module', '.json': 'json', '.mjs': 'module', + '.wasm': 'wasm', }; -if (experimentalWasmModules) { - extensionFormatMap['.wasm'] = 'wasm'; -} - if (getOptionValue('--experimental-strip-types')) { extensionFormatMap['.ts'] = 'module-typescript'; extensionFormatMap['.mts'] = 'module-typescript'; @@ -41,7 +36,7 @@ function mimeToFormat(mime) { ) !== null ) { return 'module'; } if (mime === 'application/json') { return 'json'; } - if (experimentalWasmModules && mime === 'application/wasm') { return 'wasm'; } + if (mime === 'application/wasm') { return 'wasm'; } return null; } @@ -52,7 +47,6 @@ function mimeToFormat(mime) { * @param {URL} url */ function getFormatOfExtensionlessFile(url) { - if (!experimentalWasmModules) { return 'module'; } const path = getValidatedPath(url); switch (fsBindings.getFormatOfExtensionlessFile(path)) { case internalConstants.EXTENSIONLESS_FORMAT_WASM: diff --git a/lib/internal/modules/esm/loader.js b/lib/internal/modules/esm/loader.js index 8d98d50395cf7f..87c14d1f84def7 100644 --- a/lib/internal/modules/esm/loader.js +++ b/lib/internal/modules/esm/loader.js @@ -42,6 +42,7 @@ const { kEvaluated, kEvaluating, kInstantiated, + kErrored, throwIfPromiseRejected, } = internalBinding('module_wrap'); const { @@ -394,6 +395,9 @@ class ModuleLoader { mod[kRequiredModuleSymbol] = job.module; const { namespace } = job.runSync(parent); return { wrap: job.module, namespace: namespace || job.module.getNamespace() }; + } else if (status === kErrored) { + // If the module was previously imported and errored, throw the error. + throw job.module.getError(); } // When the cached async job have already encountered a linking // error that gets wrapped into a rejection, but is still later @@ -687,24 +691,30 @@ class ModuleLoader { if (this.#customizations) { // Only has module.register hooks. return this.#customizations.resolve(specifier, parentURL, importAttributes); } - return this.#cachedDefaultResolve(specifier, parentURL, importAttributes); + return this.#cachedDefaultResolve(specifier, { + __proto__: null, + conditions: this.#defaultConditions, + parentURL, + importAttributes, + }); } /** * Either return a cached resolution, or perform the default resolution which is synchronous, and * cache the result. * @param {string} specifier See {@link resolve}. - * @param {string} [parentURL] See {@link resolve}. 
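With the change above, a synchronous `module.registerHooks()` resolve hook receives `context.conditions` as an array, and a customized array handed back to the default CJS resolver is converted to a set internally. A minimal sketch under that assumption; `my-custom-condition` is a made-up condition name:

```js
'use strict';
const { registerHooks } = require('node:module');

registerHooks({
  resolve(specifier, context, nextResolve) {
    // context.conditions is an array of condition names here; a customized
    // copy passed back to the default resolver is converted to a SafeSet
    // internally per the change above.
    return nextResolve(specifier, {
      ...context,
      conditions: [...context.conditions, 'my-custom-condition'],
    });
  },
});
```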
- * @param {ImportAttributes} importAttributes See {@link resolve}. + * @param {{ parentURL?: string, importAttributes: ImportAttributes, conditions?: string[]}} context * @returns {{ format: string, url: string }} */ - #cachedDefaultResolve(specifier, parentURL, importAttributes) { + #cachedDefaultResolve(specifier, context) { + const { parentURL, importAttributes } = context; const requestKey = this.#resolveCache.serializeKey(specifier, importAttributes); const cachedResult = this.#resolveCache.get(requestKey, parentURL); if (cachedResult != null) { return cachedResult; } - const result = this.defaultResolve(specifier, parentURL, importAttributes); + defaultResolve ??= require('internal/modules/esm/resolve').defaultResolve; + const result = defaultResolve(specifier, context); this.#resolveCache.set(requestKey, parentURL, result); return result; } @@ -732,14 +742,15 @@ class ModuleLoader { * This is the default resolve step for module.registerHooks(), which incorporates asynchronous hooks * from module.register() which are run in a blocking fashion for it to be synchronous. * @param {string|URL} specifier See {@link resolveSync}. - * @param {{ parentURL?: string, importAttributes: ImportAttributes}} context See {@link resolveSync}. + * @param {{ parentURL?: string, importAttributes: ImportAttributes, conditions?: string[]}} context + * See {@link resolveSync}. * @returns {{ format: string, url: string }} */ #resolveAndMaybeBlockOnLoaderThread(specifier, context) { if (this.#customizations) { return this.#customizations.resolveSync(specifier, context.parentURL, context.importAttributes); } - return this.#cachedDefaultResolve(specifier, context.parentURL, context.importAttributes); + return this.#cachedDefaultResolve(specifier, context); } /** @@ -762,25 +773,12 @@ class ModuleLoader { return resolveWithHooks(specifier, parentURL, importAttributes, this.#defaultConditions, this.#resolveAndMaybeBlockOnLoaderThread.bind(this)); } - return this.#resolveAndMaybeBlockOnLoaderThread(specifier, { parentURL, importAttributes }); - } - - /** - * Our `defaultResolve` is synchronous and can be used in both - * `resolve` and `resolveSync`. This function is here just to avoid - * repeating the same code block twice in those functions. - */ - defaultResolve(originalSpecifier, parentURL, importAttributes) { - defaultResolve ??= require('internal/modules/esm/resolve').defaultResolve; - - const context = { + return this.#resolveAndMaybeBlockOnLoaderThread(specifier, { __proto__: null, conditions: this.#defaultConditions, - importAttributes, parentURL, - }; - - return defaultResolve(originalSpecifier, context); + importAttributes, + }); } /** diff --git a/lib/internal/modules/esm/module_job.js b/lib/internal/modules/esm/module_job.js index c27dad5484ced6..2ddfc77e64f8da 100644 --- a/lib/internal/modules/esm/module_job.js +++ b/lib/internal/modules/esm/module_job.js @@ -297,7 +297,7 @@ class ModuleJob extends ModuleJobBase { assert(this.module instanceof ModuleWrap); let status = this.module.getStatus(); - debug('ModuleJob.runSync', this.module); + debug('ModuleJob.runSync()', status, this.module); // FIXME(joyeecheung): this cannot fully handle < kInstantiated. Make the linking // fully synchronous instead. 
if (status === kUninstantiated) { @@ -332,6 +332,8 @@ class ModuleJob extends ModuleJobBase { } async run(isEntryPoint = false) { + debug('ModuleJob.run()', this.module); + assert(this.phase === kEvaluationPhase); await this.instantiate(); if (isEntryPoint) { globalThis[entry_point_module_private_symbol] = this.module; @@ -411,7 +413,11 @@ class ModuleJobSync extends ModuleJobBase { async run() { // This path is hit by a require'd module that is imported again. const status = this.module.getStatus(); - if (status > kInstantiated) { + debug('ModuleJobSync.run()', status, this.module); + // If the module was previously required and errored, reject from import() again. + if (status === kErrored) { + throw this.module.getError(); + } else if (status > kInstantiated) { if (this.evaluationPromise) { await this.evaluationPromise; } @@ -432,6 +438,8 @@ class ModuleJobSync extends ModuleJobBase { } runSync(parent) { + debug('ModuleJobSync.runSync()', this.module); + assert(this.phase === kEvaluationPhase); // TODO(joyeecheung): add the error decoration logic from the async instantiate. this.module.async = this.module.instantiateSync(); // If --experimental-print-required-tla is true, proceeds to evaluation even diff --git a/lib/internal/modules/esm/translators.js b/lib/internal/modules/esm/translators.js index 3a69558d7a3dba..d595ccbab8ce30 100644 --- a/lib/internal/modules/esm/translators.js +++ b/lib/internal/modules/esm/translators.js @@ -433,8 +433,6 @@ translators.set('json', function jsonStrategy(url, source) { */ const wasmInstances = new SafeWeakMap(); translators.set('wasm', async function(url, source) { - emitExperimentalWarning('Importing WebAssembly modules'); - assertBufferSource(source, false, 'load'); debug(`Translating WASMModule ${url}`); @@ -444,7 +442,9 @@ translators.set('wasm', async function(url, source) { // TODO(joyeecheung): implement a translator that just uses // compiled = new WebAssembly.Module(source) to compile it // synchronously. - compiled = await WebAssembly.compile(source); + compiled = await WebAssembly.compile(source, { + builtins: ['js-string'], + }); } catch (err) { err.message = errPath(url) + ': ' + err.message; throw err; @@ -456,6 +456,13 @@ translators.set('wasm', async function(url, source) { if (impt.kind === 'global') { ArrayPrototypePush(wasmGlobalImports, impt); } + // Prefix reservations per https://webassembly.github.io/esm-integration/js-api/index.html#parse-a-webassembly-module. 
+ if (impt.module.startsWith('wasm-js:')) { + throw new WebAssembly.LinkError(`Invalid Wasm import "${impt.module}" in ${url}`); + } + if (impt.name.startsWith('wasm:') || impt.name.startsWith('wasm-js:')) { + throw new WebAssembly.LinkError(`Invalid Wasm import name "${impt.module}" in ${url}`); + } importsList.add(impt.module); } @@ -465,12 +472,16 @@ translators.set('wasm', async function(url, source) { if (expt.kind === 'global') { wasmGlobalExports.add(expt.name); } + if (expt.name.startsWith('wasm:') || expt.name.startsWith('wasm-js:')) { + throw new WebAssembly.LinkError(`Invalid Wasm export name "${expt.name}" in ${url}`); + } exportsList.add(expt.name); } const createDynamicModule = require('internal/modules/esm/create_dynamic_module'); const { module } = createDynamicModule([...importsList], [...exportsList], url, (reflect) => { + emitExperimentalWarning('Importing WebAssembly module instances'); for (const impt of importsList) { const importNs = reflect.imports[impt]; const wasmInstance = wasmInstances.get(importNs); @@ -487,6 +498,7 @@ translators.set('wasm', async function(url, source) { reflect.imports[impt] = wrappedModule; } } + // In cycles importing unexecuted Wasm, wasmInstance will be undefined, which will fail during // instantiation, since all bindings will be in the Temporal Deadzone (TDZ). const { exports } = new WebAssembly.Instance(compiled, reflect.imports); diff --git a/lib/internal/modules/helpers.js b/lib/internal/modules/helpers.js index 84ee4d33bf1633..f906d69b7359ac 100644 --- a/lib/internal/modules/helpers.js +++ b/lib/internal/modules/helpers.js @@ -65,6 +65,9 @@ function toRealPath(requestPath) { /** @type {Set} */ let cjsConditions; +/** @type {string[]} */ +let cjsConditionsArray; + /** * Define the conditions that apply to the CommonJS loader. */ @@ -73,15 +76,17 @@ function initializeCjsConditions() { const noAddons = getOptionValue('--no-addons'); const addonConditions = noAddons ? [] : ['node-addons']; // TODO: Use this set when resolving pkg#exports conditions in loader.js. - cjsConditions = new SafeSet([ + cjsConditionsArray = [ 'require', 'node', ...addonConditions, ...userConditions, - ]); + ]; if (getOptionValue('--experimental-require-module')) { - cjsConditions.add('module-sync'); + cjsConditionsArray.push('module-sync'); } + ObjectFreeze(cjsConditionsArray); + cjsConditions = new SafeSet(cjsConditionsArray); } /** @@ -94,6 +99,13 @@ function getCjsConditions() { return cjsConditions; } +function getCjsConditionsArray() { + if (cjsConditionsArray === undefined) { + initializeCjsConditions(); + } + return cjsConditionsArray; +} + /** * Provide one of Node.js' public modules to user code. 
* @param {string} id - The identifier/specifier of the builtin module to load @@ -407,6 +419,7 @@ module.exports = { flushCompileCache, getBuiltinModule, getCjsConditions, + getCjsConditionsArray, getCompileCacheDir, initializeCjsConditions, loadBuiltinModule, diff --git a/lib/internal/modules/package_json_reader.js b/lib/internal/modules/package_json_reader.js index 47b248c2ae6306..fb669ea12eeff3 100644 --- a/lib/internal/modules/package_json_reader.js +++ b/lib/internal/modules/package_json_reader.js @@ -5,6 +5,7 @@ const { JSONParse, ObjectDefineProperty, RegExpPrototypeExec, + SafeMap, StringPrototypeIndexOf, StringPrototypeSlice, } = primordials; @@ -28,6 +29,8 @@ const path = require('path'); const { validateString } = require('internal/validators'); const internalFsBinding = internalBinding('fs'); +const nearestParentPackageJSONCache = new SafeMap(); + /** * @typedef {import('typings/internalBinding/modules').DeserializedPackageConfig} DeserializedPackageConfig * @typedef {import('typings/internalBinding/modules').PackageConfig} PackageConfig @@ -131,13 +134,21 @@ function read(jsonPath, { base, specifier, isESM } = kEmptyObject) { * @returns {undefined | DeserializedPackageConfig} */ function getNearestParentPackageJSON(checkPath) { + if (nearestParentPackageJSONCache.has(checkPath)) { + return nearestParentPackageJSONCache.get(checkPath); + } + const result = modulesBinding.getNearestParentPackageJSON(checkPath); if (result === undefined) { + nearestParentPackageJSONCache.set(checkPath, undefined); return undefined; } - return deserializePackageJSON(checkPath, result); + const packageConfig = deserializePackageJSON(checkPath, result); + nearestParentPackageJSONCache.set(checkPath, packageConfig); + + return packageConfig; } /** diff --git a/lib/internal/process/per_thread.js b/lib/internal/process/per_thread.js index 47e86728458654..092464c9ffd440 100644 --- a/lib/internal/process/per_thread.js +++ b/lib/internal/process/per_thread.js @@ -40,6 +40,7 @@ const { ERR_FEATURE_UNAVAILABLE_ON_PLATFORM, ERR_INVALID_ARG_TYPE, ERR_INVALID_ARG_VALUE, + ERR_OPERATION_FAILED, ERR_OUT_OF_RANGE, ERR_UNKNOWN_SIGNAL, ERR_WORKER_UNSUPPORTED_OPERATION, @@ -110,6 +111,7 @@ function nop() {} function wrapProcessMethods(binding) { const { cpuUsage: _cpuUsage, + threadCpuUsage: _threadCpuUsage, memoryUsage: _memoryUsage, rss, resourceUsage: _resourceUsage, @@ -162,6 +164,50 @@ function wrapProcessMethods(binding) { }; } + const threadCpuValues = new Float64Array(2); + + // Replace the native function with the JS version that calls the native + // function. + function threadCpuUsage(prevValue) { + // If a previous value was passed in, ensure it has the correct shape. + if (prevValue) { + if (!previousValueIsValid(prevValue.user)) { + validateObject(prevValue, 'prevValue'); + + validateNumber(prevValue.user, 'prevValue.user'); + throw new ERR_INVALID_ARG_VALUE.RangeError('prevValue.user', + prevValue.user); + } + + if (!previousValueIsValid(prevValue.system)) { + validateNumber(prevValue.system, 'prevValue.system'); + throw new ERR_INVALID_ARG_VALUE.RangeError('prevValue.system', + prevValue.system); + } + } + + if (process.platform === 'sunos') { + throw new ERR_OPERATION_FAILED('threadCpuUsage is not available on SunOS'); + } + + // Call the native function to get the current values. + _threadCpuUsage(threadCpuValues); + + // If a previous value was passed in, return diff of current from previous. 
+ if (prevValue) { + return { + user: threadCpuValues[0] - prevValue.user, + system: threadCpuValues[1] - prevValue.system, + }; + } + + // If no previous value passed in, return current value. + return { + user: threadCpuValues[0], + system: threadCpuValues[1], + }; + } + // Ensure that a previously passed in value is valid. Currently, the native // implementation always returns numbers <= Number.MAX_SAFE_INTEGER. function previousValueIsValid(num) { @@ -237,7 +283,7 @@ function wrapProcessMethods(binding) { return true; } - function execve(execPath, args, env) { + function execve(execPath, args = [], env) { emitExperimentalWarning('process.execve'); const { isMainThread } = require('internal/worker'); @@ -326,6 +372,7 @@ function wrapProcessMethods(binding) { return { _rawDebug, cpuUsage, + threadCpuUsage, resourceUsage, memoryUsage, kill, diff --git a/lib/internal/process/pre_execution.js b/lib/internal/process/pre_execution.js index 0cda54fd85e1e0..98ed40e3076f66 100644 --- a/lib/internal/process/pre_execution.js +++ b/lib/internal/process/pre_execution.js @@ -440,10 +440,7 @@ function setupStacktracePrinterOnSigint() { if (!getOptionValue('--trace-sigint')) { return; } - const { SigintWatchdog } = require('internal/watchdog'); - - const watchdog = new SigintWatchdog(); - watchdog.start(); + require('internal/util/trace_sigint').setTraceSigInt(true); } function initializeReport() { diff --git a/lib/internal/source_map/prepare_stack_trace.js b/lib/internal/source_map/prepare_stack_trace.js index 3e4b0825e7b3a5..814ea396f60144 100644 --- a/lib/internal/source_map/prepare_stack_trace.js +++ b/lib/internal/source_map/prepare_stack_trace.js @@ -112,17 +112,12 @@ function serializeJSStackFrame(sm, callSite, callerCallSite) { const typeName = callSite.getTypeName(); const namePrefix = typeName !== null && typeName !== 'global' ? `${typeName}.` : ''; - const originalName = `${namePrefix}${fnName || ''}`; - // The original call site may have a different symbol name - // associated with it, use it: - const mappedName = (name && name !== originalName) ? - `${name}` : - `${originalName}`; - const hasName = !!(name || originalName); + const originalName = `${fnName || ''}`; + const mappedName = `${namePrefix}${name || originalName}` || ''; // Replace the transpiled call site with the original: - return `${prefix}${mappedName}${hasName ? ' (' : ''}` + + return `${prefix}${mappedName} (` + `${originalSourceNoScheme}:${originalLine + 1}:` + - `${originalColumn + 1}${hasName ? ')' : ''}`; + `${originalColumn + 1})`; } // Transpilers may have removed the original symbol name used in the stack diff --git a/lib/internal/streams/writable.js b/lib/internal/streams/writable.js index a3a1a965b00a87..84ef2e2629f4f3 100644 --- a/lib/internal/streams/writable.js +++ b/lib/internal/streams/writable.js @@ -786,7 +786,7 @@ function clearBuffer(stream, state) { if (i === buffered.length) { resetBuffer(state); } else if (i > 256) { - buffered.splice(0, i); + state[kBufferedValue] = ArrayPrototypeSlice(buffered, i); state.bufferedIndex = 0; } else { state.bufferedIndex = i; diff --git a/lib/internal/util.js b/lib/internal/util.js index 7e460735493f10..50863e2fda8952 100644 --- a/lib/internal/util.js +++ b/lib/internal/util.js @@ -158,7 +158,7 @@ function pendingDeprecate(fn, msg, code) { // Mark that a method should not be used. // Returns a modified function which warns once by default. // If --no-deprecation is set, then it is a no-op. 
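Assuming the `threadCpuUsage` wrapper above ends up exposed on `process` alongside `cpuUsage()` (as the `wrapProcessMethods` return value suggests), a minimal usage sketch:

```js
'use strict';
// Illustrative only: the exact exposure and units are assumptions, hedged on
// threadCpuUsage() mirroring process.cpuUsage()'s { user, system } shape.
const start = process.threadCpuUsage();

for (let i = 0; i < 1e7; i++); // burn a little CPU on the current thread

// Passing the previous reading back returns the per-field difference.
const delta = process.threadCpuUsage(start);
console.log(delta.user >= 0, delta.system >= 0);
```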
-function deprecate(fn, msg, code, useEmitSync) { +function deprecate(fn, msg, code, useEmitSync, modifyPrototype = true) { // Lazy-load to avoid a circular dependency. if (validateString === undefined) ({ validateString } = require('internal/validators')); @@ -181,19 +181,23 @@ function deprecate(fn, msg, code, useEmitSync) { return ReflectApply(fn, this, args); } - // The wrapper will keep the same prototype as fn to maintain prototype chain - ObjectSetPrototypeOf(deprecated, fn); - if (fn.prototype) { - // Setting this (rather than using Object.setPrototype, as above) ensures - // that calling the unwrapped constructor gives an instanceof the wrapped - // constructor. - deprecated.prototype = fn.prototype; - } + if (modifyPrototype) { + // The wrapper will keep the same prototype as fn to maintain prototype chain + // Modifying the prototype does alter the object chains, and as observed in + // most cases, it slows the code. + ObjectSetPrototypeOf(deprecated, fn); + if (fn.prototype) { + // Setting this (rather than using Object.setPrototype, as above) ensures + // that calling the unwrapped constructor gives an instanceof the wrapped + // constructor. + deprecated.prototype = fn.prototype; + } - ObjectDefineProperty(deprecated, 'length', { - __proto__: null, - ...ObjectGetOwnPropertyDescriptor(fn, 'length'), - }); + ObjectDefineProperty(deprecated, 'length', { + __proto__: null, + ...ObjectGetOwnPropertyDescriptor(fn, 'length'), + }); + } return deprecated; } diff --git a/lib/internal/util/trace_sigint.js b/lib/internal/util/trace_sigint.js new file mode 100644 index 00000000000000..fe946f1486c232 --- /dev/null +++ b/lib/internal/util/trace_sigint.js @@ -0,0 +1,29 @@ +'use strict'; + +const { isMainThread } = require('worker_threads'); +const { + ERR_WORKER_UNSUPPORTED_OPERATION, +} = require('internal/errors').codes; + +let sigintWatchdog; +function getSigintWatchdog() { + if (!sigintWatchdog) { + const { SigintWatchdog } = require('internal/watchdog'); + sigintWatchdog = new SigintWatchdog(); + } + return sigintWatchdog; +} + +function setTraceSigInt(enable) { + if (!isMainThread) + throw new ERR_WORKER_UNSUPPORTED_OPERATION('Calling util.setTraceSigInt'); + if (enable) { + getSigintWatchdog().start(); + } else { + getSigintWatchdog().stop(); + } +}; + +module.exports = { + setTraceSigInt, +}; diff --git a/lib/internal/worker.js b/lib/internal/worker.js index b1fe97a5b81285..748e584ce2c6c0 100644 --- a/lib/internal/worker.js +++ b/lib/internal/worker.js @@ -8,6 +8,7 @@ const { Float64Array, FunctionPrototypeBind, MathMax, + NumberMAX_SAFE_INTEGER, ObjectEntries, Promise, PromiseResolve, @@ -39,6 +40,7 @@ const { ERR_WORKER_INVALID_EXEC_ARGV, ERR_INVALID_ARG_TYPE, ERR_INVALID_ARG_VALUE, + ERR_OPERATION_FAILED, } = errorCodes; const workerIo = require('internal/worker/io'); @@ -59,7 +61,7 @@ const { createMainThreadPort, destroyMainThreadPort } = require('internal/worker const { deserializeError } = require('internal/error_serdes'); const { fileURLToPath, isURL, pathToFileURL } = require('internal/url'); const { kEmptyObject, SymbolAsyncDispose } = require('internal/util'); -const { validateArray, validateString } = require('internal/validators'); +const { validateArray, validateString, validateObject, validateNumber } = require('internal/validators'); const { throwIfBuildingSnapshot, } = require('internal/v8/startup_snapshot'); @@ -473,6 +475,37 @@ class Worker extends EventEmitter { }; }); } + + cpuUsage(prev) { + if (prev) { + validateObject(prev, 'prev'); + 
validateNumber(prev.user, 'prev.user', 0, NumberMAX_SAFE_INTEGER); + validateNumber(prev.system, 'prev.system', 0, NumberMAX_SAFE_INTEGER); + } + if (process.platform === 'sunos') { + throw new ERR_OPERATION_FAILED('worker.cpuUsage() is not available on SunOS'); + } + const taker = this[kHandle]?.cpuUsage(); + return new Promise((resolve, reject) => { + if (!taker) return reject(new ERR_WORKER_NOT_RUNNING()); + taker.ondone = (err, current) => { + if (err !== null) { + return reject(err); + } + if (prev) { + resolve({ + user: current.user - prev.user, + system: current.system - prev.system, + }); + } else { + resolve({ + user: current.user, + system: current.system, + }); + } + }; + }); + } } /** diff --git a/lib/path.js b/lib/path.js index d39f67e75cd6b0..722e1ea27cf0a0 100644 --- a/lib/path.js +++ b/lib/path.js @@ -74,6 +74,8 @@ const WINDOWS_RESERVED_NAMES = [ 'CON', 'PRN', 'AUX', 'NUL', 'COM1', 'COM2', 'COM3', 'COM4', 'COM5', 'COM6', 'COM7', 'COM8', 'COM9', 'LPT1', 'LPT2', 'LPT3', 'LPT4', 'LPT5', 'LPT6', 'LPT7', 'LPT8', 'LPT9', + 'COM\xb9', 'COM\xb2', 'COM\xb3', + 'LPT\xb9', 'LPT\xb2', 'LPT\xb3', ]; function isWindowsReservedName(path, colonIndex) { diff --git a/lib/tls.js b/lib/tls.js index 7dd572afde78cb..442ec0c0db4972 100644 --- a/lib/tls.js +++ b/lib/tls.js @@ -37,6 +37,7 @@ const { ERR_TLS_CERT_ALTNAME_INVALID, ERR_OUT_OF_RANGE, ERR_INVALID_ARG_VALUE, + ERR_INVALID_ARG_TYPE, } = require('internal/errors').codes; const internalUtil = require('internal/util'); internalUtil.assertCrypto(); @@ -51,6 +52,8 @@ const { getBundledRootCertificates, getExtraCACertificates, getSystemCACertificates, + resetRootCertStore, + getUserRootCertificates, getSSLCiphers, } = internalBinding('crypto'); const { Buffer } = require('buffer'); @@ -123,8 +126,17 @@ function cacheSystemCACertificates() { } let defaultCACertificates; +let hasResetDefaultCACertificates = false; + function cacheDefaultCACertificates() { if (defaultCACertificates) { return defaultCACertificates; } + + if (hasResetDefaultCACertificates) { + defaultCACertificates = getUserRootCertificates(); + ObjectFreeze(defaultCACertificates); + return defaultCACertificates; + } + defaultCACertificates = []; if (!getOptionValue('--use-openssl-ca')) { @@ -172,6 +184,26 @@ function getCACertificates(type = 'default') { } exports.getCACertificates = getCACertificates; +function setDefaultCACertificates(certs) { + if (!ArrayIsArray(certs)) { + throw new ERR_INVALID_ARG_TYPE('certs', 'Array', certs); + } + + // Verify that all elements in the array are strings + for (let i = 0; i < certs.length; i++) { + if (typeof certs[i] !== 'string' && !isArrayBufferView(certs[i])) { + throw new ERR_INVALID_ARG_TYPE( + `certs[${i}]`, ['string', 'ArrayBufferView'], certs[i]); + } + } + + resetRootCertStore(certs); + defaultCACertificates = undefined; // Reset the cached default certificates + hasResetDefaultCACertificates = true; +} + +exports.setDefaultCACertificates = setDefaultCACertificates; + // Convert protocols array into valid OpenSSL protocols list // ("\x06spdy/2\x08http/1.1\x08http/1.0") function convertProtocols(protocols) { diff --git a/lib/util.js b/lib/util.js index dba1bfba367652..8d8339ec31be53 100644 --- a/lib/util.js +++ b/lib/util.js @@ -26,6 +26,7 @@ const { ArrayPrototypeJoin, ArrayPrototypePop, ArrayPrototypePush, + ArrayPrototypeReduce, Date, DatePrototypeGetDate, DatePrototypeGetHours, @@ -44,6 +45,8 @@ const { ObjectSetPrototypeOf, ObjectValues, ReflectApply, + RegExp, + RegExpPrototypeSymbolReplace, StringPrototypePadStart, 
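A short sketch of the `tls.setDefaultCACertificates()` export added above; the PEM content is a caller-supplied placeholder, and passing `tls.getCACertificates('bundled')` back in approximates restoring the shipped defaults:

```js
'use strict';
const tls = require('node:tls');

// Placeholder PEM; a real certificate body goes between the markers.
const customCaPem =
  '-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----\n';

// Replace the process-wide default CA list with a single certificate.
tls.setDefaultCACertificates([customCaPem]);
console.log(tls.getCACertificates('default').length); // 1

// Hand the bundled list back to approximate the original defaults.
tls.setDefaultCACertificates(tls.getCACertificates('bundled'));
```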
StringPrototypeToWellFormed, } = primordials; @@ -254,8 +257,7 @@ function styleText(format, text, { validateStream = true, stream = process.stdou // If the format is not an array, convert it to an array const formatArray = ArrayIsArray(format) ? format : [format]; - let left = ''; - let right = ''; + const codes = []; for (const key of formatArray) { if (key === 'none') continue; const formatCodes = inspect.colors[key]; @@ -264,11 +266,56 @@ function styleText(format, text, { validateStream = true, stream = process.stdou validateOneOf(key, 'format', ObjectKeys(inspect.colors)); } if (skipColorize) continue; - left += escapeStyleCode(formatCodes[0]); - right = `${escapeStyleCode(formatCodes[1])}${right}`; + ArrayPrototypePush(codes, formatCodes); } - return skipColorize ? text : `${left}${text}${right}`; + if (skipColorize) { + return text; + } + + // Build opening codes + let openCodes = ''; + for (let i = 0; i < codes.length; i++) { + openCodes += escapeStyleCode(codes[i][0]); + } + + // Process the text to handle nested styles + let processedText; + if (codes.length > 0) { + processedText = ArrayPrototypeReduce( + codes, + (text, code) => RegExpPrototypeSymbolReplace( + // Find the reset code + new RegExp(`\\u001b\\[${code[1]}m`, 'g'), + text, + (match, offset) => { + // Check if there's more content after this reset + if (offset + match.length < text.length) { + if ( + code[0] === inspect.colors.dim[0] || + code[0] === inspect.colors.bold[0] + ) { + // Dim and bold are not mutually exclusive, so we need to reapply + return `${match}${escapeStyleCode(code[0])}`; + } + return `${escapeStyleCode(code[0])}`; + } + return match; + }, + ), + text, + ); + } else { + processedText = text; + } + + // Build closing codes in reverse order + let closeCodes = ''; + for (let i = codes.length - 1; i >= 0; i--) { + closeCodes += escapeStyleCode(codes[i][1]); + } + + return `${openCodes}${processedText}${closeCodes}`; } const months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', @@ -687,3 +734,9 @@ defineLazyProperties( 'internal/util/diff', ['diff'], ); + +defineLazyProperties( + module.exports, + 'internal/util/trace_sigint', + ['setTraceSigInt'], +); diff --git a/lib/zlib.js b/lib/zlib.js index 590c6edd118903..c46f83f9a9ff75 100644 --- a/lib/zlib.js +++ b/lib/zlib.js @@ -891,12 +891,15 @@ class Zstd extends ZlibBase { const pledgedSrcSize = opts?.pledgedSrcSize ?? undefined; const writeState = new Uint32Array(2); + handle.init( initParamsArray, pledgedSrcSize, writeState, processCallback, + opts?.dictionary && isArrayBufferView(opts.dictionary) ? opts.dictionary : undefined, ); + super(opts, mode, handle, zstdDefaultOpts); this._writeState = writeState; } diff --git a/node.gni b/node.gni index b049f0692980c3..d4438f7fd61598 100644 --- a/node.gni +++ b/node.gni @@ -42,6 +42,9 @@ declare_args() { # The variable is called "openssl" for parity with node's GYP build. node_use_openssl = true + # Build node with SQLite support. + node_use_sqlite = true + # Use the specified path to system CA (PEM format) in addition to # the BoringSSL supplied CA store or compiled-in Mozilla CA copy. 
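The nested-style handling in `util.styleText()` above is easiest to see with two foreground colors that share the same reset code; previously the inner reset also cleared the outer color, whereas now the outer style is re-applied:

```js
'use strict';
const { styleText } = require('node:util');

// 'red' and 'green' both close with the same foreground reset (ESC[39m), so
// the inner reset used to clear the outer red for the trailing text as well.
const inner = styleText('green', 'green part');
console.log(styleText('red', `red before ${inner} red after`));
```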
node_openssl_system_ca_path = "" diff --git a/node.gyp b/node.gyp index 442c1e7a6ddafb..d604e0ddd97317 100644 --- a/node.gyp +++ b/node.gyp @@ -1336,6 +1336,13 @@ }], ] }, # overlapped-checker + { + 'target_name': 'nop', + 'type': 'executable', + 'sources': [ + 'test/nop/nop.c', + ] + }, # nop { 'target_name': 'node_js2c', 'type': 'executable', diff --git a/onboarding.md b/onboarding.md index 5747450831f515..6cde6329ac149f 100644 --- a/onboarding.md +++ b/onboarding.md @@ -230,11 +230,13 @@ needs to be pointed out separately during the onboarding. labels. The `fast-track` label should cause the Node.js GitHub bot to post a comment in the pull request asking collaborators to approve the pull request by leaving a 👍 reaction on the comment. -* Optional: Run CI on the pull request. Use the `node-test-pull-request` CI +* Optional: Run Jenkins CI on the pull request. Use the [`node-test-pull-request`][] task. As a convenience, you may apply the `request-ci` label to the pull request to have a GitHub Actions workflow start the Jenkins CI task for you. * After two Collaborator approvals for the change and two Collaborator approvals - for fast-tracking, land the PR. + for fast-tracking, land the PR. If you have started a full Jenkins CI, cancel it + from the Jenkins UI, since the PR is a doc-only change and does not need + a full CI run; it is run only as an exercise. * If there are not enough approvals within a reasonable time, consider the single approval of the onboarding TSC member sufficient, and land the pull request. @@ -245,6 +247,20 @@ needs to be pointed out separately during the onboarding. * [`core-validate-commit`][] automates the validation of commit messages. This will be run during `git node land --final` of the [`git-node`][] command. + * Normally you can just use the `commit-queue` label to have the + commit queued for landing by the Node.js GitHub bot. But as an exercise it is + also useful to learn how to land commits manually in case the bot or the CI + is broken. +* If you are landing the commit manually, to make it appear as "Merged" on GitHub, + after you prepare the landed commit on the local `main` branch, run this: + + ```bash + git push --force-with-lease your-fork-remote HEAD:your-pr-branch # Update the PR branch in your fork. + git push upstream main # Push the landed commit to the upstream main branch. + ``` + + GitHub will automatically detect that the PR branch is now identical to the + `main` branch and will mark the PR as "Merged". ## Final notes @@ -253,11 +269,14 @@ * Almost any mistake you could make can be fixed or reverted. * The existing collaborators trust you and are grateful for your help! * Other repositories: - * - * - * - * - * + * <https://github.com/nodejs/TSC>: Governance discussions and TSC votes + * <https://github.com/nodejs/build>: Build infrastructure discussions and CI issues + * <https://github.com/nodejs/nodejs.org>: The Node.js website and blog + * <https://github.com/nodejs/Release>: Release management and release planning + * <https://github.com/nodejs/citgm>: Tool for testing popular packages against Node.js changes + * <https://github.com/nodejs/admin>: Administrative issues and requests for changes to the Node.js + GitHub organization (e.g. creating new repositories, new teams, adding organization-wide tokens). + * <https://github.com/nodejs/moderation>: Requests to moderate comments or block spammers. * The OpenJS Foundation hosts regular summits for active contributors to the Node.js project, where we have face-to-face discussions about our work on the project. The Foundation has travel funds to cover [participants' expenses][] @@ -266,6 +285,8 @@
repository for details. * If you are interested in helping to fix coverity reports consider requesting access to the projects coverity project as outlined in [static-analysis][]. +* If you are interested in helping out with CI reliability, check out the + [reliability repository][] and [guide on how to deal with CI flakes][]. [Code of Conduct]: https://github.com/nodejs/admin/blob/HEAD/CODE_OF_CONDUCT.md [Labels]: doc/contributing/collaborator-guide.md#labels @@ -275,7 +296,10 @@ needs to be pointed out separately during the onboarding. [`author-ready`]: doc/contributing/collaborator-guide.md#author-ready-pull-requests [`core-validate-commit`]: https://github.com/nodejs/core-validate-commit [`git-node`]: https://github.com/nodejs/node-core-utils/blob/HEAD/docs/git-node.md +[`node-test-pull-request`]: https://ci.nodejs.org/job/node-test-pull-request/ +[guide on how to deal with CI flakes]: https://github.com/nodejs/test?tab=readme-ov-file#protocols-in-improving-ci-reliability [participants' expenses]: https://github.com/openjs-foundation/cross-project-council/blob/main/community-fund/COMMUNITY_FUND_POLICY.md#community-fund-rules +[reliability repository]: https://github.com/nodejs/reliability [set up the credentials]: https://github.com/nodejs/node-core-utils#setting-up-github-credentials [static-analysis]: doc/contributing/static-analysis.md [two-factor authentication]: https://help.github.com/articles/securing-your-account-with-two-factor-authentication-2fa/ diff --git a/src/async_wrap.h b/src/async_wrap.h index ab066e826b3027..db8b3b372d871d 100644 --- a/src/async_wrap.h +++ b/src/async_wrap.h @@ -78,6 +78,7 @@ namespace node { V(UDPWRAP) \ V(SIGINTWATCHDOG) \ V(WORKER) \ + V(WORKERCPUUSAGE) \ V(WORKERHEAPSNAPSHOT) \ V(WORKERHEAPSTATISTICS) \ V(WRITEWRAP) \ diff --git a/src/base_object.h b/src/base_object.h index 778c8a093bfb04..6fecd4776364ec 100644 --- a/src/base_object.h +++ b/src/base_object.h @@ -124,11 +124,6 @@ class BaseObject : public MemoryRetainer { // a BaseObjectPtr to this object. inline void Detach(); - static inline v8::Local<v8::FunctionTemplate> GetConstructorTemplate( - Environment* env); - static v8::Local<v8::FunctionTemplate> GetConstructorTemplate( - IsolateData* isolate_data); - // Interface for transferring BaseObject instances using the .postMessage() // method of MessagePorts (and, by extension, Workers).
// GetTransferMode() returns a transfer mode that indicates how to deal with diff --git a/src/cares_wrap.cc b/src/cares_wrap.cc index c18a750239c2a2..444b1f73f9e93c 100644 --- a/src/cares_wrap.cc +++ b/src/cares_wrap.cc @@ -787,14 +787,15 @@ Maybe ParseSoaReply(Environment* env, } } // anonymous namespace -ChannelWrap::ChannelWrap( - Environment* env, - Local object, - int timeout, - int tries) +ChannelWrap::ChannelWrap(Environment* env, + Local object, + int timeout, + int tries, + int max_timeout) : AsyncWrap(env, object, PROVIDER_DNSCHANNEL), timeout_(timeout), - tries_(tries) { + tries_(tries), + max_timeout_(max_timeout) { MakeWeak(); Setup(); @@ -808,13 +809,15 @@ void ChannelWrap::MemoryInfo(MemoryTracker* tracker) const { void ChannelWrap::New(const FunctionCallbackInfo& args) { CHECK(args.IsConstructCall()); - CHECK_EQ(args.Length(), 2); + CHECK_EQ(args.Length(), 3); CHECK(args[0]->IsInt32()); CHECK(args[1]->IsInt32()); + CHECK(args[2]->IsInt32()); const int timeout = args[0].As()->Value(); const int tries = args[1].As()->Value(); + const int max_timeout = args[2].As()->Value(); Environment* env = Environment::GetCurrent(args); - new ChannelWrap(env, args.This(), timeout, tries); + new ChannelWrap(env, args.This(), timeout, tries, max_timeout); } GetAddrInfoReqWrap::GetAddrInfoReqWrap(Environment* env, @@ -879,9 +882,14 @@ void ChannelWrap::Setup() { } /* We do the call to ares_init_option for caller. */ - const int optmask = ARES_OPT_FLAGS | ARES_OPT_TIMEOUTMS | - ARES_OPT_SOCK_STATE_CB | ARES_OPT_TRIES | - ARES_OPT_QUERY_CACHE; + int optmask = ARES_OPT_FLAGS | ARES_OPT_TIMEOUTMS | ARES_OPT_SOCK_STATE_CB | + ARES_OPT_TRIES | ARES_OPT_QUERY_CACHE; + + if (max_timeout_ > 0) { + options.maxtimeout = max_timeout_; + optmask |= ARES_OPT_MAXTIMEOUTMS; + } + r = ares_init_options(&channel_, &options, optmask); if (r != ARES_SUCCESS) { diff --git a/src/cares_wrap.h b/src/cares_wrap.h index 081c8e0217a70f..dd62c2f6ff0527 100644 --- a/src/cares_wrap.h +++ b/src/cares_wrap.h @@ -151,11 +151,11 @@ struct NodeAresTask final : public MemoryRetainer { class ChannelWrap final : public AsyncWrap { public: - ChannelWrap( - Environment* env, - v8::Local object, - int timeout, - int tries); + ChannelWrap(Environment* env, + v8::Local object, + int timeout, + int tries, + int max_timeout); ~ChannelWrap() override; static void New(const v8::FunctionCallbackInfo& args); @@ -190,6 +190,7 @@ class ChannelWrap final : public AsyncWrap { bool library_inited_ = false; int timeout_; int tries_; + int max_timeout_; int active_query_count_ = 0; NodeAresTask::List task_list_; }; diff --git a/src/crypto/crypto_context.cc b/src/crypto/crypto_context.cc index 64b850089ec837..5e368e3fd93b56 100644 --- a/src/crypto/crypto_context.cc +++ b/src/crypto/crypto_context.cc @@ -27,6 +27,8 @@ #include #endif +#include + namespace node { using ncrypto::BignumPointer; @@ -81,10 +83,28 @@ static std::atomic has_cached_bundled_root_certs{false}; static std::atomic has_cached_system_root_certs{false}; static std::atomic has_cached_extra_root_certs{false}; +// Used for sets of X509. +struct X509Less { + bool operator()(const X509* lhs, const X509* rhs) const noexcept { + return X509_cmp(const_cast(lhs), const_cast(rhs)) < 0; + } +}; +using X509Set = std::set; + +// Per-thread root cert store. See NewRootCertStore() on what it contains. 
+static thread_local X509_STORE* root_cert_store = nullptr; +// If the user calls tls.setDefaultCACertificates() this will be used +// to hold the user-provided certificates, the root_cert_store and any new +// copy generated by NewRootCertStore() will then contain the certificates +// from this set. +static thread_local std::unique_ptr root_certs_from_users; + X509_STORE* GetOrCreateRootCertStore() { - // Guaranteed thread-safe by standard, just don't use -fno-threadsafe-statics. - static X509_STORE* store = NewRootCertStore(); - return store; + if (root_cert_store != nullptr) { + return root_cert_store; + } + root_cert_store = NewRootCertStore(); + return root_cert_store; } // Takes a string or buffer and loads it into a BIO. @@ -225,14 +245,11 @@ int SSL_CTX_use_certificate_chain(SSL_CTX* ctx, issuer); } -static unsigned long LoadCertsFromFile( // NOLINT(runtime/int) +static unsigned long LoadCertsFromBIO( // NOLINT(runtime/int) std::vector* certs, - const char* file) { + BIOPointer bio) { MarkPopErrorOnReturn mark_pop_error_on_return; - auto bio = BIOPointer::NewFile(file, "r"); - if (!bio) return ERR_get_error(); - while (X509* x509 = PEM_read_bio_X509( bio.get(), nullptr, NoPasswordCallback, nullptr)) { certs->push_back(x509); @@ -248,6 +265,17 @@ static unsigned long LoadCertsFromFile( // NOLINT(runtime/int) } } +static unsigned long LoadCertsFromFile( // NOLINT(runtime/int) + std::vector* certs, + const char* file) { + MarkPopErrorOnReturn mark_pop_error_on_return; + + auto bio = BIOPointer::NewFile(file, "r"); + if (!bio) return ERR_get_error(); + + return LoadCertsFromBIO(certs, std::move(bio)); +} + // Indicates the trust status of a certificate. enum class TrustStatus { // Trust status is unknown / uninitialized. @@ -829,11 +857,24 @@ static std::vector& GetExtraCACertificates() { // NODE_EXTRA_CA_CERTS are cached after first load. Certificates // from --use-system-ca are not cached and always reloaded from // disk. +// 8. If users have reset the root cert store by calling +// tls.setDefaultCACertificates(), the store will be populated with +// the certificates provided by users. // TODO(joyeecheung): maybe these rules need a bit of consolidation? X509_STORE* NewRootCertStore() { X509_STORE* store = X509_STORE_new(); CHECK_NOT_NULL(store); + // If the root cert store is already reset by users through + // tls.setDefaultCACertificates(), just create a copy from the + // user-provided certificates. + if (root_certs_from_users != nullptr) { + for (X509* cert : *root_certs_from_users) { + CHECK_EQ(1, X509_STORE_add_cert(store, cert)); + } + return store; + } + #ifdef NODE_OPENSSL_SYSTEM_CERT_PATH if constexpr (sizeof(NODE_OPENSSL_SYSTEM_CERT_PATH) > 1) { ERR_set_mark(); @@ -901,14 +942,57 @@ void GetBundledRootCertificates(const FunctionCallbackInfo& args) { Array::New(env->isolate(), result, arraysize(root_certs))); } +bool ArrayOfStringsToX509s(Local context, + Local cert_array, + std::vector* certs) { + ClearErrorOnReturn clear_error_on_return; + Isolate* isolate = context->GetIsolate(); + Environment* env = Environment::GetCurrent(context); + uint32_t array_length = cert_array->Length(); + + std::vector> cert_items; + if (FromV8Array(context, cert_array, &cert_items).IsNothing()) { + return false; + } + + for (uint32_t i = 0; i < array_length; i++) { + Local cert_val = cert_items[i].Get(isolate); + // Parse the PEM certificate. 
+ BIOPointer bio(LoadBIO(env, cert_val)); + if (!bio) { + ThrowCryptoError(env, ERR_get_error(), "Failed to load certificate data"); + return false; + } + + // Read all certificates from this PEM string + size_t start = certs->size(); + auto err = LoadCertsFromBIO(certs, std::move(bio)); + if (err != 0) { + size_t end = certs->size(); + // Clean up any certificates we've already parsed upon failure. + for (size_t j = start; j < end; ++j) { + X509_free((*certs)[j]); + } + ThrowCryptoError(env, err, "Failed to parse certificate"); + return false; + } + } + + return true; +} + +template MaybeLocal X509sToArrayOfStrings(Environment* env, - const std::vector& certs) { + It first, + It last, + size_t size) { ClearErrorOnReturn clear_error_on_return; EscapableHandleScope scope(env->isolate()); - LocalVector result(env->isolate(), certs.size()); - for (size_t i = 0; i < certs.size(); ++i) { - X509View view(certs[i]); + LocalVector result(env->isolate(), size); + size_t i = 0; + for (It cur = first; cur != last; ++cur, ++i) { + X509View view(*cur); auto pem_bio = view.toPEM(); if (!pem_bio) { ThrowCryptoError(env, ERR_get_error(), "X509 to PEM conversion"); @@ -933,10 +1017,87 @@ MaybeLocal X509sToArrayOfStrings(Environment* env, return scope.Escape(Array::New(env->isolate(), result.data(), result.size())); } +void GetUserRootCertificates(const FunctionCallbackInfo& args) { + Environment* env = Environment::GetCurrent(args); + CHECK_NOT_NULL(root_certs_from_users); + Local results; + if (X509sToArrayOfStrings(env, + root_certs_from_users->begin(), + root_certs_from_users->end(), + root_certs_from_users->size()) + .ToLocal(&results)) { + args.GetReturnValue().Set(results); + } +} + +void ResetRootCertStore(const FunctionCallbackInfo& args) { + Local context = args.GetIsolate()->GetCurrentContext(); + CHECK(args[0]->IsArray()); + Local cert_array = args[0].As(); + + if (cert_array->Length() == 0) { + // If the array is empty, just clear the user certs and reset the store. + if (root_cert_store != nullptr) { + X509_STORE_free(root_cert_store); + root_cert_store = nullptr; + } + + // Free any existing certificates in the old set. + if (root_certs_from_users != nullptr) { + for (X509* cert : *root_certs_from_users) { + X509_free(cert); + } + } + root_certs_from_users = std::make_unique(); + return; + } + + // Parse certificates from the array + std::unique_ptr> certs = + std::make_unique>(); + if (!ArrayOfStringsToX509s(context, cert_array, certs.get())) { + // Error already thrown by ArrayOfStringsToX509s + return; + } + + if (certs->empty()) { + Environment* env = Environment::GetCurrent(context); + return THROW_ERR_CRYPTO_OPERATION_FAILED( + env, "No valid certificates found in the provided array"); + } + + auto new_set = std::make_unique(); + for (X509* cert : *certs) { + auto [it, inserted] = new_set->insert(cert); + if (!inserted) { // Free duplicate certificates from the vector. + X509_free(cert); + } + } + + // Free any existing certificates in the old set. + if (root_certs_from_users != nullptr) { + for (X509* cert : *root_certs_from_users) { + X509_free(cert); + } + } + std::swap(root_certs_from_users, new_set); + + // Reset the global root cert store and create a new one with the + // certificates. + if (root_cert_store != nullptr) { + X509_STORE_free(root_cert_store); + } + + // TODO(joyeecheung): we can probably just reset it to nullptr + // and let the next call to NewRootCertStore() create a new one. 
+ root_cert_store = NewRootCertStore(); +} + void GetSystemCACertificates(const FunctionCallbackInfo& args) { Environment* env = Environment::GetCurrent(args); Local results; - if (X509sToArrayOfStrings(env, GetSystemStoreCACertificates()) + std::vector& certs = GetSystemStoreCACertificates(); + if (X509sToArrayOfStrings(env, certs.begin(), certs.end(), certs.size()) .ToLocal(&results)) { args.GetReturnValue().Set(results); } @@ -948,7 +1109,9 @@ void GetExtraCACertificates(const FunctionCallbackInfo& args) { return args.GetReturnValue().Set(Array::New(env->isolate())); } Local results; - if (X509sToArrayOfStrings(env, GetExtraCACertificates()).ToLocal(&results)) { + std::vector& certs = GetExtraCACertificates(); + if (X509sToArrayOfStrings(env, certs.begin(), certs.end(), certs.size()) + .ToLocal(&results)) { args.GetReturnValue().Set(results); } } @@ -1044,6 +1207,9 @@ void SecureContext::Initialize(Environment* env, Local target) { context, target, "getSystemCACertificates", GetSystemCACertificates); SetMethodNoSideEffect( context, target, "getExtraCACertificates", GetExtraCACertificates); + SetMethod(context, target, "resetRootCertStore", ResetRootCertStore); + SetMethodNoSideEffect( + context, target, "getUserRootCertificates", GetUserRootCertificates); } void SecureContext::RegisterExternalReferences( @@ -1086,6 +1252,8 @@ void SecureContext::RegisterExternalReferences( registry->Register(GetBundledRootCertificates); registry->Register(GetSystemCACertificates); registry->Register(GetExtraCACertificates); + registry->Register(ResetRootCertStore); + registry->Register(GetUserRootCertificates); } SecureContext* SecureContext::Create(Environment* env) { diff --git a/src/env_properties.h b/src/env_properties.h index d4961ac90fbc7f..5cb8dd86fe7127 100644 --- a/src/env_properties.h +++ b/src/env_properties.h @@ -461,6 +461,7 @@ V(tcp_constructor_template, v8::FunctionTemplate) \ V(tty_constructor_template, v8::FunctionTemplate) \ V(write_wrap_template, v8::ObjectTemplate) \ + V(worker_cpu_usage_taker_template, v8::ObjectTemplate) \ V(worker_heap_snapshot_taker_template, v8::ObjectTemplate) \ V(worker_heap_statistics_taker_template, v8::ObjectTemplate) \ V(x509_constructor_template, v8::FunctionTemplate) diff --git a/src/inspector/io_agent.cc b/src/inspector/io_agent.cc new file mode 100644 index 00000000000000..c66c091d69b747 --- /dev/null +++ b/src/inspector/io_agent.cc @@ -0,0 +1,57 @@ +#include "io_agent.h" +#include +#include +#include +#include +#include "crdtp/dispatch.h" +#include "inspector/network_resource_manager.h" + +namespace node::inspector::protocol { + +void IoAgent::Wire(UberDispatcher* dispatcher) { + frontend_ = std::make_shared(dispatcher->channel()); + IO::Dispatcher::wire(dispatcher, this); +} + +DispatchResponse IoAgent::read(const String& in_handle, + std::optional in_offset, + std::optional in_size, + String* out_data, + bool* out_eof) { + std::string url = in_handle; + std::string txt = network_resource_manager_->Get(url); + std::string_view txt_view(txt); + + int offset = 0; + bool offset_was_specified = false; + if (in_offset.has_value()) { + offset = *in_offset; + offset_was_specified = true; + } else if (offset_map_.contains(url)) { + offset = offset_map_[url]; + } + int size = 1 << 20; + if (in_size.has_value()) { + size = *in_size; + } + if (static_cast(offset) < txt_view.length()) { + std::string_view out_view = txt_view.substr(offset, size); + out_data->assign(out_view.data(), out_view.size()); + *out_eof = false; + if (!offset_was_specified) { + 
offset_map_[url] = offset + size; + } + } else { + *out_data = ""; + *out_eof = true; + } + + return DispatchResponse::Success(); +} + +DispatchResponse IoAgent::close(const String& in_handle) { + std::string url = in_handle; + network_resource_manager_->Erase(url); + return DispatchResponse::Success(); +} +} // namespace node::inspector::protocol diff --git a/src/inspector/io_agent.h b/src/inspector/io_agent.h new file mode 100644 index 00000000000000..4a12311bae32de --- /dev/null +++ b/src/inspector/io_agent.h @@ -0,0 +1,30 @@ +#ifndef SRC_INSPECTOR_IO_AGENT_H_ +#define SRC_INSPECTOR_IO_AGENT_H_ + +#include +#include "inspector/network_resource_manager.h" +#include "node/inspector/protocol/IO.h" + +namespace node::inspector::protocol { + +class IoAgent : public IO::Backend { + public: + explicit IoAgent( + std::shared_ptr network_resource_manager) + : network_resource_manager_(std::move(network_resource_manager)) {} + void Wire(UberDispatcher* dispatcher); + DispatchResponse read(const String& in_handle, + std::optional in_offset, + std::optional in_size, + String* out_data, + bool* out_eof) override; + DispatchResponse close(const String& in_handle) override; + + private: + std::shared_ptr frontend_; + std::unordered_map offset_map_ = + {}; // Maps stream_id to offset + std::shared_ptr network_resource_manager_; +}; +} // namespace node::inspector::protocol +#endif // SRC_INSPECTOR_IO_AGENT_H_ diff --git a/src/inspector/network_agent.cc b/src/inspector/network_agent.cc index 496b5c41a0bfc2..3b5d9615021101 100644 --- a/src/inspector/network_agent.cc +++ b/src/inspector/network_agent.cc @@ -1,8 +1,14 @@ #include "network_agent.h" +#include #include "debug_utils-inl.h" +#include "env-inl.h" +#include "inspector/network_resource_manager.h" #include "inspector/protocol_helper.h" #include "network_inspector.h" +#include "node_metadata.h" #include "util-inl.h" +#include "uv.h" +#include "v8-context.h" #include "v8.h" namespace node { @@ -202,9 +208,15 @@ std::unique_ptr createResponseFromObject( .build(); } -NetworkAgent::NetworkAgent(NetworkInspector* inspector, - v8_inspector::V8Inspector* v8_inspector) - : inspector_(inspector), v8_inspector_(v8_inspector) { +NetworkAgent::NetworkAgent( + NetworkInspector* inspector, + v8_inspector::V8Inspector* v8_inspector, + Environment* env, + std::shared_ptr network_resource_manager) + : inspector_(inspector), + v8_inspector_(v8_inspector), + env_(env), + network_resource_manager_(std::move(network_resource_manager)) { event_notifier_map_["requestWillBeSent"] = &NetworkAgent::requestWillBeSent; event_notifier_map_["responseReceived"] = &NetworkAgent::responseReceived; event_notifier_map_["loadingFailed"] = &NetworkAgent::loadingFailed; @@ -329,10 +341,38 @@ protocol::DispatchResponse NetworkAgent::streamResourceContent( // If the request is finished, remove the entry. requests_.erase(in_requestId); } - return protocol::DispatchResponse::Success(); } +protocol::DispatchResponse NetworkAgent::loadNetworkResource( + const protocol::String& in_url, + std::unique_ptr* + out_resource) { + if (!env_->options()->experimental_inspector_network_resource) { + return protocol::DispatchResponse::ServerError( + "Network resource loading is not enabled. 
This feature is " + "experimental and requires --experimental-inspector-network-resource " + "flag to be set."); + } + CHECK_NOT_NULL(network_resource_manager_); + std::string data = network_resource_manager_->Get(in_url); + bool found = !data.empty(); + if (found) { + auto result = protocol::Network::LoadNetworkResourcePageResult::create() + .setSuccess(true) + .setStream(in_url) + .build(); + *out_resource = std::move(result); + return protocol::DispatchResponse::Success(); + } else { + auto result = protocol::Network::LoadNetworkResourcePageResult::create() + .setSuccess(false) + .build(); + *out_resource = std::move(result); + return protocol::DispatchResponse::Success(); + } +} + void NetworkAgent::requestWillBeSent(v8::Local context, v8::Local params) { protocol::String request_id; diff --git a/src/inspector/network_agent.h b/src/inspector/network_agent.h index c5303885d61e18..f814c7f5cf6662 100644 --- a/src/inspector/network_agent.h +++ b/src/inspector/network_agent.h @@ -1,9 +1,13 @@ #ifndef SRC_INSPECTOR_NETWORK_AGENT_H_ #define SRC_INSPECTOR_NETWORK_AGENT_H_ +#include "env.h" +#include "io_agent.h" +#include "network_resource_manager.h" #include "node/inspector/protocol/Network.h" #include +#include #include namespace node { @@ -38,8 +42,11 @@ struct RequestEntry { class NetworkAgent : public protocol::Network::Backend { public: - explicit NetworkAgent(NetworkInspector* inspector, - v8_inspector::V8Inspector* v8_inspector); + explicit NetworkAgent( + NetworkInspector* inspector, + v8_inspector::V8Inspector* v8_inspector, + Environment* env, + std::shared_ptr network_resource_manager); void Wire(protocol::UberDispatcher* dispatcher); @@ -60,6 +67,11 @@ class NetworkAgent : public protocol::Network::Backend { const protocol::String& in_requestId, protocol::Binary* out_bufferedData) override; + protocol::DispatchResponse loadNetworkResource( + const protocol::String& in_url, + std::unique_ptr* + out_resource) override; + void emitNotification(v8::Local context, const protocol::String& event, v8::Local params); @@ -89,6 +101,8 @@ class NetworkAgent : public protocol::Network::Backend { v8::Local); std::unordered_map event_notifier_map_; std::map requests_; + Environment* env_; + std::shared_ptr network_resource_manager_; }; } // namespace inspector diff --git a/src/inspector/network_inspector.cc b/src/inspector/network_inspector.cc index e93db7bbe922f6..3a79ba988571ca 100644 --- a/src/inspector/network_inspector.cc +++ b/src/inspector/network_inspector.cc @@ -3,10 +3,15 @@ namespace node { namespace inspector { -NetworkInspector::NetworkInspector(Environment* env, - v8_inspector::V8Inspector* v8_inspector) - : enabled_(false), env_(env) { - network_agent_ = std::make_unique(this, v8_inspector); +NetworkInspector::NetworkInspector( + Environment* env, + v8_inspector::V8Inspector* v8_inspector, + std::shared_ptr network_resource_manager) + : enabled_(false), + env_(env), + network_resource_manager_(std::move(network_resource_manager)) { + network_agent_ = std::make_unique( + this, v8_inspector, env, network_resource_manager_); } NetworkInspector::~NetworkInspector() { network_agent_.reset(); diff --git a/src/inspector/network_inspector.h b/src/inspector/network_inspector.h index 4095a05394cd8a..bcdca254230ea9 100644 --- a/src/inspector/network_inspector.h +++ b/src/inspector/network_inspector.h @@ -1,8 +1,10 @@ #ifndef SRC_INSPECTOR_NETWORK_INSPECTOR_H_ #define SRC_INSPECTOR_NETWORK_INSPECTOR_H_ +#include #include "env.h" #include "network_agent.h" +#include 
"network_resource_manager.h" namespace node { class Environment; @@ -11,8 +13,10 @@ namespace inspector { class NetworkInspector { public: - explicit NetworkInspector(Environment* env, - v8_inspector::V8Inspector* v8_inspector); + explicit NetworkInspector( + Environment* env, + v8_inspector::V8Inspector* v8_inspector, + std::shared_ptr network_resource_manager); ~NetworkInspector(); void Wire(protocol::UberDispatcher* dispatcher); @@ -32,6 +36,7 @@ class NetworkInspector { bool enabled_; Environment* env_; std::unique_ptr network_agent_; + std::shared_ptr network_resource_manager_; }; } // namespace inspector diff --git a/src/inspector/network_resource_manager.cc b/src/inspector/network_resource_manager.cc new file mode 100644 index 00000000000000..47c625164694ee --- /dev/null +++ b/src/inspector/network_resource_manager.cc @@ -0,0 +1,29 @@ +#include "inspector/network_resource_manager.h" +#include +#include +#include +#include + +namespace node { +namespace inspector { + +void NetworkResourceManager::Put(const std::string& url, + const std::string& data) { + Mutex::ScopedLock lock(mutex_); + resources_[url] = data; +} + +std::string NetworkResourceManager::Get(const std::string& url) { + Mutex::ScopedLock lock(mutex_); + auto it = resources_.find(url); + if (it != resources_.end()) return it->second; + return {}; +} + +void NetworkResourceManager::Erase(const std::string& stream_id) { + Mutex::ScopedLock lock(mutex_); + resources_.erase(stream_id); +} + +} // namespace inspector +} // namespace node diff --git a/src/inspector/network_resource_manager.h b/src/inspector/network_resource_manager.h new file mode 100644 index 00000000000000..10a058ddc48bd4 --- /dev/null +++ b/src/inspector/network_resource_manager.h @@ -0,0 +1,29 @@ +// network_resource_manager.h +#ifndef SRC_INSPECTOR_NETWORK_RESOURCE_MANAGER_H_ +#define SRC_INSPECTOR_NETWORK_RESOURCE_MANAGER_H_ + +#include +#include +#include +#include "node_mutex.h" + +namespace node { +namespace inspector { + +class NetworkResourceManager { + public: + void Put(const std::string& url, const std::string& data); + std::string Get(const std::string& url); + + // Erase resource and mapping by stream id + void Erase(const std::string& stream_id); + + private: + std::unordered_map resources_; + Mutex mutex_; // Protects access to resources_ +}; + +} // namespace inspector +} // namespace node + +#endif // SRC_INSPECTOR_NETWORK_RESOURCE_MANAGER_H_ diff --git a/src/inspector/node_inspector.gypi b/src/inspector/node_inspector.gypi index 176663780afc95..ad81f837e84d76 100644 --- a/src/inspector/node_inspector.gypi +++ b/src/inspector/node_inspector.gypi @@ -36,6 +36,10 @@ 'src/inspector/target_agent.h', 'src/inspector/worker_inspector.cc', 'src/inspector/worker_inspector.h', + 'src/inspector/io_agent.cc', + 'src/inspector/io_agent.h', + 'src/inspector/network_resource_manager.cc', + 'src/inspector/network_resource_manager.h', ], 'node_inspector_generated_sources': [ '<(SHARED_INTERMEDIATE_DIR)/src/node/inspector/protocol/Forward.h', @@ -51,6 +55,8 @@ '<(SHARED_INTERMEDIATE_DIR)/src/node/inspector/protocol/Network.h', '<(SHARED_INTERMEDIATE_DIR)/src/node/inspector/protocol/Target.cpp', '<(SHARED_INTERMEDIATE_DIR)/src/node/inspector/protocol/Target.h', + '<(SHARED_INTERMEDIATE_DIR)/src/node/inspector/protocol/IO.h', + '<(SHARED_INTERMEDIATE_DIR)/src/node/inspector/protocol/IO.cpp', ], 'node_protocol_files': [ '<(protocol_tool_path)/lib/Forward_h.template', diff --git a/src/inspector/node_protocol.pdl b/src/inspector/node_protocol.pdl index 
3608bfd317022c..46631bc20ad081 100644 --- a/src/inspector/node_protocol.pdl +++ b/src/inspector/node_protocol.pdl @@ -180,6 +180,11 @@ experimental domain Network # Request / response headers as keys / values of JSON object. type Headers extends object + type LoadNetworkResourcePageResult extends object + properties + boolean success + optional IO.StreamHandle stream + # Disables network tracking, prevents network events from being sent to the client. command disable @@ -215,6 +220,13 @@ experimental domain Network returns # Data that has been buffered until streaming is enabled. binary bufferedData + # Fetches the resource and returns the content. + command loadNetworkResource + parameters + # URL of the resource to get content for. + string url + returns + LoadNetworkResourcePageResult resource # Fired when page is about to send HTTP request. event requestWillBeSent @@ -321,3 +333,24 @@ experimental domain Target parameters boolean autoAttach boolean waitForDebuggerOnStart +domain IO + type StreamHandle extends string + # Read a chunk of the stream + command read + parameters + # Handle of the stream to read. + StreamHandle handle + # Seek to the specified offset before reading (if not specified, proceed with offset + # following the last read). Some types of streams may only support sequential reads. + optional integer offset + # Maximum number of bytes to read (left upon the agent discretion if not specified). + optional integer size + returns + # Data that were read. + string data + # Set if the end-of-file condition occurred while reading. + boolean eof + command close + parameters + # Handle of the stream to close. + StreamHandle handle diff --git a/src/inspector/worker_inspector.cc b/src/inspector/worker_inspector.cc index fd479181e9566c..58c7d2602b327e 100644 --- a/src/inspector/worker_inspector.cc +++ b/src/inspector/worker_inspector.cc @@ -60,12 +60,14 @@ ParentInspectorHandle::ParentInspectorHandle( const std::string& url, std::shared_ptr parent_thread, bool wait_for_connect, - const std::string& name) + const std::string& name, + std::shared_ptr network_resource_manager) : id_(id), url_(url), parent_thread_(parent_thread), wait_(wait_for_connect), - name_(name) {} + name_(name), + network_resource_manager_(network_resource_manager) {} ParentInspectorHandle::~ParentInspectorHandle() { parent_thread_->Post( @@ -101,10 +103,13 @@ void WorkerManager::WorkerStarted(uint64_t session_id, } std::unique_ptr WorkerManager::NewParentHandle( - uint64_t thread_id, const std::string& url, const std::string& name) { + uint64_t thread_id, + const std::string& url, + const std::string& name, + std::shared_ptr network_resource_manager) { bool wait = !delegates_waiting_on_start_.empty(); return std::make_unique( - thread_id, url, thread_, wait, name); + thread_id, url, thread_, wait, name, network_resource_manager); } void WorkerManager::RemoveAttachDelegate(int id) { diff --git a/src/inspector/worker_inspector.h b/src/inspector/worker_inspector.h index 24403bb1704c40..28a249aea4d91c 100644 --- a/src/inspector/worker_inspector.h +++ b/src/inspector/worker_inspector.h @@ -1,6 +1,7 @@ #ifndef SRC_INSPECTOR_WORKER_INSPECTOR_H_ #define SRC_INSPECTOR_WORKER_INSPECTOR_H_ +#include "inspector/network_resource_manager.h" #if !HAVE_INSPECTOR #error("This header can only be used when inspector is enabled") #endif @@ -54,16 +55,18 @@ struct WorkerInfo { class ParentInspectorHandle { public: - ParentInspectorHandle(uint64_t id, - const std::string& url, - std::shared_ptr parent_thread, - bool 
wait_for_connect, - const std::string& name); + ParentInspectorHandle( + uint64_t id, + const std::string& url, + std::shared_ptr parent_thread, + bool wait_for_connect, + const std::string& name, + std::shared_ptr network_resource_manager); ~ParentInspectorHandle(); std::unique_ptr NewParentInspectorHandle( uint64_t thread_id, const std::string& url, const std::string& name) { return std::make_unique( - thread_id, url, parent_thread_, wait_, name); + thread_id, url, parent_thread_, wait_, name, network_resource_manager_); } void WorkerStarted(std::shared_ptr worker_thread, bool waiting); @@ -74,6 +77,9 @@ class ParentInspectorHandle { std::unique_ptr Connect( std::unique_ptr delegate, bool prevent_shutdown); + std::shared_ptr GetNetworkResourceManager() { + return network_resource_manager_; + } private: uint64_t id_; @@ -81,6 +87,7 @@ class ParentInspectorHandle { std::shared_ptr parent_thread_; bool wait_; std::string name_; + std::shared_ptr network_resource_manager_; }; class WorkerManager : public std::enable_shared_from_this { @@ -89,7 +96,10 @@ class WorkerManager : public std::enable_shared_from_this { : thread_(thread) {} std::unique_ptr NewParentHandle( - uint64_t thread_id, const std::string& url, const std::string& name); + uint64_t thread_id, + const std::string& url, + const std::string& name, + std::shared_ptr network_resource_manager); void WorkerStarted(uint64_t session_id, const WorkerInfo& info, bool waiting); void WorkerFinished(uint64_t session_id); std::unique_ptr SetAutoAttach( diff --git a/src/inspector_agent.cc b/src/inspector_agent.cc index fa6eb29e6e5829..aca49fddce0f05 100644 --- a/src/inspector_agent.cc +++ b/src/inspector_agent.cc @@ -13,6 +13,7 @@ #include "inspector/worker_agent.h" #include "inspector/worker_inspector.h" #include "inspector_io.h" +#include "node.h" #include "node/inspector/protocol/Protocol.h" #include "node_errors.h" #include "node_internals.h" @@ -121,12 +122,11 @@ static int StartDebugSignalHandler() { CHECK_EQ(0, pthread_sigmask(SIG_SETMASK, &sigmask, &savemask)); sigmask = savemask; pthread_t thread; - const int err = pthread_create(&thread, &attr, - StartIoThreadMain, nullptr); - // Restore original mask - CHECK_EQ(0, pthread_sigmask(SIG_SETMASK, &sigmask, nullptr)); + const int err = pthread_create(&thread, &attr, StartIoThreadMain, nullptr); CHECK_EQ(0, pthread_attr_destroy(&attr)); if (err != 0) { + // Restore original mask + CHECK_EQ(0, pthread_sigmask(SIG_SETMASK, &sigmask, nullptr)); fprintf(stderr, "node[%u]: pthread_create: %s\n", uv_os_getpid(), strerror(err)); fflush(stderr); @@ -135,6 +135,8 @@ static int StartDebugSignalHandler() { return -err; } RegisterSignalHandler(SIGUSR1, StartIoThreadWakeup); + // Restore original mask + CHECK_EQ(0, pthread_sigmask(SIG_SETMASK, &sigmask, nullptr)); // Unblock SIGUSR1. A pending SIGUSR1 signal will now be delivered. 
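Stepping back to the feature this diff introduces: NetworkResourceManager (src/inspector/network_resource_manager.{h,cc} above) is a mutex-guarded URL-to-data map handed around as a single std::shared_ptr, so the main-thread agent and worker ParentInspectorHandles all see the same store, and Network.loadNetworkResource only reports success for URLs previously registered with Put(). Below is a minimal standalone sketch of that store-and-lookup pattern; it uses std::mutex instead of Node's internal Mutex wrapper, and the class and URL names are illustrative only.

#include <iostream>
#include <memory>
#include <mutex>
#include <string>
#include <unordered_map>

// Simplified analogue of NetworkResourceManager: every accessor takes the
// lock, and Get() returns an empty string for unknown URLs, which the
// loadNetworkResource handler treats as "not found".
class ResourceStore {
 public:
  void Put(const std::string& url, const std::string& data) {
    std::lock_guard<std::mutex> lock(mutex_);
    resources_[url] = data;
  }
  std::string Get(const std::string& url) {
    std::lock_guard<std::mutex> lock(mutex_);
    auto it = resources_.find(url);
    return it != resources_.end() ? it->second : std::string{};
  }
  void Erase(const std::string& url) {
    std::lock_guard<std::mutex> lock(mutex_);
    resources_.erase(url);
  }

 private:
  std::unordered_map<std::string, std::string> resources_;
  std::mutex mutex_;
};

int main() {
  // A single shared instance is what the agents and worker handles share.
  auto store = std::make_shared<ResourceStore>();
  store->Put("https://example.com/app.js", "console.log('hi')");
  std::cout << (store->Get("https://example.com/app.js").empty()
                    ? "not found\n" : "found\n");   // found
  std::cout << (store->Get("https://example.com/missing").empty()
                    ? "not found\n" : "found\n");   // not found
}

Sharing one shared_ptr rather than copying the map appears to be the point of threading it through ParentInspectorHandle: a resource put on the main thread stays visible to a worker's inspector session.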
sigemptyset(&sigmask); sigaddset(&sigmask, SIGUSR1); @@ -238,8 +240,18 @@ class ChannelImpl final : public v8_inspector::V8Inspector::Channel, } runtime_agent_ = std::make_unique(); runtime_agent_->Wire(node_dispatcher_.get()); - network_inspector_ = - std::make_unique(env, inspector.get()); + if (env->options()->experimental_inspector_network_resource) { + io_agent_ = std::make_unique( + env->inspector_agent()->GetNetworkResourceManager()); + io_agent_->Wire(node_dispatcher_.get()); + network_inspector_ = std::make_unique( + env, + inspector.get(), + env->inspector_agent()->GetNetworkResourceManager()); + } else { + network_inspector_ = + std::make_unique(env, inspector.get(), nullptr); + } network_inspector_->Wire(node_dispatcher_.get()); if (env->options()->experimental_worker_inspection) { target_agent_ = std::make_shared(); @@ -405,6 +417,7 @@ class ChannelImpl final : public v8_inspector::V8Inspector::Channel, std::unique_ptr worker_agent_; std::shared_ptr target_agent_; std::unique_ptr network_inspector_; + std::shared_ptr io_agent_; std::unique_ptr delegate_; std::unique_ptr session_; std::unique_ptr node_dispatcher_; @@ -1153,7 +1166,8 @@ std::unique_ptr Agent::GetParentHandle( CHECK_NOT_NULL(client_); if (!parent_handle_) { - return client_->getWorkerManager()->NewParentHandle(thread_id, url, name); + return client_->getWorkerManager()->NewParentHandle( + thread_id, url, name, GetNetworkResourceManager()); } else { return parent_handle_->NewParentInspectorHandle(thread_id, url, name); } @@ -1219,6 +1233,17 @@ std::shared_ptr Agent::GetWorkerManager() { return client_->getWorkerManager(); } +std::shared_ptr Agent::GetNetworkResourceManager() { + if (parent_handle_) { + return parent_handle_->GetNetworkResourceManager(); + } else if (network_resource_manager_) { + return network_resource_manager_; + } else { + network_resource_manager_ = std::make_shared(); + return network_resource_manager_; + } +} + std::string Agent::GetWsUrl() const { if (io_ == nullptr) return ""; diff --git a/src/inspector_agent.h b/src/inspector_agent.h index ad7a8e6c069968..e43dced8f410f3 100644 --- a/src/inspector_agent.h +++ b/src/inspector_agent.h @@ -1,5 +1,6 @@ #pragma once +#include "inspector/network_resource_manager.h" #if defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS #if !HAVE_INSPECTOR @@ -127,6 +128,7 @@ class Agent { std::shared_ptr GetWorkerManager(); inline Environment* env() const { return parent_env_; } + std::shared_ptr GetNetworkResourceManager(); private: void ToggleAsyncHook(v8::Isolate* isolate, v8::Local fn); @@ -153,6 +155,7 @@ class Agent { bool network_tracking_enabled_ = false; bool pending_enable_network_tracking = false; bool pending_disable_network_tracking = false; + std::shared_ptr network_resource_manager_; }; } // namespace inspector diff --git a/src/inspector_js_api.cc b/src/inspector_js_api.cc index 69029247accf5b..64823c68b11e94 100644 --- a/src/inspector_js_api.cc +++ b/src/inspector_js_api.cc @@ -1,4 +1,5 @@ #include "base_object-inl.h" +#include "inspector/network_resource_manager.h" #include "inspector/protocol_helper.h" #include "inspector_agent.h" #include "inspector_io.h" @@ -334,6 +335,18 @@ void Url(const FunctionCallbackInfo& args) { args.GetReturnValue().Set(OneByteString(env->isolate(), url)); } +void PutNetworkResource(const v8::FunctionCallbackInfo& args) { + Environment* env = Environment::GetCurrent(args); + CHECK_GE(args.Length(), 2); + CHECK(args[0]->IsString()); + CHECK(args[1]->IsString()); + + Utf8Value url(env->isolate(), args[0].As()); + 
Utf8Value data(env->isolate(), args[1].As()); + + env->inspector_agent()->GetNetworkResourceManager()->Put(*url, *data); +} + void Initialize(Local target, Local unused, Local context, void* priv) { Environment* env = Environment::GetCurrent(context); @@ -378,6 +391,7 @@ void Initialize(Local target, Local unused, SetMethodNoSideEffect(context, target, "isEnabled", IsEnabled); SetMethod(context, target, "emitProtocolEvent", EmitProtocolEvent); SetMethod(context, target, "setupNetworkTracking", SetupNetworkTracking); + SetMethod(context, target, "putNetworkResource", PutNetworkResource); Local console_string = FIXED_ONE_BYTE_STRING(isolate, "console"); @@ -420,6 +434,7 @@ void RegisterExternalReferences(ExternalReferenceRegistry* registry) { registry->Register(JSBindingsConnection::New); registry->Register(JSBindingsConnection::Dispatch); registry->Register(JSBindingsConnection::Disconnect); + registry->Register(PutNetworkResource); } } // namespace inspector diff --git a/src/inspector_profiler.cc b/src/inspector_profiler.cc index f09dd04ccd7f6e..dd57c779595c80 100644 --- a/src/inspector_profiler.cc +++ b/src/inspector_profiler.cc @@ -8,6 +8,7 @@ #include "node_file.h" #include "node_internals.h" #include "util-inl.h" +#include "uv.h" #include "v8-inspector.h" #include @@ -465,6 +466,27 @@ static void EndStartedProfilers(Environment* env) { } } +static std::string ReplacePlaceholders(const std::string& pattern) { + std::string result = pattern; + + static const std::unordered_map> + kPlaceholderMap = { + {"${pid}", []() { return std::to_string(uv_os_getpid()); }}, + // TODO(haramj): Add more placeholders as needed. + }; + + for (const auto& [placeholder, getter] : kPlaceholderMap) { + size_t pos = 0; + while ((pos = result.find(placeholder, pos)) != std::string::npos) { + const std::string value = getter(); + result.replace(pos, placeholder.length(), value); + pos += value.length(); + } + } + + return result; +} + void StartProfilers(Environment* env) { AtExit(env, [](void* env) { EndStartedProfilers(static_cast(env)); @@ -486,7 +508,9 @@ void StartProfilers(Environment* env) { DiagnosticFilename filename(env, "CPU", "cpuprofile"); env->set_cpu_prof_name(*filename); } else { - env->set_cpu_prof_name(env->options()->cpu_prof_name); + std::string resolved_name = + ReplacePlaceholders(env->options()->cpu_prof_name); + env->set_cpu_prof_name(resolved_name); } CHECK_NULL(env->cpu_profiler_connection()); env->set_cpu_profiler_connection( diff --git a/src/inspector_profiler.h b/src/inspector_profiler.h index fd741c1f1ff659..d07334c818d90e 100644 --- a/src/inspector_profiler.h +++ b/src/inspector_profiler.h @@ -65,9 +65,7 @@ class V8ProfilerConnection { simdjson::ondemand::object* result); virtual void WriteProfile(simdjson::ondemand::object* result); - bool HasProfileId(uint64_t id) const { - return profile_ids_.find(id) != profile_ids_.end(); - } + bool HasProfileId(uint64_t id) const { return profile_ids_.contains(id); } void RemoveProfileId(uint64_t id) { profile_ids_.erase(id); } diff --git a/src/node.cc b/src/node.cc index c0d0b734edfa72..1b5e989e5456a9 100644 --- a/src/node.cc +++ b/src/node.cc @@ -826,6 +826,11 @@ static ExitCode ProcessGlobalArgsInternal(std::vector* args, env_opts->abort_on_uncaught_exception = true; } + // Support stable Phase 5 WebAssembly proposals + v8_args.emplace_back("--experimental-wasm-imported-strings"); + v8_args.emplace_back("--experimental-wasm-memory64"); + v8_args.emplace_back("--experimental-wasm-exnref"); + #ifdef __POSIX__ // Block SIGPROF signals when 
sleeping in epoll_wait/kevent/etc. Avoids the // performance penalty of frequent EINTR wakeups when the profiler is running. @@ -912,6 +917,15 @@ static ExitCode InitializeNodeWithArgsInternal( // default value. V8::SetFlagsFromString("--rehash-snapshot"); +#if HAVE_OPENSSL + // TODO(joyeecheung): make this a per-env option and move the normalization + // into HandleEnvOptions. + std::string use_system_ca; + if (credentials::SafeGetenv("NODE_USE_SYSTEM_CA", &use_system_ca) && + use_system_ca == "1") { + per_process::cli_options->use_system_ca = true; + } +#endif // HAVE_OPENSSL HandleEnvOptions(per_process::cli_options->per_isolate->per_env); std::string node_options; diff --git a/src/node_blob.cc b/src/node_blob.cc index 3df23669ceb80b..9b9956f5ee3150 100644 --- a/src/node_blob.cc +++ b/src/node_blob.cc @@ -534,11 +534,11 @@ void BlobBindingData::store_data_object( } void BlobBindingData::revoke_data_object(const std::string& uuid) { - if (data_objects_.find(uuid) == data_objects_.end()) { + if (!data_objects_.contains(uuid)) { return; } data_objects_.erase(uuid); - CHECK_EQ(data_objects_.find(uuid), data_objects_.end()); + CHECK(!data_objects_.contains(uuid)); } BlobBindingData::StoredDataObject BlobBindingData::get_data_object( diff --git a/src/node_buffer.cc b/src/node_buffer.cc index d0338b8a2d9cd3..e39852c8e0392e 100644 --- a/src/node_buffer.cc +++ b/src/node_buffer.cc @@ -1046,8 +1046,9 @@ void IndexOfBuffer(const FunctionCallbackInfo& args) { enum encoding enc = static_cast(args[3].As()->Value()); - THROW_AND_RETURN_UNLESS_BUFFER(Environment::GetCurrent(args), args[0]); - THROW_AND_RETURN_UNLESS_BUFFER(Environment::GetCurrent(args), args[1]); + Environment* env = Environment::GetCurrent(args); + THROW_AND_RETURN_UNLESS_BUFFER(env, args[0]); + THROW_AND_RETURN_UNLESS_BUFFER(env, args[1]); ArrayBufferViewContents haystack_contents(args[0]); ArrayBufferViewContents needle_contents(args[1]); int64_t offset_i64 = args[2].As()->Value(); diff --git a/src/node_builtins.cc b/src/node_builtins.cc index abf1583cdac9f1..b4acc40618e372 100644 --- a/src/node_builtins.cc +++ b/src/node_builtins.cc @@ -307,7 +307,7 @@ MaybeLocal BuiltinLoader::LookupAndCompileInternal( if (should_eager_compile_) { options = ScriptCompiler::kEagerCompile; } else if (!to_eager_compile_.empty()) { - if (to_eager_compile_.find(id) != to_eager_compile_.end()) { + if (to_eager_compile_.contains(id)) { options = ScriptCompiler::kEagerCompile; } } diff --git a/src/node_debug.cc b/src/node_debug.cc index 0f254b1fbfe820..bb0e11747e418c 100644 --- a/src/node_debug.cc +++ b/src/node_debug.cc @@ -23,13 +23,14 @@ using v8::Number; using v8::Object; using v8::Value; -thread_local std::unordered_map v8_fast_api_call_counts; +thread_local std::unordered_map + v8_fast_api_call_counts; -void TrackV8FastApiCall(std::string_view key) { +void TrackV8FastApiCall(FastStringKey key) { v8_fast_api_call_counts[key]++; } -int GetV8FastApiCallCount(std::string_view key) { +int GetV8FastApiCallCount(FastStringKey key) { return v8_fast_api_call_counts[key]; } @@ -40,7 +41,8 @@ void GetV8FastApiCallCount(const FunctionCallbackInfo& args) { return; } Utf8Value utf8_key(env->isolate(), args[0]); - args.GetReturnValue().Set(GetV8FastApiCallCount(utf8_key.ToStringView())); + args.GetReturnValue().Set(GetV8FastApiCallCount( + FastStringKey::AllowDynamic(utf8_key.ToStringView()))); } void SlowIsEven(const FunctionCallbackInfo& args) { diff --git a/src/node_debug.h b/src/node_debug.h index f42aeb8ba01162..32fbf21c813366 100644 --- a/src/node_debug.h 
+++ b/src/node_debug.h @@ -3,17 +3,18 @@ #if defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS #ifdef DEBUG -#include +#include "util.h" #endif // DEBUG namespace node { namespace debug { #ifdef DEBUG -void TrackV8FastApiCall(std::string_view key); -int GetV8FastApiCallCount(std::string_view key); +void TrackV8FastApiCall(FastStringKey key); +int GetV8FastApiCallCount(FastStringKey key); -#define TRACK_V8_FAST_API_CALL(key) node::debug::TrackV8FastApiCall(key) +#define TRACK_V8_FAST_API_CALL(key) \ + node::debug::TrackV8FastApiCall(FastStringKey(key)) #else // !DEBUG #define TRACK_V8_FAST_API_CALL(key) #endif // DEBUG diff --git a/src/node_env_var.cc b/src/node_env_var.cc index 8568f3d1fb6b8b..492d5f455f45a5 100644 --- a/src/node_env_var.cc +++ b/src/node_env_var.cc @@ -258,7 +258,7 @@ void MapKVStore::Set(Isolate* isolate, Local key, Local value) { int32_t MapKVStore::Query(const char* key) const { Mutex::ScopedLock lock(mutex_); - return map_.find(key) == map_.end() ? -1 : 0; + return map_.contains(key) ? 0 : -1; } int32_t MapKVStore::Query(Isolate* isolate, Local key) const { diff --git a/src/node_file.cc b/src/node_file.cc index 7d174113a22cb2..c8e7e341b1d9f5 100644 --- a/src/node_file.cc +++ b/src/node_file.cc @@ -1140,6 +1140,7 @@ static void LStat(const FunctionCallbackInfo& args) { bool use_bigint = args[1]->IsTrue(); if (!args[2]->IsUndefined()) { // lstat(path, use_bigint, req) FSReqBase* req_wrap_async = GetReqWrap(args, 2, use_bigint); + CHECK_NOT_NULL(req_wrap_async); FS_ASYNC_TRACE_BEGIN1( UV_FS_LSTAT, req_wrap_async, "path", TRACE_STR_COPY(*path)) AsyncCall(env, req_wrap_async, args, "lstat", UTF8, AfterStat, @@ -1182,6 +1183,7 @@ static void FStat(const FunctionCallbackInfo& args) { bool use_bigint = args[1]->IsTrue(); if (!args[2]->IsUndefined()) { // fstat(fd, use_bigint, req) FSReqBase* req_wrap_async = GetReqWrap(args, 2, use_bigint); + CHECK_NOT_NULL(req_wrap_async); FS_ASYNC_TRACE_BEGIN0(UV_FS_FSTAT, req_wrap_async) AsyncCall(env, req_wrap_async, args, "fstat", UTF8, AfterStat, uv_fs_fstat, fd); @@ -1283,6 +1285,7 @@ static void Symlink(const FunctionCallbackInfo& args) { if (argc > 3) { // symlink(target, path, flags, req) FSReqBase* req_wrap_async = GetReqWrap(args, 3); + CHECK_NOT_NULL(req_wrap_async); FS_ASYNC_TRACE_BEGIN2(UV_FS_SYMLINK, req_wrap_async, "target", @@ -1321,6 +1324,7 @@ static void Link(const FunctionCallbackInfo& args) { if (argc > 2) { // link(src, dest, req) FSReqBase* req_wrap_async = GetReqWrap(args, 2); + CHECK_NOT_NULL(req_wrap_async); // To avoid bypass the link target should be allowed to read and write ASYNC_THROW_IF_INSUFFICIENT_PERMISSIONS( env, @@ -1379,6 +1383,7 @@ static void ReadLink(const FunctionCallbackInfo& args) { if (argc > 2) { // readlink(path, encoding, req) FSReqBase* req_wrap_async = GetReqWrap(args, 2); + CHECK_NOT_NULL(req_wrap_async); FS_ASYNC_TRACE_BEGIN1( UV_FS_READLINK, req_wrap_async, "path", TRACE_STR_COPY(*path)) AsyncCall(env, req_wrap_async, args, "readlink", encoding, AfterStringPtr, @@ -1425,6 +1430,7 @@ static void Rename(const FunctionCallbackInfo& args) { if (argc > 2) { // rename(old_path, new_path, req) FSReqBase* req_wrap_async = GetReqWrap(args, 2); + CHECK_NOT_NULL(req_wrap_async); ASYNC_THROW_IF_INSUFFICIENT_PERMISSIONS( env, req_wrap_async, @@ -1482,6 +1488,7 @@ static void FTruncate(const FunctionCallbackInfo& args) { if (argc > 2) { // ftruncate(fd, len, req) FSReqBase* req_wrap_async = GetReqWrap(args, 2); + CHECK_NOT_NULL(req_wrap_async); FS_ASYNC_TRACE_BEGIN0(UV_FS_FTRUNCATE, req_wrap_async) 
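Returning to the ReplacePlaceholders() helper added to src/inspector_profiler.cc above: it expands ${pid} in the --cpu-prof-name pattern by locating each known placeholder and splicing in the value produced by its getter. A standalone sketch of the same loop, assuming only the standard library plus POSIX getpid() in place of uv_os_getpid():

#include <cstdio>
#include <functional>
#include <string>
#include <unordered_map>
#include <unistd.h>  // getpid(); the real helper uses uv_os_getpid()

static std::string ReplacePlaceholders(const std::string& pattern) {
  std::string result = pattern;
  // Table of supported placeholders; the diff currently defines only ${pid}.
  static const std::unordered_map<std::string, std::function<std::string()>>
      placeholders = {
          {"${pid}", []() { return std::to_string(getpid()); }},
      };
  for (const auto& [placeholder, getter] : placeholders) {
    size_t pos = 0;
    while ((pos = result.find(placeholder, pos)) != std::string::npos) {
      const std::string value = getter();
      result.replace(pos, placeholder.length(), value);
      // Advance past the inserted value so a replacement is never rescanned.
      pos += value.length();
    }
  }
  return result;
}

int main() {
  // e.g. node --cpu-prof --cpu-prof-name 'CPU.${pid}.cpuprofile'
  std::printf("%s\n", ReplacePlaceholders("CPU.${pid}.cpuprofile").c_str());
}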
AsyncCall(env, req_wrap_async, args, "ftruncate", UTF8, AfterNoArgs, uv_fs_ftruncate, fd, len); @@ -1591,6 +1598,7 @@ static void RMDir(const FunctionCallbackInfo& args) { if (argc > 1) { FSReqBase* req_wrap_async = GetReqWrap(args, 1); // rmdir(path, req) + CHECK_NOT_NULL(req_wrap_async); FS_ASYNC_TRACE_BEGIN1( UV_FS_RMDIR, req_wrap_async, "path", TRACE_STR_COPY(*path)) AsyncCall(env, req_wrap_async, args, "rmdir", UTF8, AfterNoArgs, @@ -1788,6 +1796,7 @@ static void MKDir(const FunctionCallbackInfo& args) { if (argc > 3) { // mkdir(path, mode, recursive, req) FSReqBase* req_wrap_async = GetReqWrap(args, 3); + CHECK_NOT_NULL(req_wrap_async); FS_ASYNC_TRACE_BEGIN1( UV_FS_UNLINK, req_wrap_async, "path", TRACE_STR_COPY(*path)) AsyncCall(env, req_wrap_async, args, "mkdir", UTF8, @@ -1839,6 +1848,7 @@ static void RealPath(const FunctionCallbackInfo& args) { if (argc > 2) { // realpath(path, encoding, req) FSReqBase* req_wrap_async = GetReqWrap(args, 2); + CHECK_NOT_NULL(req_wrap_async); FS_ASYNC_TRACE_BEGIN1( UV_FS_REALPATH, req_wrap_async, "path", TRACE_STR_COPY(*path)) AsyncCall(env, req_wrap_async, args, "realpath", encoding, AfterStringPtr, @@ -1906,6 +1916,7 @@ static void ReadDir(const FunctionCallbackInfo& args) { if (argc > 3) { // readdir(path, encoding, withTypes, req) FSReqBase* req_wrap_async = GetReqWrap(args, 3); + CHECK_NOT_NULL(req_wrap_async); ASYNC_THROW_IF_INSUFFICIENT_PERMISSIONS( env, req_wrap_async, @@ -2146,6 +2157,7 @@ static void CopyFile(const FunctionCallbackInfo& args) { if (argc > 3) { // copyFile(src, dest, flags, req) FSReqBase* req_wrap_async = GetReqWrap(args, 3); + CHECK_NOT_NULL(req_wrap_async); ASYNC_THROW_IF_INSUFFICIENT_PERMISSIONS( env, req_wrap_async, @@ -2269,6 +2281,7 @@ static void WriteBuffers(const FunctionCallbackInfo& args) { if (argc > 3) { // writeBuffers(fd, chunks, pos, req) FSReqBase* req_wrap_async = GetReqWrap(args, 3); + CHECK_NOT_NULL(req_wrap_async); FS_ASYNC_TRACE_BEGIN0(UV_FS_WRITE, req_wrap_async) AsyncCall(env, req_wrap_async, @@ -2643,6 +2656,7 @@ static void ReadBuffers(const FunctionCallbackInfo& args) { if (argc > 3) { // readBuffers(fd, buffers, pos, req) FSReqBase* req_wrap_async = GetReqWrap(args, 3); + CHECK_NOT_NULL(req_wrap_async); FS_ASYNC_TRACE_BEGIN0(UV_FS_READ, req_wrap_async) AsyncCall(env, req_wrap_async, args, "read", UTF8, AfterInteger, uv_fs_read, fd, *iovs, iovs.length(), pos); @@ -2680,6 +2694,7 @@ static void Chmod(const FunctionCallbackInfo& args) { if (argc > 2) { // chmod(path, mode, req) FSReqBase* req_wrap_async = GetReqWrap(args, 2); + CHECK_NOT_NULL(req_wrap_async); FS_ASYNC_TRACE_BEGIN1( UV_FS_CHMOD, req_wrap_async, "path", TRACE_STR_COPY(*path)) AsyncCall(env, req_wrap_async, args, "chmod", UTF8, AfterNoArgs, @@ -2712,6 +2727,7 @@ static void FChmod(const FunctionCallbackInfo& args) { if (argc > 2) { // fchmod(fd, mode, req) FSReqBase* req_wrap_async = GetReqWrap(args, 2); + CHECK_NOT_NULL(req_wrap_async); FS_ASYNC_TRACE_BEGIN0(UV_FS_FCHMOD, req_wrap_async) AsyncCall(env, req_wrap_async, args, "fchmod", UTF8, AfterNoArgs, uv_fs_fchmod, fd, mode); @@ -2789,6 +2805,7 @@ static void FChown(const FunctionCallbackInfo& args) { if (argc > 3) { // fchown(fd, uid, gid, req) FSReqBase* req_wrap_async = GetReqWrap(args, 3); + CHECK_NOT_NULL(req_wrap_async); FS_ASYNC_TRACE_BEGIN0(UV_FS_FCHOWN, req_wrap_async) AsyncCall(env, req_wrap_async, args, "fchown", UTF8, AfterNoArgs, uv_fs_fchown, fd, uid, gid); @@ -2819,6 +2836,7 @@ static void LChown(const FunctionCallbackInfo& args) { if (argc > 3) { // lchown(path, uid, 
gid, req) FSReqBase* req_wrap_async = GetReqWrap(args, 3); + CHECK_NOT_NULL(req_wrap_async); ASYNC_THROW_IF_INSUFFICIENT_PERMISSIONS( env, req_wrap_async, @@ -2861,6 +2879,7 @@ static void UTimes(const FunctionCallbackInfo& args) { if (argc > 3) { // utimes(path, atime, mtime, req) FSReqBase* req_wrap_async = GetReqWrap(args, 3); + CHECK_NOT_NULL(req_wrap_async); FS_ASYNC_TRACE_BEGIN1( UV_FS_UTIME, req_wrap_async, "path", TRACE_STR_COPY(*path)) AsyncCall(env, req_wrap_async, args, "utime", UTF8, AfterNoArgs, @@ -2893,6 +2912,7 @@ static void FUTimes(const FunctionCallbackInfo& args) { if (argc > 3) { // futimes(fd, atime, mtime, req) FSReqBase* req_wrap_async = GetReqWrap(args, 3); + CHECK_NOT_NULL(req_wrap_async); FS_ASYNC_TRACE_BEGIN0(UV_FS_FUTIME, req_wrap_async) AsyncCall(env, req_wrap_async, args, "futime", UTF8, AfterNoArgs, uv_fs_futime, fd, atime, mtime); @@ -2925,6 +2945,7 @@ static void LUTimes(const FunctionCallbackInfo& args) { if (argc > 3) { // lutimes(path, atime, mtime, req) FSReqBase* req_wrap_async = GetReqWrap(args, 3); + CHECK_NOT_NULL(req_wrap_async); FS_ASYNC_TRACE_BEGIN1( UV_FS_LUTIME, req_wrap_async, "path", TRACE_STR_COPY(*path)) AsyncCall(env, req_wrap_async, args, "lutime", UTF8, AfterNoArgs, @@ -2957,6 +2978,7 @@ static void Mkdtemp(const FunctionCallbackInfo& args) { if (argc > 2) { // mkdtemp(tmpl, encoding, req) FSReqBase* req_wrap_async = GetReqWrap(args, 2); + CHECK_NOT_NULL(req_wrap_async); ASYNC_THROW_IF_INSUFFICIENT_PERMISSIONS( env, req_wrap_async, diff --git a/src/node_messaging.cc b/src/node_messaging.cc index 42d0e32a9ec21b..66c8868b9d8e69 100644 --- a/src/node_messaging.cc +++ b/src/node_messaging.cc @@ -1508,7 +1508,7 @@ Maybe SiblingGroup::Dispatch( RwLock::ScopedReadLock lock(group_mutex_); // The source MessagePortData is not part of this group. 
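The node_file.cc hunks above and below all add CHECK_NOT_NULL(req_wrap_async) immediately after GetReqWrap(), so a request wrap that could not be produced aborts loudly at the call site instead of being dereferenced later inside the tracing and AsyncCall helpers. A toy illustration of that fail-fast guard; the macro is a simplified stand-in for the real CHECK_NOT_NULL in src/util.h, and ReqWrap/GetReqWrap here are hypothetical:

#include <cstdio>
#include <cstdlib>

// Simplified stand-in: report where the null showed up, then abort.
#define CHECK_NOT_NULL(ptr)                                                \
  do {                                                                     \
    if ((ptr) == nullptr) {                                                \
      std::fprintf(stderr, "%s:%d: %s must not be null\n", __FILE__,       \
                   __LINE__, #ptr);                                        \
      std::abort();                                                        \
    }                                                                      \
  } while (0)

struct ReqWrap {
  void Begin() { std::puts("async fs op started"); }
};

// Hypothetical getter that, like GetReqWrap(), may fail and return nullptr.
ReqWrap* GetReqWrap(bool ok) {
  static ReqWrap wrap;
  return ok ? &wrap : nullptr;
}

int main() {
  ReqWrap* req = GetReqWrap(true);
  CHECK_NOT_NULL(req);  // aborts with a precise message if req is null
  req->Begin();
}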
- if (ports_.find(source) == ports_.end()) { + if (!ports_.contains(source)) { if (error != nullptr) *error = "Source MessagePort is not entangled with this group."; return Nothing(); diff --git a/src/node_options.cc b/src/node_options.cc index 228fbe645587ab..249361e351946c 100644 --- a/src/node_options.cc +++ b/src/node_options.cc @@ -479,10 +479,7 @@ EnvironmentOptionsParser::EnvironmentOptionsParser() { kAllowedInEnvvar); AddAlias("--loader", "--experimental-loader"); AddOption("--experimental-modules", "", NoOp{}, kAllowedInEnvvar); - AddOption("--experimental-wasm-modules", - "experimental ES Module support for webassembly modules", - &EnvironmentOptions::experimental_wasm_modules, - kAllowedInEnvvar); + AddOption("--experimental-wasm-modules", "", NoOp{}, kAllowedInEnvvar); AddOption("--experimental-import-meta-resolve", "experimental ES Module import.meta.resolve() parentURL support", &EnvironmentOptions::experimental_import_meta_resolve, @@ -650,6 +647,9 @@ EnvironmentOptionsParser::EnvironmentOptionsParser() { AddOption("--experimental-worker-inspection", "experimental worker inspection support", &EnvironmentOptions::experimental_worker_inspection); + AddOption("--experimental-inspector-network-resource", + "experimental load network resources via the inspector", + &EnvironmentOptions::experimental_inspector_network_resource); AddOption( "--heap-prof", "Start the V8 heap profiler on start up, and write the heap profile " diff --git a/src/node_options.h b/src/node_options.h index 0b75516eb42692..2a1a6aaf9f2d35 100644 --- a/src/node_options.h +++ b/src/node_options.h @@ -130,7 +130,6 @@ class EnvironmentOptions : public Options { bool experimental_global_customevent = true; bool experimental_global_navigator = true; bool experimental_global_web_crypto = true; - bool experimental_wasm_modules = false; bool experimental_import_meta_resolve = false; std::string input_type; // Value of --input-type std::string type; // Value of --experimental-default-type @@ -172,6 +171,7 @@ class EnvironmentOptions : public Options { bool cpu_prof = false; bool experimental_network_inspection = false; bool experimental_worker_inspection = false; + bool experimental_inspector_network_resource = false; std::string heap_prof_dir; std::string heap_prof_name; static const uint64_t kDefaultHeapProfInterval = 512 * 1024; diff --git a/src/node_process_methods.cc b/src/node_process_methods.cc index f46ad6b9c81b45..1cb08b715865f8 100644 --- a/src/node_process_methods.cc +++ b/src/node_process_methods.cc @@ -133,6 +133,29 @@ static void CPUUsage(const FunctionCallbackInfo& args) { fields[1] = MICROS_PER_SEC * rusage.ru_stime.tv_sec + rusage.ru_stime.tv_usec; } +// ThreadCPUUsage use libuv's uv_getrusage_thread() this-thread resource usage +// accessor, to access ru_utime (user CPU time used) and ru_stime +// (system CPU time used), which are uv_timeval_t structs +// (long tv_sec, long tv_usec). +// Returns those values as Float64 microseconds in the elements of the array +// passed to the function. +static void ThreadCPUUsage(const FunctionCallbackInfo& args) { + Environment* env = Environment::GetCurrent(args); + uv_rusage_t rusage; + + // Call libuv to get the values we'll return. + int err = uv_getrusage_thread(&rusage); + if (err) return env->ThrowUVException(err, "uv_getrusage_thread"); + + // Get the double array pointer from the Float64Array argument. 
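The ThreadCPUUsage binding being added here (its body continues below) reads the calling thread's user and system CPU time via uv_getrusage_thread() and reports both as microseconds. A standalone sketch of that conversion against libuv directly, assuming a libuv recent enough to ship uv_getrusage_thread() (link with -luv):

#include <cstdio>
#include <uv.h>

int main() {
  // Burn a little CPU so the numbers are visibly non-zero.
  volatile double x = 0;
  for (int i = 0; i < 10000000; i++) x += i * 0.5;

  uv_rusage_t usage;
  int err = uv_getrusage_thread(&usage);
  if (err != 0) {
    std::fprintf(stderr, "uv_getrusage_thread: %s\n", uv_strerror(err));
    return 1;
  }

  // Same arithmetic as ThreadCPUUsage: seconds * 1e6 plus microseconds.
  double user_us = 1e6 * usage.ru_utime.tv_sec + usage.ru_utime.tv_usec;
  double system_us = 1e6 * usage.ru_stime.tv_sec + usage.ru_stime.tv_usec;
  std::printf("user: %.0f us, system: %.0f us\n", user_us, system_us);
  return 0;
}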
+ Local ab = get_fields_array_buffer(args, 0, 2); + double* fields = static_cast(ab->Data()); + + // Set the Float64Array elements to be user / system values in microseconds. + fields[0] = MICROS_PER_SEC * rusage.ru_utime.tv_sec + rusage.ru_utime.tv_usec; + fields[1] = MICROS_PER_SEC * rusage.ru_stime.tv_sec + rusage.ru_stime.tv_usec; +} + static void Cwd(const FunctionCallbackInfo& args) { Environment* env = Environment::GetCurrent(args); CHECK(env->has_run_bootstrapping_code()); @@ -745,6 +768,7 @@ static void CreatePerIsolateProperties(IsolateData* isolate_data, SetMethod(isolate, target, "availableMemory", GetAvailableMemory); SetMethod(isolate, target, "rss", Rss); SetMethod(isolate, target, "cpuUsage", CPUUsage); + SetMethod(isolate, target, "threadCpuUsage", ThreadCPUUsage); SetMethod(isolate, target, "resourceUsage", ResourceUsage); SetMethod(isolate, target, "_debugEnd", DebugEnd); @@ -793,6 +817,7 @@ void RegisterExternalReferences(ExternalReferenceRegistry* registry) { registry->Register(GetAvailableMemory); registry->Register(Rss); registry->Register(CPUUsage); + registry->Register(ThreadCPUUsage); registry->Register(ResourceUsage); registry->Register(GetActiveRequests); diff --git a/src/node_realm.h b/src/node_realm.h index 51fbd502a10eb6..47715126c55080 100644 --- a/src/node_realm.h +++ b/src/node_realm.h @@ -157,7 +157,7 @@ class Realm : public MemoryRetainer { CleanupQueue cleanup_queue_; }; -class PrincipalRealm : public Realm { +class PrincipalRealm final : public Realm { public: PrincipalRealm(Environment* env, v8::Local context, diff --git a/src/node_sea.cc b/src/node_sea.cc index 65a338e00d4e22..508c83a4177301 100644 --- a/src/node_sea.cc +++ b/src/node_sea.cc @@ -3,7 +3,6 @@ #include "blob_serializer_deserializer-inl.h" #include "debug_utils-inl.h" #include "env-inl.h" -#include "json_parser.h" #include "node_contextify.h" #include "node_errors.h" #include "node_external_reference.h" @@ -11,6 +10,7 @@ #include "node_snapshot_builder.h" #include "node_union_bytes.h" #include "node_v8_platform-inl.h" +#include "simdjson.h" #include "util-inl.h" // The POSTJECT_SENTINEL_FUSE macro is a string of random characters selected by @@ -303,79 +303,131 @@ std::optional ParseSingleExecutableConfig( } SeaConfig result; - JSONParser parser; - if (!parser.Parse(config)) { - FPrintF(stderr, "Cannot parse JSON from %s\n", config_path); - return std::nullopt; - } - result.main_path = - parser.GetTopLevelStringField("main").value_or(std::string()); - if (result.main_path.empty()) { - FPrintF(stderr, - "\"main\" field of %s is not a non-empty string\n", - config_path); - return std::nullopt; - } + simdjson::ondemand::parser parser; + simdjson::ondemand::document document; + simdjson::ondemand::object main_object; + simdjson::error_code error = + parser.iterate(simdjson::pad(config)).get(document); - result.output_path = - parser.GetTopLevelStringField("output").value_or(std::string()); - if (result.output_path.empty()) { + if (!error) { + error = document.get_object().get(main_object); + } + if (error) { FPrintF(stderr, - "\"output\" field of %s is not a non-empty string\n", - config_path); + "Cannot parse JSON from %s: %s\n", + config_path, + simdjson::error_message(error)); return std::nullopt; } - std::optional disable_experimental_sea_warning = - parser.GetTopLevelBoolField("disableExperimentalSEAWarning"); - if (!disable_experimental_sea_warning.has_value()) { - FPrintF(stderr, + bool use_snapshot_value = false; + bool use_code_cache_value = false; + + for (auto field : main_object) 
{ + std::string_view key; + if (field.unescaped_key().get(key)) { + FPrintF(stderr, "Cannot read key from %s\n", config_path); + return std::nullopt; + } + if (key == "main") { + if (field.value().get_string().get(result.main_path) || + result.main_path.empty()) { + FPrintF(stderr, + "\"main\" field of %s is not a non-empty string\n", + config_path); + return std::nullopt; + } + } else if (key == "output") { + if (field.value().get_string().get(result.output_path) || + result.output_path.empty()) { + FPrintF(stderr, + "\"output\" field of %s is not a non-empty string\n", + config_path); + return std::nullopt; + } + } else if (key == "disableExperimentalSEAWarning") { + bool disable_experimental_sea_warning; + if (field.value().get_bool().get(disable_experimental_sea_warning)) { + FPrintF( + stderr, "\"disableExperimentalSEAWarning\" field of %s is not a Boolean\n", config_path); - return std::nullopt; - } - if (disable_experimental_sea_warning.value()) { - result.flags |= SeaFlags::kDisableExperimentalSeaWarning; + return std::nullopt; + } + if (disable_experimental_sea_warning) { + result.flags |= SeaFlags::kDisableExperimentalSeaWarning; + } + } else if (key == "useSnapshot") { + if (field.value().get_bool().get(use_snapshot_value)) { + FPrintF(stderr, + "\"useSnapshot\" field of %s is not a Boolean\n", + config_path); + return std::nullopt; + } + if (use_snapshot_value) { + result.flags |= SeaFlags::kUseSnapshot; + } + } else if (key == "useCodeCache") { + if (field.value().get_bool().get(use_code_cache_value)) { + FPrintF(stderr, + "\"useCodeCache\" field of %s is not a Boolean\n", + config_path); + return std::nullopt; + } + if (use_code_cache_value) { + result.flags |= SeaFlags::kUseCodeCache; + } + } else if (key == "assets") { + simdjson::ondemand::object assets_object; + if (field.value().get_object().get(assets_object)) { + FPrintF(stderr, + "\"assets\" field of %s is not a map of strings\n", + config_path); + return std::nullopt; + } + simdjson::ondemand::value asset_value; + for (auto asset_field : assets_object) { + std::string_view key_str; + std::string_view value_str; + if (asset_field.unescaped_key().get(key_str) || + asset_field.value().get(asset_value) || + asset_value.get_string().get(value_str)) { + FPrintF(stderr, + "\"assets\" field of %s is not a map of strings\n", + config_path); + return std::nullopt; + } + + result.assets.emplace(key_str, value_str); + } + + if (!result.assets.empty()) { + result.flags |= SeaFlags::kIncludeAssets; + } + } } - std::optional use_snapshot = parser.GetTopLevelBoolField("useSnapshot"); - if (!use_snapshot.has_value()) { - FPrintF( - stderr, "\"useSnapshot\" field of %s is not a Boolean\n", config_path); - return std::nullopt; - } - if (use_snapshot.value()) { - result.flags |= SeaFlags::kUseSnapshot; + if (static_cast(result.flags & SeaFlags::kUseSnapshot) && + static_cast(result.flags & SeaFlags::kUseCodeCache)) { + // TODO(joyeecheung): code cache in snapshot should be configured by + // separate snapshot configurations. 
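ParseSingleExecutableConfig, rewritten above to walk the SEA config with simdjson's on-demand API (the remaining validation continues below), checks the error code returned by every accessor instead of relying on exceptions. A reduced sketch of that pattern against a made-up two-key config; the build command in the comment is only a suggestion:

#include <cstdio>
#include <string>
#include <string_view>
#include "simdjson.h"

// Build roughly like: g++ -std=c++20 example.cc simdjson.cpp
int main() {
  const std::string config = R"({ "main": "app.js", "useSnapshot": true })";

  simdjson::ondemand::parser parser;
  simdjson::ondemand::document document;
  simdjson::ondemand::object root;
  // pad() supplies the trailing padding the on-demand parser requires.
  simdjson::error_code error =
      parser.iterate(simdjson::pad(config)).get(document);
  if (!error) {
    error = document.get_object().get(root);
  }
  if (error) {
    std::fprintf(stderr, "cannot parse: %s\n", simdjson::error_message(error));
    return 1;
  }

  std::string_view main_path;
  bool use_snapshot = false;
  for (auto field : root) {
    std::string_view key;
    if (field.unescaped_key().get(key)) return 1;
    if (key == "main") {
      if (field.value().get_string().get(main_path)) return 1;
    } else if (key == "useSnapshot") {
      if (field.value().get_bool().get(use_snapshot)) return 1;
    }
  }

  std::printf("main=%s useSnapshot=%d\n",
              std::string(main_path).c_str(), use_snapshot);
  return 0;
}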
+ FPrintF(stderr, + "\"useCodeCache\" is redundant when \"useSnapshot\" is true\n"); } - std::optional use_code_cache = - parser.GetTopLevelBoolField("useCodeCache"); - if (!use_code_cache.has_value()) { - FPrintF( - stderr, "\"useCodeCache\" field of %s is not a Boolean\n", config_path); + if (result.main_path.empty()) { + FPrintF(stderr, + "\"main\" field of %s is not a non-empty string\n", + config_path); return std::nullopt; } - if (use_code_cache.value()) { - if (use_snapshot.value()) { - // TODO(joyeecheung): code cache in snapshot should be configured by - // separate snapshot configurations. - FPrintF(stderr, - "\"useCodeCache\" is redundant when \"useSnapshot\" is true\n"); - } else { - result.flags |= SeaFlags::kUseCodeCache; - } - } - auto assets_opt = parser.GetTopLevelStringDict("assets"); - if (!assets_opt.has_value()) { + if (result.output_path.empty()) { FPrintF(stderr, - "\"assets\" field of %s is not a map of strings\n", + "\"output\" field of %s is not a non-empty string\n", config_path); return std::nullopt; - } else if (!assets_opt.value().empty()) { - result.flags |= SeaFlags::kIncludeAssets; - result.assets = std::move(assets_opt.value()); } return result; diff --git a/src/node_shadow_realm.h b/src/node_shadow_realm.h index c2a6f2c8cb8d05..65844349f7ab49 100644 --- a/src/node_shadow_realm.h +++ b/src/node_shadow_realm.h @@ -9,7 +9,7 @@ namespace node { namespace shadow_realm { -class ShadowRealm : public Realm { +class ShadowRealm final : public Realm { public: static ShadowRealm* New(Environment* env); diff --git a/src/node_version.h b/src/node_version.h index f721c0071607d0..e0b2f884c576dd 100644 --- a/src/node_version.h +++ b/src/node_version.h @@ -23,13 +23,13 @@ #define SRC_NODE_VERSION_H_ #define NODE_MAJOR_VERSION 22 -#define NODE_MINOR_VERSION 18 -#define NODE_PATCH_VERSION 1 +#define NODE_MINOR_VERSION 19 +#define NODE_PATCH_VERSION 0 #define NODE_VERSION_IS_LTS 1 #define NODE_VERSION_LTS_CODENAME "Jod" -#define NODE_VERSION_IS_RELEASE 0 +#define NODE_VERSION_IS_RELEASE 1 #ifndef NODE_STRINGIFY #define NODE_STRINGIFY(n) NODE_STRINGIFY_HELPER(n) diff --git a/src/node_watchdog.cc b/src/node_watchdog.cc index 4403bd4f157bff..25fb194e954031 100644 --- a/src/node_watchdog.cc +++ b/src/node_watchdog.cc @@ -308,7 +308,10 @@ int SigintWatchdogHelper::Start() { CHECK_EQ(0, pthread_sigmask(SIG_SETMASK, &sigmask, &savemask)); sigmask = savemask; int ret = pthread_create(&thread_, nullptr, RunSigintWatchdog, nullptr); - CHECK_EQ(0, pthread_sigmask(SIG_SETMASK, &sigmask, nullptr)); + + auto cleanup = OnScopeLeave( + [&]() { CHECK_EQ(0, pthread_sigmask(SIG_SETMASK, &sigmask, nullptr)); }); + if (ret != 0) { return ret; } diff --git a/src/node_worker.cc b/src/node_worker.cc index 9d56d8f793ef48..e2dbdd39b06c4f 100644 --- a/src/node_worker.cc +++ b/src/node_worker.cc @@ -32,6 +32,7 @@ using v8::Isolate; using v8::Local; using v8::Locker; using v8::Maybe; +using v8::Name; using v8::Null; using v8::Number; using v8::Object; @@ -811,6 +812,81 @@ void Worker::Unref(const FunctionCallbackInfo& args) { } } +class WorkerCpuUsageTaker : public AsyncWrap { + public: + WorkerCpuUsageTaker(Environment* env, Local obj) + : AsyncWrap(env, obj, AsyncWrap::PROVIDER_WORKERCPUUSAGE) {} + + SET_NO_MEMORY_INFO() + SET_MEMORY_INFO_NAME(WorkerCpuUsageTaker) + SET_SELF_SIZE(WorkerCpuUsageTaker) +}; + +void Worker::CpuUsage(const FunctionCallbackInfo& args) { + Worker* w; + ASSIGN_OR_RETURN_UNWRAP(&w, args.This()); + + Environment* env = w->env(); + AsyncHooks::DefaultTriggerAsyncIdScope 
trigger_id_scope(w); + Local wrap; + if (!env->worker_cpu_usage_taker_template() + ->NewInstance(env->context()) + .ToLocal(&wrap)) { + return; + } + + BaseObjectPtr taker = + MakeDetachedBaseObject(env, wrap); + + bool scheduled = w->RequestInterrupt([taker = std::move(taker), + env](Environment* worker_env) mutable { + auto cpu_usage_stats = std::make_unique(); + int err = uv_getrusage_thread(cpu_usage_stats.get()); + + env->SetImmediateThreadsafe( + [taker = std::move(taker), + cpu_usage_stats = std::move(cpu_usage_stats), + err = err](Environment* env) mutable { + Isolate* isolate = env->isolate(); + HandleScope handle_scope(isolate); + Context::Scope context_scope(env->context()); + AsyncHooks::DefaultTriggerAsyncIdScope trigger_id_scope(taker.get()); + + Local argv[] = { + Null(isolate), + Undefined(isolate), + }; + + if (err) { + argv[0] = UVException( + isolate, err, "uv_getrusage_thread", nullptr, nullptr, nullptr); + } else { + Local names[] = { + FIXED_ONE_BYTE_STRING(isolate, "user"), + FIXED_ONE_BYTE_STRING(isolate, "system"), + }; + Local values[] = { + Number::New(isolate, + 1e6 * cpu_usage_stats->ru_utime.tv_sec + + cpu_usage_stats->ru_utime.tv_usec), + Number::New(isolate, + 1e6 * cpu_usage_stats->ru_stime.tv_sec + + cpu_usage_stats->ru_stime.tv_usec), + }; + argv[1] = Object::New( + isolate, Null(isolate), names, values, arraysize(names)); + } + + taker->MakeCallback(env->ondone_string(), arraysize(argv), argv); + }, + CallbackFlags::kUnrefed); + }); + + if (scheduled) { + args.GetReturnValue().Set(wrap); + } +} + class WorkerHeapStatisticsTaker : public AsyncWrap { public: WorkerHeapStatisticsTaker(Environment* env, Local obj) @@ -1102,6 +1178,7 @@ void CreateWorkerPerIsolateProperties(IsolateData* isolate_data, SetProtoMethod(isolate, w, "loopIdleTime", Worker::LoopIdleTime); SetProtoMethod(isolate, w, "loopStartTime", Worker::LoopStartTime); SetProtoMethod(isolate, w, "getHeapStatistics", Worker::GetHeapStatistics); + SetProtoMethod(isolate, w, "cpuUsage", Worker::CpuUsage); SetConstructorFunction(isolate, target, "Worker", w); } @@ -1134,6 +1211,19 @@ void CreateWorkerPerIsolateProperties(IsolateData* isolate_data, wst->InstanceTemplate()); } + { + Local wst = NewFunctionTemplate(isolate, nullptr); + + wst->InstanceTemplate()->SetInternalFieldCount( + WorkerCpuUsageTaker::kInternalFieldCount); + wst->Inherit(AsyncWrap::GetConstructorTemplate(isolate_data)); + + Local wst_string = + FIXED_ONE_BYTE_STRING(isolate, "WorkerCpuUsageTaker"); + wst->SetClassName(wst_string); + isolate_data->set_worker_cpu_usage_taker_template(wst->InstanceTemplate()); + } + SetMethod(isolate, target, "getEnvMessagePort", GetEnvMessagePort); } @@ -1200,6 +1290,7 @@ void RegisterExternalReferences(ExternalReferenceRegistry* registry) { registry->Register(Worker::LoopIdleTime); registry->Register(Worker::LoopStartTime); registry->Register(Worker::GetHeapStatistics); + registry->Register(Worker::CpuUsage); } } // anonymous namespace diff --git a/src/node_worker.h b/src/node_worker.h index ca022104bd4024..9e80a764a8bd71 100644 --- a/src/node_worker.h +++ b/src/node_worker.h @@ -80,6 +80,7 @@ class Worker : public AsyncWrap { static void LoopStartTime(const v8::FunctionCallbackInfo& args); static void GetHeapStatistics( const v8::FunctionCallbackInfo& args); + static void CpuUsage(const v8::FunctionCallbackInfo& args); private: bool CreateEnvMessagePort(Environment* env); diff --git a/src/node_zlib.cc b/src/node_zlib.cc index 037989397f4a5e..4e3af35a1347d7 100644 --- a/src/node_zlib.cc +++ 
b/src/node_zlib.cc @@ -324,7 +324,8 @@ class ZstdCompressContext final : public ZstdContext { CompressionError ResetStream(); // Zstd specific: - CompressionError Init(uint64_t pledged_src_size); + CompressionError Init(uint64_t pledged_src_size, + std::string_view dictionary = {}); CompressionError SetParameter(int key, int value); // Wrap ZSTD_freeCCtx to remove the return type. @@ -349,7 +350,9 @@ class ZstdDecompressContext final : public ZstdContext { CompressionError ResetStream(); // Zstd specific: - CompressionError Init(uint64_t pledged_src_size); + CompressionError Init(uint64_t pledged_src_size, + std::string_view dictionary = {}); + CompressionError SetParameter(int key, int value); // Wrap ZSTD_freeDCtx to remove the return type. @@ -874,8 +877,10 @@ class ZstdStream final : public CompressionStream { Environment* env = Environment::GetCurrent(args); Local context = env->context(); - CHECK(args.Length() == 4 && - "init(params, pledgedSrcSize, writeResult, writeCallback)"); + CHECK((args.Length() == 4 || args.Length() == 5) && + "init(params, pledgedSrcSize, writeResult, writeCallback[, " + "dictionary])"); + ZstdStream* wrap; ASSIGN_OR_RETURN_UNWRAP(&wrap, args.This()); @@ -903,7 +908,19 @@ class ZstdStream final : public CompressionStream { } AllocScope alloc_scope(wrap); - CompressionError err = wrap->context()->Init(pledged_src_size); + std::string_view dictionary; + ArrayBufferViewContents contents; + if (args.Length() == 5 && !args[4]->IsUndefined()) { + if (!args[4]->IsArrayBufferView()) { + THROW_ERR_INVALID_ARG_TYPE( + wrap->env(), "dictionary must be an ArrayBufferView if provided"); + return; + } + contents.ReadValue(args[4]); + dictionary = std::string_view(contents.data(), contents.length()); + } + + CompressionError err = wrap->context()->Init(pledged_src_size, dictionary); if (err.IsError()) { wrap->EmitError(err); THROW_ERR_ZLIB_INITIALIZATION_FAILED(wrap->env(), err.message); @@ -1508,7 +1525,8 @@ CompressionError ZstdCompressContext::SetParameter(int key, int value) { return {}; } -CompressionError ZstdCompressContext::Init(uint64_t pledged_src_size) { +CompressionError ZstdCompressContext::Init(uint64_t pledged_src_size, + std::string_view dictionary) { pledged_src_size_ = pledged_src_size; cctx_.reset(ZSTD_createCCtx()); if (!cctx_) { @@ -1516,6 +1534,17 @@ CompressionError ZstdCompressContext::Init(uint64_t pledged_src_size) { "ERR_ZLIB_INITIALIZATION_FAILED", -1); } + + if (!dictionary.empty()) { + size_t ret = ZSTD_CCtx_loadDictionary( + cctx_.get(), dictionary.data(), dictionary.size()); + if (ZSTD_isError(ret)) { + return CompressionError("Failed to load zstd dictionary", + "ERR_ZLIB_DICTIONARY_LOAD_FAILED", + -1); + } + } + size_t result = ZSTD_CCtx_setPledgedSrcSize(cctx_.get(), pledged_src_size); if (ZSTD_isError(result)) { return CompressionError( @@ -1548,13 +1577,24 @@ CompressionError ZstdDecompressContext::SetParameter(int key, int value) { return {}; } -CompressionError ZstdDecompressContext::Init(uint64_t pledged_src_size) { +CompressionError ZstdDecompressContext::Init(uint64_t pledged_src_size, + std::string_view dictionary) { dctx_.reset(ZSTD_createDCtx()); if (!dctx_) { return CompressionError("Could not initialize zstd instance", "ERR_ZLIB_INITIALIZATION_FAILED", -1); } + + if (!dictionary.empty()) { + size_t ret = ZSTD_DCtx_loadDictionary( + dctx_.get(), dictionary.data(), dictionary.size()); + if (ZSTD_isError(ret)) { + return CompressionError("Failed to load zstd dictionary", + "ERR_ZLIB_DICTIONARY_LOAD_FAILED", + -1); + } + } return 
{}; } diff --git a/src/signal_wrap.cc b/src/signal_wrap.cc index d4fe8a3bcdd9ff..cd4960a4ec9e28 100644 --- a/src/signal_wrap.cc +++ b/src/signal_wrap.cc @@ -170,7 +170,7 @@ void DecreaseSignalHandlerCount(int signum) { bool HasSignalJSHandler(int signum) { Mutex::ScopedLock lock(handled_signals_mutex); - return handled_signals.find(signum) != handled_signals.end(); + return handled_signals.contains(signum); } } // namespace node diff --git a/src/stream_pipe.cc b/src/stream_pipe.cc index 0b0cf4eb30d59f..512c3f16e441c1 100644 --- a/src/stream_pipe.cc +++ b/src/stream_pipe.cc @@ -278,6 +278,8 @@ void StreamPipe::New(const FunctionCallbackInfo& args) { CHECK(args[1]->IsObject()); StreamBase* source = StreamBase::FromObject(args[0].As()); StreamBase* sink = StreamBase::FromObject(args[1].As()); + CHECK_NOT_NULL(source); + CHECK_NOT_NULL(sink); if (StreamPipe::New(source, sink, args.This()).IsNothing()) return; } diff --git a/src/util-inl.h b/src/util-inl.h index cc41a1fb5459a5..b21f7a8260ca6a 100644 --- a/src/util-inl.h +++ b/src/util-inl.h @@ -598,7 +598,14 @@ constexpr bool FastStringKey::operator==(const FastStringKey& other) const { return name_ == other.name_; } -constexpr FastStringKey::FastStringKey(std::string_view name) +consteval FastStringKey::FastStringKey(std::string_view name) + : FastStringKey(name, 0) {} + +constexpr FastStringKey FastStringKey::AllowDynamic(std::string_view name) { + return FastStringKey(name, 0); +} + +constexpr FastStringKey::FastStringKey(std::string_view name, int dummy) : name_(name), cached_hash_(HashImpl(name)) {} constexpr std::string_view FastStringKey::as_string_view() const { diff --git a/src/util.h b/src/util.h index 6376cf4f81113c..dbec66247852df 100644 --- a/src/util.h +++ b/src/util.h @@ -817,7 +817,11 @@ class PersistentToLocal { // computations. class FastStringKey { public: - constexpr explicit FastStringKey(std::string_view name); + // consteval ensures that the argument is a compile-time constant. + consteval explicit FastStringKey(std::string_view name); + // passing something that is not a compile-time constant needs explicit + // opt-in via this helper, as it defeats the purpose of FastStringKey. 
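The FastStringKey change here (util-inl.h above, util.h continuing below) makes the public constructor consteval, so a key can only be built directly from a compile-time constant, while AllowDynamic() is the explicit opt-in for runtime strings via a private constexpr constructor. A simplified standalone analogue; the class name, key strings, and djb2-style hash below are illustrative, not Node's actual implementation:

#include <cstddef>
#include <cstdio>
#include <string>
#include <string_view>

class Key {
 public:
  // consteval: ordinary construction must be a constant expression.
  consteval explicit Key(std::string_view name) : Key(name, 0) {}

  // Explicit escape hatch for strings only known at runtime.
  static constexpr Key AllowDynamic(std::string_view name) {
    return Key(name, 0);
  }

  constexpr std::string_view as_string_view() const { return name_; }
  constexpr size_t hash() const { return hash_; }

 private:
  constexpr Key(std::string_view name, int /*dummy*/)
      : name_(name), hash_(HashImpl(name)) {}

  static constexpr size_t HashImpl(std::string_view s) {
    size_t h = 5381;  // djb2, purely for illustration
    for (char c : s) h = h * 33 + static_cast<unsigned char>(c);
    return h;
  }

  std::string_view name_;
  size_t hash_;
};

int main() {
  constexpr Key compile_time("fs.access");     // fine: constant expression
  std::string runtime = "fs." + std::string("open");
  Key dynamic = Key::AllowDynamic(runtime);    // explicit runtime opt-in
  // Key rejected(runtime);                    // would not compile
  std::printf("%zu %zu\n", compile_time.hash(), dynamic.hash());
}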
+ static constexpr FastStringKey AllowDynamic(std::string_view name); struct Hash { constexpr size_t operator()(const FastStringKey& key) const; @@ -827,6 +831,8 @@ class FastStringKey { constexpr std::string_view as_string_view() const; private: + constexpr explicit FastStringKey(std::string_view name, int dummy); + static constexpr size_t HashImpl(std::string_view str); const std::string_view name_; diff --git a/suppressions.supp b/suppressions.supp deleted file mode 100644 index 9dc89fb83df5b4..00000000000000 --- a/suppressions.supp +++ /dev/null @@ -1,6 +0,0 @@ -vptr:deps/icu-small/source/common/uloc_tag.cpp -vptr:deps/icu-small/source/common/unistr.cpp -shift-base:deps/v8/src/wasm/decoder.h -vptr:deps/icu-small/source/common/sharedobject.cpp -vptr:deps/icu-small/source/i18n/coll.cpp -nonnull-attribute:deps/v8/src/snapshot/snapshot-source-sink.h diff --git a/test/addons/report-fatalerror/test.js b/test/addons/report-fatalerror/test.js index 96ee7223c0f9ff..b5018565ea27a8 100644 --- a/test/addons/report-fatalerror/test.js +++ b/test/addons/report-fatalerror/test.js @@ -27,7 +27,7 @@ const ARGS = [ tmpdir.refresh(); const args = ['--report-on-fatalerror', ...ARGS]; const child = spawnSync(process.execPath, args, { cwd: tmpdir.path }); - assert.notStrictEqual(child.status, 0, 'Process exited unexpectedly'); + assert.notStrictEqual(child.status, 0); const reports = helper.findReports(child.pid, tmpdir.path); assert.strictEqual(reports.length, 1); @@ -46,7 +46,7 @@ const ARGS = [ // Verify that --report-on-fatalerror is respected when not set. const args = ARGS; const child = spawnSync(process.execPath, args, { cwd: tmpdir.path }); - assert.notStrictEqual(child.status, 0, 'Process exited unexpectedly'); + assert.notStrictEqual(child.status, 0); const reports = helper.findReports(child.pid, tmpdir.path); assert.strictEqual(reports.length, 0); } diff --git a/test/benchmark/benchmark.status b/test/benchmark/benchmark.status index 9487ac00829783..ea666f58a73aeb 100644 --- a/test/benchmark/benchmark.status +++ b/test/benchmark/benchmark.status @@ -23,3 +23,5 @@ test-benchmark-crypto: PASS, FLAKY [$arch==arm] +[$system==ibmi] +test-benchmark-websocket: SKIP diff --git a/test/common/README.md b/test/common/README.md index a5b90abf1f006f..6c8fcf42847a17 100644 --- a/test/common/README.md +++ b/test/common/README.md @@ -14,7 +14,7 @@ several other tasks: don't accidentally pollute the global namespace. * Some tests assume a default umask of `0o022`. To enforce this assumption, - the common module sets the unmask at startup. Tests that require a + the common module sets the umask at startup. Tests that require a different umask can override this setting after loading the module. * Some tests specify runtime flags (example, `--expose-internals`) via a @@ -841,7 +841,7 @@ frames for testing of HTTP/2 endpoints const http2 = require('../common/http2'); ``` -### Class: Frame +### Class: `Frame` The `http2.Frame` is a base class that creates a `Buffer` containing a serialized HTTP/2 frame header. @@ -861,7 +861,7 @@ socket.write(frame.data); The serialized `Buffer` may be retrieved using the `frame.data` property. -### Class: HeadersFrame +### Class: `HeadersFrame` The `http2.HeadersFrame` is a subclass of `http2.Frame` that serializes a `HEADERS` frame. 
@@ -880,7 +880,7 @@ const frame = new http2.HeadersFrame(id, payload, padlen, final);
 socket.write(frame.data);
 ```
 
-### Class: SettingsFrame
+### Class: `SettingsFrame`
 
 The `http2.SettingsFrame` is a subclass of `http2.Frame` that serializes an
 empty `SETTINGS` frame.
@@ -1138,7 +1138,7 @@ See the source code for definitions.
 Please avoid using it in new code - the current usage of this port in tests
 is being migrated to the original WPT harness, see [the WPT tests README][].
 
-### Class: WPTRunner
+### Class: `WPTRunner`
 
 A driver class for running WPT with the WPT harness in a worker thread.
diff --git a/test/common/debugger.js b/test/common/debugger.js
index dfa040aa453bec..1a258913e00dd7 100644
--- a/test/common/debugger.js
+++ b/test/common/debugger.js
@@ -20,10 +20,14 @@ function isPreBreak(output) {
   return /Break on start/.test(output) && /1 \(function \(exports/.test(output);
 }
 
-function startCLI(args, flags = [], spawnOpts = {}) {
+function startCLI(args, flags = [], spawnOpts = {}, opts = { randomPort: true }) {
   let stderrOutput = '';
-  const child =
-    spawn(process.execPath, [...flags, 'inspect', ...args], spawnOpts);
+  const child = spawn(process.execPath, [
+    ...flags,
+    'inspect',
+    ...(opts.randomPort !== false ? ['--port=0'] : []),
+    ...args,
+  ], spawnOpts);
 
   const outputBuffer = [];
   function bufferOutput(chunk) {
diff --git a/test/common/tls.js b/test/common/tls.js
index bbc37df19c5549..e59255191b4290 100644
--- a/test/common/tls.js
+++ b/test/common/tls.js
@@ -186,4 +186,36 @@ exports.assertIsCAArray = function assertIsCAArray(certs) {
   }
 };
+function extractMetadata(cert) {
+  const x509 = new crypto.X509Certificate(cert);
+  return {
+    serialNumber: x509.serialNumber,
+    issuer: x509.issuer,
+    subject: x509.subject,
+  };
+}
+
+// To compare two certificates, we can just compare serialNumber, issuer,
+// and subject like X509_comp(). We can't just compare two strings because
+// the line endings or order of the fields may differ after PEM serdes by
+// OpenSSL.
+exports.assertEqualCerts = function assertEqualCerts(a, b) { + const setA = new Set(a.map(extractMetadata)); + const setB = new Set(b.map(extractMetadata)); + assert.deepStrictEqual(setA, setB); +}; + +exports.includesCert = function includesCert(certs, cert) { + const metadata = extractMetadata(cert); + for (const c of certs) { + const cMetadata = extractMetadata(c); + if (cMetadata.serialNumber === metadata.serialNumber && + cMetadata.issuer === metadata.issuer && + cMetadata.subject === metadata.subject) { + return true; + } + } + return false; +}; + exports.TestTLSSocket = TestTLSSocket; diff --git a/test/es-module/es-module.status b/test/es-module/es-module.status index 32b712cbef0c8f..376b9cdeb07b78 100644 --- a/test/es-module/es-module.status +++ b/test/es-module/es-module.status @@ -16,4 +16,4 @@ test-wasm-web-api: SKIP [$system==ibmi] # https://github.com/nodejs/node/issues/58582 -test-wasm-web-api: PASS,FLAKY +test-wasm-web-api: SKIP diff --git a/test/es-module/test-esm-wasm.mjs b/test/es-module/test-esm-wasm.mjs index a6f056b823172f..de0aa7929789ec 100644 --- a/test/es-module/test-esm-wasm.mjs +++ b/test/es-module/test-esm-wasm.mjs @@ -9,7 +9,6 @@ describe('ESM: WASM modules', { concurrency: !process.env.TEST_PARALLEL }, () => it('should load exports', async () => { const { code, stderr, stdout } = await spawnPromisified(execPath, [ '--no-warnings', - '--experimental-wasm-modules', '--input-type=module', '--eval', [ @@ -32,7 +31,6 @@ describe('ESM: WASM modules', { concurrency: !process.env.TEST_PARALLEL }, () => it('should not allow code injection through export names', async () => { const { code, stderr, stdout } = await spawnPromisified(execPath, [ '--no-warnings', - '--experimental-wasm-modules', '--input-type=module', '--eval', `import * as wasmExports from ${JSON.stringify(fixtures.fileURL('es-modules/export-name-code-injection.wasm'))};`, @@ -46,7 +44,6 @@ describe('ESM: WASM modules', { concurrency: !process.env.TEST_PARALLEL }, () => it('should allow non-identifier export names', async () => { const { code, stderr, stdout } = await spawnPromisified(execPath, [ '--no-warnings', - '--experimental-wasm-modules', '--input-type=module', '--eval', [ @@ -64,7 +61,6 @@ describe('ESM: WASM modules', { concurrency: !process.env.TEST_PARALLEL }, () => it('should properly handle all WebAssembly global types', async () => { const { code, stderr, stdout } = await spawnPromisified(execPath, [ '--no-warnings', - '--experimental-wasm-modules', '--input-type=module', '--eval', [ @@ -211,7 +207,6 @@ describe('ESM: WASM modules', { concurrency: !process.env.TEST_PARALLEL }, () => it('should properly escape import names as well', async () => { const { code, stderr, stdout } = await spawnPromisified(execPath, [ '--no-warnings', - '--experimental-wasm-modules', '--input-type=module', '--eval', [ @@ -226,22 +221,20 @@ describe('ESM: WASM modules', { concurrency: !process.env.TEST_PARALLEL }, () => strictEqual(code, 0); }); - it('should emit experimental warning', async () => { + it('should emit experimental warning for module instances', async () => { const { code, signal, stderr } = await spawnPromisified(execPath, [ - '--experimental-wasm-modules', fixtures.path('es-modules/wasm-modules.mjs'), ]); strictEqual(code, 0); strictEqual(signal, null); match(stderr, /ExperimentalWarning/); - match(stderr, /WebAssembly/); + match(stderr, /Importing WebAssembly module instances/); }); it('should support top-level execution', async () => { const { code, stderr, stdout } = await 
spawnPromisified(execPath, [ '--no-warnings', - '--experimental-wasm-modules', fixtures.path('es-modules/top-level-wasm.wasm'), ]); @@ -254,7 +247,6 @@ describe('ESM: WASM modules', { concurrency: !process.env.TEST_PARALLEL }, () => it.skip('should support static source phase imports', async () => { const { code, stderr, stdout } = await spawnPromisified(execPath, [ '--no-warnings', - '--experimental-wasm-modules', '--input-type=module', '--eval', [ @@ -276,7 +268,6 @@ describe('ESM: WASM modules', { concurrency: !process.env.TEST_PARALLEL }, () => it.skip('should support dynamic source phase imports', async () => { const { code, stderr, stdout } = await spawnPromisified(execPath, [ '--no-warnings', - '--experimental-wasm-modules', '--input-type=module', '--eval', [ @@ -299,7 +290,6 @@ describe('ESM: WASM modules', { concurrency: !process.env.TEST_PARALLEL }, () => it.skip('should not execute source phase imports', async () => { const { code, stderr, stdout } = await spawnPromisified(execPath, [ '--no-warnings', - '--experimental-wasm-modules', '--input-type=module', '--eval', [ @@ -319,7 +309,6 @@ describe('ESM: WASM modules', { concurrency: !process.env.TEST_PARALLEL }, () => it.skip('should not execute dynamic source phase imports', async () => { const { code, stderr, stdout } = await spawnPromisified(execPath, [ '--no-warnings', - '--experimental-wasm-modules', '--input-type=module', '--eval', `await import.source(${JSON.stringify(fixtures.fileURL('es-modules/unimportable.wasm'))})`, @@ -335,7 +324,6 @@ describe('ESM: WASM modules', { concurrency: !process.env.TEST_PARALLEL }, () => const fileUrl = fixtures.fileURL('es-modules/wasm-source-phase.js'); const { code, stderr, stdout } = await spawnPromisified(execPath, [ '--no-warnings', - '--experimental-wasm-modules', '--input-type=module', '--eval', [ @@ -358,7 +346,6 @@ describe('ESM: WASM modules', { concurrency: !process.env.TEST_PARALLEL }, () => const fileUrl = fixtures.fileURL('es-modules/wasm-source-phase.js'); const { code, stderr, stdout } = await spawnPromisified(execPath, [ '--no-warnings', - '--experimental-wasm-modules', '--input-type=module', '--eval', `import source nosource from ${JSON.stringify(fileUrl)};`, @@ -373,7 +360,6 @@ describe('ESM: WASM modules', { concurrency: !process.env.TEST_PARALLEL }, () => it.skip('should throw for vm source phase static import', async () => { const { code, stderr, stdout } = await spawnPromisified(execPath, [ '--no-warnings', - '--experimental-wasm-modules', '--experimental-vm-modules', '--input-type=module', '--eval', @@ -393,7 +379,6 @@ describe('ESM: WASM modules', { concurrency: !process.env.TEST_PARALLEL }, () => it.skip('should throw for vm source phase dynamic import', async () => { const { code, stderr, stdout } = await spawnPromisified(execPath, [ '--no-warnings', - '--experimental-wasm-modules', '--experimental-vm-modules', '--input-type=module', '--eval', @@ -410,4 +395,89 @@ describe('ESM: WASM modules', { concurrency: !process.env.TEST_PARALLEL }, () => strictEqual(stdout, ''); notStrictEqual(code, 0); }); + + it('should reject wasm: import names', async () => { + const { code, stderr, stdout } = await spawnPromisified(execPath, [ + '--no-warnings', + '--input-type=module', + '--eval', + `import(${JSON.stringify(fixtures.fileURL('es-modules/invalid-import-name.wasm'))})`, + ]); + + match(stderr, /Invalid Wasm import name/); + strictEqual(stdout, ''); + notStrictEqual(code, 0); + }); + + it('should reject wasm-js: import names', async () => { + const { code, stderr, stdout 
} = await spawnPromisified(execPath, [ + '--no-warnings', + '--input-type=module', + '--eval', + `import(${JSON.stringify(fixtures.fileURL('es-modules/invalid-import-name-wasm-js.wasm'))})`, + ]); + + match(stderr, /Invalid Wasm import name/); + strictEqual(stdout, ''); + notStrictEqual(code, 0); + }); + + it('should reject wasm-js: import module names', async () => { + const { code, stderr, stdout } = await spawnPromisified(execPath, [ + '--no-warnings', + '--input-type=module', + '--eval', + `import(${JSON.stringify(fixtures.fileURL('es-modules/invalid-import-module.wasm'))})`, + ]); + + match(stderr, /Invalid Wasm import/); + strictEqual(stdout, ''); + notStrictEqual(code, 0); + }); + + it('should reject wasm: export names', async () => { + const { code, stderr, stdout } = await spawnPromisified(execPath, [ + '--no-warnings', + '--input-type=module', + '--eval', + `import(${JSON.stringify(fixtures.fileURL('es-modules/invalid-export-name.wasm'))})`, + ]); + + match(stderr, /Invalid Wasm export/); + strictEqual(stdout, ''); + notStrictEqual(code, 0); + }); + + it('should reject wasm-js: export names', async () => { + const { code, stderr, stdout } = await spawnPromisified(execPath, [ + '--no-warnings', + '--input-type=module', + '--eval', + `import(${JSON.stringify(fixtures.fileURL('es-modules/invalid-export-name-wasm-js.wasm'))})`, + ]); + + match(stderr, /Invalid Wasm export/); + strictEqual(stdout, ''); + notStrictEqual(code, 0); + }); + + it('should support js-string builtins', async () => { + const { code, stderr, stdout } = await spawnPromisified(execPath, [ + '--no-warnings', + '--input-type=module', + '--eval', + [ + 'import { strictEqual } from "node:assert";', + `import * as wasmExports from ${JSON.stringify(fixtures.fileURL('es-modules/js-string-builtins.wasm'))};`, + 'strictEqual(wasmExports.getLength("hello"), 5);', + 'strictEqual(wasmExports.concatStrings("hello", " world"), "hello world");', + 'strictEqual(wasmExports.compareStrings("test", "test"), 1);', + 'strictEqual(wasmExports.compareStrings("test", "different"), 0);', + ].join('\n'), + ]); + + strictEqual(stderr, ''); + strictEqual(stdout, ''); + strictEqual(code, 0); + }); }); diff --git a/test/es-module/test-import-module-retry-require-errored.js b/test/es-module/test-import-module-retry-require-errored.js new file mode 100644 index 00000000000000..96203722f6c604 --- /dev/null +++ b/test/es-module/test-import-module-retry-require-errored.js @@ -0,0 +1,17 @@ +// This tests that after failing to import an ESM that rejects, +// retrying with require() still throws. + +'use strict'; +const common = require('../common'); +const assert = require('assert'); + +(async () => { + await assert.rejects(import('../fixtures/es-modules/throw-error.mjs'), { + message: 'test', + }); + assert.throws(() => { + require('../fixtures/es-modules/throw-error.mjs'); + }, { + message: 'test', + }); +})().then(common.mustCall()); diff --git a/test/es-module/test-require-module-retry-import-errored-2.js b/test/es-module/test-require-module-retry-import-errored-2.js new file mode 100644 index 00000000000000..34e743a7c6bc78 --- /dev/null +++ b/test/es-module/test-require-module-retry-import-errored-2.js @@ -0,0 +1,16 @@ +// This tests that after failing to require an ESM that throws, +// retrying with import() still rejects. 
+ +'use strict'; +const common = require('../common'); +const assert = require('assert'); + +assert.throws(() => { + require('../fixtures/es-modules/throw-error.mjs'); +}, { + message: 'test', +}); + +assert.rejects(import('../fixtures/es-modules/throw-error.mjs'), { + message: 'test', +}).then(common.mustCall()); diff --git a/test/es-module/test-require-module-tla-retry-import-2.js b/test/es-module/test-require-module-tla-retry-import-2.js index aa9c344dd398d7..2ca8e07449c2fa 100644 --- a/test/es-module/test-require-module-tla-retry-import-2.js +++ b/test/es-module/test-require-module-tla-retry-import-2.js @@ -23,4 +23,4 @@ async function test() { } // Run the test twice to check consistency after caching. -test().then(common.mustCall(test)).catch(common.mustNotCall()); +test().then(common.mustCall(test)); diff --git a/test/es-module/test-require-module-tla-retry-import.js b/test/es-module/test-require-module-tla-retry-import.js index 70f918fa4f463f..80b289fdfc8c43 100644 --- a/test/es-module/test-require-module-tla-retry-import.js +++ b/test/es-module/test-require-module-tla-retry-import.js @@ -22,4 +22,4 @@ async function test() { } // Run the test twice to check consistency after caching. -test().then(common.mustCall(test)).catch(common.mustNotCall()); +test().then(common.mustCall(test)); diff --git a/test/es-module/test-vm-main-context-default-loader-eval.js b/test/es-module/test-vm-main-context-default-loader-eval.js index 3226c3512f4887..7bde86ba551b6c 100644 --- a/test/es-module/test-vm-main-context-default-loader-eval.js +++ b/test/es-module/test-vm-main-context-default-loader-eval.js @@ -70,4 +70,4 @@ async function main() { } } -main().catch(common.mustNotCall()); +main().then(common.mustCall()); diff --git a/test/es-module/test-vm-main-context-default-loader.js b/test/es-module/test-vm-main-context-default-loader.js index 9e45bbd9a12a4e..d2ac6021df3837 100644 --- a/test/es-module/test-vm-main-context-default-loader.js +++ b/test/es-module/test-vm-main-context-default-loader.js @@ -116,4 +116,4 @@ async function main() { } } -main().catch(common.mustNotCall()); +main().then(common.mustCall()); diff --git a/test/eslint.config_partial.mjs b/test/eslint.config_partial.mjs index b91112deee704f..859dba5943993e 100644 --- a/test/eslint.config_partial.mjs +++ b/test/eslint.config_partial.mjs @@ -35,6 +35,10 @@ export default [ selector: "CallExpression:matches([callee.name='deepStrictEqual'], [callee.property.name='deepStrictEqual'])[arguments.2.type='Literal']", message: 'Do not use a literal for the third argument of assert.deepStrictEqual()', }, + { + selector: "CallExpression:matches([callee.name='notDeepStrictEqual'], [callee.property.name='deepStrictEqual'])[arguments.2.type='Literal']", + message: 'Do not use a literal for the third argument of assert.notDeepStrictEqual()', + }, { selector: "CallExpression:matches([callee.name='doesNotThrow'], [callee.property.name='doesNotThrow'])", message: 'Do not use `assert.doesNotThrow()`. 
Write the code without the wrapper and add a comment instead.', @@ -47,10 +51,18 @@ export default [ selector: "CallExpression:matches([callee.name='rejects'], [callee.property.name='rejects'])[arguments.length<2]", message: '`assert.rejects()` must be invoked with at least two arguments.', }, + { + selector: "CallExpression[callee.property.name='notStrictEqual'][arguments.2.type='Literal']", + message: 'Do not use a literal for the third argument of assert.notStrictEqual()', + }, { selector: "CallExpression[callee.property.name='strictEqual'][arguments.2.type='Literal']", message: 'Do not use a literal for the third argument of assert.strictEqual()', }, + { + selector: "CallExpression[callee.name='assert'][arguments.1.type='Literal']:not([arguments.1.raw=/['\"`].*/])", + message: 'Do not use a non-string literal for the second argument of assert()', + }, { selector: "CallExpression:matches([callee.name='throws'], [callee.property.name='throws'])[arguments.1.type='Literal']:not([arguments.1.regex])", message: 'Use an object as second argument of `assert.throws()`.', @@ -91,6 +103,10 @@ export default [ selector: "ExpressionStatement>CallExpression:matches([callee.name='rejects'], [callee.object.name='assert'][callee.property.name='rejects'])", message: 'Calling `assert.rejects` without `await` or `.then(common.mustCall())` will not detect never-settling promises.', }, + { + selector: 'CallExpression[callee.property.name="catch"]>:first-child:matches(CallExpression[callee.object.name="common"][callee.property.name="mustNotCall"], CallExpression[callee.name="mustNotCall"])', + message: 'Calling `.catch(common.mustNotCall())` will not detect never-settling promises. Use `.then(common.mustCall())` instead.', + }, ], // Stylistic rules. diff --git a/test/fixtures/errors/error_exit.snapshot b/test/fixtures/errors/error_exit.snapshot index 778165dc25c4fc..9594e08b4dadf9 100644 --- a/test/fixtures/errors/error_exit.snapshot +++ b/test/fixtures/errors/error_exit.snapshot @@ -12,7 +12,8 @@ AssertionError [ERR_ASSERTION]: Expected values to be strictly equal: code: 'ERR_ASSERTION', actual: 1, expected: 2, - operator: 'strictEqual' + operator: 'strictEqual', + diff: 'simple' } Node.js * diff --git a/test/fixtures/errors/if-error-has-good-stack.snapshot b/test/fixtures/errors/if-error-has-good-stack.snapshot index 9296b25f10b7c6..ba76800b970028 100644 --- a/test/fixtures/errors/if-error-has-good-stack.snapshot +++ b/test/fixtures/errors/if-error-has-good-stack.snapshot @@ -19,7 +19,8 @@ AssertionError [ERR_ASSERTION]: ifError got unwanted exception: test error at a (*if-error-has-good-stack.js:*:*) at Object. 
(*if-error-has-good-stack.js:*:*), expected: null, - operator: 'ifError' + operator: 'ifError', + diff: 'simple' } Node.js * diff --git a/test/fixtures/es-modules/custom-condition/load.cjs b/test/fixtures/es-modules/custom-condition/load.cjs new file mode 100644 index 00000000000000..2fbeae9e71d485 --- /dev/null +++ b/test/fixtures/es-modules/custom-condition/load.cjs @@ -0,0 +1,6 @@ +exports.cjs = function(key) { + return require(key); +}; +exports.esm = function(key) { + return import(key); +}; diff --git a/test/fixtures/es-modules/custom-condition/node_modules/foo/default.cjs b/test/fixtures/es-modules/custom-condition/node_modules/foo/default.cjs new file mode 100644 index 00000000000000..fd595339e01723 --- /dev/null +++ b/test/fixtures/es-modules/custom-condition/node_modules/foo/default.cjs @@ -0,0 +1,2 @@ +exports.result = 'default'; + diff --git a/test/fixtures/es-modules/custom-condition/node_modules/foo/foo-esm.mjs b/test/fixtures/es-modules/custom-condition/node_modules/foo/foo-esm.mjs new file mode 100644 index 00000000000000..87499b2e3f3e16 --- /dev/null +++ b/test/fixtures/es-modules/custom-condition/node_modules/foo/foo-esm.mjs @@ -0,0 +1,2 @@ +export const result = 'foo-esm'; + diff --git a/test/fixtures/es-modules/custom-condition/node_modules/foo/foo.cjs b/test/fixtures/es-modules/custom-condition/node_modules/foo/foo.cjs new file mode 100644 index 00000000000000..a948034aa47ae6 --- /dev/null +++ b/test/fixtures/es-modules/custom-condition/node_modules/foo/foo.cjs @@ -0,0 +1,2 @@ +exports.result = 'foo'; + diff --git a/test/fixtures/es-modules/custom-condition/node_modules/foo/package.json b/test/fixtures/es-modules/custom-condition/node_modules/foo/package.json new file mode 100644 index 00000000000000..ee7eab9fc74963 --- /dev/null +++ b/test/fixtures/es-modules/custom-condition/node_modules/foo/package.json @@ -0,0 +1,28 @@ +{ + "exports": { + ".": { + "foo": "./foo.cjs", + "foo-esm": "./foo-esm.mjs", + "default": "./default.cjs" + }, + "./second": { + "foo": "./foo.cjs", + "foo-esm": "./foo-esm.mjs", + "default": "./default.cjs" + }, + "./third": { + "foo": "./foo.cjs", + "foo-esm": "./foo-esm.mjs", + "default": "./default.cjs" + }, + "./fourth": { + "foo": "./foo.cjs", + "foo-esm": "./foo-esm.mjs", + "default": "./default.cjs" + }, + "./no-default": { + "foo": "./foo.cjs", + "foo-esm": "./foo-esm.mjs" + } + } +} \ No newline at end of file diff --git a/test/fixtures/es-modules/invalid-export-name-wasm-js.wasm b/test/fixtures/es-modules/invalid-export-name-wasm-js.wasm new file mode 100644 index 00000000000000..a6b9a7f7c5ad57 Binary files /dev/null and b/test/fixtures/es-modules/invalid-export-name-wasm-js.wasm differ diff --git a/test/fixtures/es-modules/invalid-export-name-wasm-js.wat b/test/fixtures/es-modules/invalid-export-name-wasm-js.wat new file mode 100644 index 00000000000000..fc440ee341d74a --- /dev/null +++ b/test/fixtures/es-modules/invalid-export-name-wasm-js.wat @@ -0,0 +1,7 @@ +;; Test WASM module with invalid export name starting with 'wasm-js:' +(module + (func $test (result i32) + i32.const 42 + ) + (export "wasm-js:invalid" (func $test)) +) \ No newline at end of file diff --git a/test/fixtures/es-modules/invalid-export-name.wasm b/test/fixtures/es-modules/invalid-export-name.wasm new file mode 100644 index 00000000000000..e66fcebf15ba4d Binary files /dev/null and b/test/fixtures/es-modules/invalid-export-name.wasm differ diff --git a/test/fixtures/es-modules/invalid-export-name.wat b/test/fixtures/es-modules/invalid-export-name.wat new file mode 
100644 index 00000000000000..ef99fef9cfa52f --- /dev/null +++ b/test/fixtures/es-modules/invalid-export-name.wat @@ -0,0 +1,7 @@ +;; Test WASM module with invalid export name starting with 'wasm:' +(module + (func $test (result i32) + i32.const 42 + ) + (export "wasm:invalid" (func $test)) +) \ No newline at end of file diff --git a/test/fixtures/es-modules/invalid-import-module.wasm b/test/fixtures/es-modules/invalid-import-module.wasm new file mode 100644 index 00000000000000..ead151ac0c84ad Binary files /dev/null and b/test/fixtures/es-modules/invalid-import-module.wasm differ diff --git a/test/fixtures/es-modules/invalid-import-module.wat b/test/fixtures/es-modules/invalid-import-module.wat new file mode 100644 index 00000000000000..63aacba332808c --- /dev/null +++ b/test/fixtures/es-modules/invalid-import-module.wat @@ -0,0 +1,8 @@ +;; Test WASM module with invalid import module name starting with 'wasm-js:' +(module + (import "wasm-js:invalid" "test" (func $invalidImport (result i32))) + (export "test" (func $test)) + (func $test (result i32) + call $invalidImport + ) +) \ No newline at end of file diff --git a/test/fixtures/es-modules/invalid-import-name-wasm-js.wasm b/test/fixtures/es-modules/invalid-import-name-wasm-js.wasm new file mode 100644 index 00000000000000..15131a28a89e11 Binary files /dev/null and b/test/fixtures/es-modules/invalid-import-name-wasm-js.wasm differ diff --git a/test/fixtures/es-modules/invalid-import-name-wasm-js.wat b/test/fixtures/es-modules/invalid-import-name-wasm-js.wat new file mode 100644 index 00000000000000..cb4d3eaf162818 --- /dev/null +++ b/test/fixtures/es-modules/invalid-import-name-wasm-js.wat @@ -0,0 +1,8 @@ +;; Test WASM module with invalid import name starting with 'wasm-js:' +(module + (import "test" "wasm-js:invalid" (func $invalidImport (result i32))) + (export "test" (func $test)) + (func $test (result i32) + call $invalidImport + ) +) \ No newline at end of file diff --git a/test/fixtures/es-modules/invalid-import-name.wasm b/test/fixtures/es-modules/invalid-import-name.wasm new file mode 100644 index 00000000000000..3c631418294584 Binary files /dev/null and b/test/fixtures/es-modules/invalid-import-name.wasm differ diff --git a/test/fixtures/es-modules/invalid-import-name.wat b/test/fixtures/es-modules/invalid-import-name.wat new file mode 100644 index 00000000000000..1aae87aaed4840 --- /dev/null +++ b/test/fixtures/es-modules/invalid-import-name.wat @@ -0,0 +1,8 @@ +;; Test WASM module with invalid import name starting with 'wasm:' +(module + (import "test" "wasm:invalid" (func $invalidImport (result i32))) + (export "test" (func $test)) + (func $test (result i32) + call $invalidImport + ) +) \ No newline at end of file diff --git a/test/fixtures/es-modules/js-string-builtins.wasm b/test/fixtures/es-modules/js-string-builtins.wasm new file mode 100644 index 00000000000000..b4c08587dd08e7 Binary files /dev/null and b/test/fixtures/es-modules/js-string-builtins.wasm differ diff --git a/test/fixtures/es-modules/js-string-builtins.wat b/test/fixtures/es-modules/js-string-builtins.wat new file mode 100644 index 00000000000000..cc404e8ca6f073 --- /dev/null +++ b/test/fixtures/es-modules/js-string-builtins.wat @@ -0,0 +1,29 @@ +;; Test WASM module using js-string builtins +(module + ;; Import js-string builtins with correct signatures + (import "wasm:js-string" "length" (func $string_length (param externref) (result i32))) + (import "wasm:js-string" "concat" (func $string_concat (param externref externref) (result (ref extern)))) + (import 
"wasm:js-string" "equals" (func $string_equals (param externref externref) (result i32))) + + ;; Export functions that use the builtins + (export "getLength" (func $get_length)) + (export "concatStrings" (func $concat_strings)) + (export "compareStrings" (func $compare_strings)) + + (func $get_length (param $str externref) (result i32) + local.get $str + call $string_length + ) + + (func $concat_strings (param $str1 externref) (param $str2 externref) (result (ref extern)) + local.get $str1 + local.get $str2 + call $string_concat + ) + + (func $compare_strings (param $str1 externref) (param $str2 externref) (result i32) + local.get $str1 + local.get $str2 + call $string_equals + ) +) \ No newline at end of file diff --git a/test/fixtures/inspector-network-resource/app.js.map b/test/fixtures/inspector-network-resource/app.js.map new file mode 100644 index 00000000000000..ca4bb9159baeaa --- /dev/null +++ b/test/fixtures/inspector-network-resource/app.js.map @@ -0,0 +1,10 @@ +{ + "version": 3, + "file": "app.js", + "sourceRoot": "", + "sources": [ + "http://localhost:3000/app.ts" + ], + "names": [], + "mappings": ";AAAA,SAAS,GAAG,CAAC,CAAS,EAAE,CAAS;IAC/B,OAAO,CAAC,GAAG,CAAC,CAAC;AACf,CAAC;AAED,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC" +} diff --git a/test/fixtures/source-map/output/source_map_throw_class_method.js b/test/fixtures/source-map/output/source_map_throw_class_method.js new file mode 100644 index 00000000000000..c8c4b722a41829 --- /dev/null +++ b/test/fixtures/source-map/output/source_map_throw_class_method.js @@ -0,0 +1,7 @@ +'use strict'; + +// Flags: --enable-source-maps + +require('../../../common'); +Error.stackTraceLimit = 2; +require('../throw-class-method.min.js'); diff --git a/test/fixtures/source-map/output/source_map_throw_class_method.snapshot b/test/fixtures/source-map/output/source_map_throw_class_method.snapshot new file mode 100644 index 00000000000000..a6b7984d4c7c76 --- /dev/null +++ b/test/fixtures/source-map/output/source_map_throw_class_method.snapshot @@ -0,0 +1,6 @@ +Error: This is a test + at Foo.bar (*/test/fixtures/source-map/throw-class-method.js:3:11) + at Object. (*/test/fixtures/source-map/throw-class-method.js:12:7) +Error: This is a test + at Bar.bar (*/test/fixtures/source-map/throw-class-method.js:3:11) + at Object. 
(*/test/fixtures/source-map/throw-class-method.js:19:7) diff --git a/test/fixtures/source-map/throw-class-method.js b/test/fixtures/source-map/throw-class-method.js new file mode 100644 index 00000000000000..2bfa9ce06675dd --- /dev/null +++ b/test/fixtures/source-map/throw-class-method.js @@ -0,0 +1,27 @@ +class Foo { + bar() { + throw Error('This is a test'); + } +} + +class Bar {} +Bar.prototype.bar = Foo.prototype.bar; + +try { + const foo = new Foo(); + foo.bar(); +} catch (e) { + console.error(e); +} + +try { + const bar = Object.create(Bar.prototype); + bar.bar(); +} catch (e) { + console.error(e); +} + +// To recreate: +// +// cd test/fixtures/source-map +// npx terser -o throw-class-method.min.js --source-map "url='throw-class-method.min.js.map'" throw-class-method.js diff --git a/test/fixtures/source-map/throw-class-method.min.js b/test/fixtures/source-map/throw-class-method.min.js new file mode 100644 index 00000000000000..eba49ea1c24b54 --- /dev/null +++ b/test/fixtures/source-map/throw-class-method.min.js @@ -0,0 +1,2 @@ +class Foo{bar(){throw Error("This is a test")}}class Bar{}Bar.prototype.bar=Foo.prototype.bar;try{const foo=new Foo;foo.bar()}catch(e){console.error(e)}try{const bar=Object.create(Bar.prototype);bar.bar()}catch(e){console.error(e)} +//# sourceMappingURL=throw-class-method.min.js.map \ No newline at end of file diff --git a/test/fixtures/source-map/throw-class-method.min.js.map b/test/fixtures/source-map/throw-class-method.min.js.map new file mode 100644 index 00000000000000..5be313d68fd551 --- /dev/null +++ b/test/fixtures/source-map/throw-class-method.min.js.map @@ -0,0 +1 @@ +{"version":3,"names":["Foo","bar","Error","Bar","prototype","foo","e","console","error","Object","create"],"sources":["throw-class-method.js"],"mappings":"AAAA,MAAMA,IACJ,GAAAC,GACE,MAAMC,MAAM,iBACd,EAGF,MAAMC,KACNA,IAAIC,UAAUH,IAAMD,IAAII,UAAUH,IAElC,IACE,MAAMI,IAAM,IAAIL,IAChBK,IAAIJ,KACN,CAAE,MAAOK,GACPC,QAAQC,MAAMF,EAChB,CAEA,IACE,MAAML,IAAMQ,OAAOC,OAAOP,IAAIC,WAC9BH,IAAIA,KACN,CAAE,MAAOK,GACPC,QAAQC,MAAMF,EAChB","ignoreList":[]} \ No newline at end of file diff --git a/test/fixtures/test-runner/output/assertion-color-tty.snapshot b/test/fixtures/test-runner/output/assertion-color-tty.snapshot index a74016febc5df4..4409d6f5e3e939 100644 --- a/test/fixtures/test-runner/output/assertion-color-tty.snapshot +++ b/test/fixtures/test-runner/output/assertion-color-tty.snapshot @@ -21,5 +21,6 @@ code: [32m'ERR_ASSERTION'[39m, actual: [32m'!Hello World'[39m, expected: [32m'Hello World!'[39m, - operator: [32m'strictEqual'[39m + operator: [32m'strictEqual'[39m, + diff: [32m'simple'[39m } diff --git a/test/fixtures/test-runner/output/dot_reporter.snapshot b/test/fixtures/test-runner/output/dot_reporter.snapshot index 5f2bf18e1d0137..94af64f7367a64 100644 --- a/test/fixtures/test-runner/output/dot_reporter.snapshot +++ b/test/fixtures/test-runner/output/dot_reporter.snapshot @@ -66,7 +66,8 @@ Failed tests: code: 'ERR_ASSERTION', actual: true, expected: false, - operator: 'strictEqual' + operator: 'strictEqual', + diff: 'simple' } ✖ reject fail (*ms) Error: rejected from reject fail @@ -219,7 +220,8 @@ Failed tests: code: 'ERR_ASSERTION', actual: [Object], expected: [Object], - operator: 'deepEqual' + operator: 'deepEqual', + diff: 'simple' } ✖ invalid subtest fail (*ms) 'test could not be started because its parent finished' diff --git a/test/fixtures/test-runner/output/junit_reporter.snapshot b/test/fixtures/test-runner/output/junit_reporter.snapshot index d244e7dd594bb3..23675adff631b3 100644 
--- a/test/fixtures/test-runner/output/junit_reporter.snapshot +++ b/test/fixtures/test-runner/output/junit_reporter.snapshot @@ -124,7 +124,8 @@ true !== false code: 'ERR_ASSERTION', actual: true, expected: false, - operator: 'strictEqual' + operator: 'strictEqual', + diff: 'simple' } } @@ -499,7 +500,8 @@ should loosely deep-equal code: 'ERR_ASSERTION', actual: [Object], expected: [Object], - operator: 'deepEqual' + operator: 'deepEqual', + diff: 'simple' } } diff --git a/test/fixtures/test-runner/output/spec_reporter.snapshot b/test/fixtures/test-runner/output/spec_reporter.snapshot index 175a25000f1e60..2ed41a030364b0 100644 --- a/test/fixtures/test-runner/output/spec_reporter.snapshot +++ b/test/fixtures/test-runner/output/spec_reporter.snapshot @@ -175,7 +175,8 @@ code: 'ERR_ASSERTION', actual: true, expected: false, - operator: 'strictEqual' + operator: 'strictEqual', + diff: 'simple' } * @@ -364,7 +365,8 @@ code: 'ERR_ASSERTION', actual: [Object], expected: [Object], - operator: 'deepEqual' + operator: 'deepEqual', + diff: 'simple' } * diff --git a/test/fixtures/test-runner/output/spec_reporter_cli.snapshot b/test/fixtures/test-runner/output/spec_reporter_cli.snapshot index a6381bc50453cf..b1573e682c9931 100644 --- a/test/fixtures/test-runner/output/spec_reporter_cli.snapshot +++ b/test/fixtures/test-runner/output/spec_reporter_cli.snapshot @@ -175,7 +175,8 @@ code: 'ERR_ASSERTION', actual: true, expected: false, - operator: 'strictEqual' + operator: 'strictEqual', + diff: 'simple' } * @@ -364,7 +365,8 @@ code: 'ERR_ASSERTION', actual: { foo: 1, bar: 1, boo: [ 1 ], baz: { date: 1970-01-01T00:00:00.000Z, null: null, number: 1, string: 'Hello', undefined: undefined } }, expected: { boo: [ 1 ], baz: { date: 1970-01-01T00:00:00.000Z, null: null, number: 1, string: 'Hello', undefined: undefined }, circular: { bar: 2, c: [Circular *1] } }, - operator: 'deepEqual' + operator: 'deepEqual', + diff: 'simple' } * diff --git a/test/fixtures/tls-extra-ca-override.js b/test/fixtures/tls-extra-ca-override.js new file mode 100644 index 00000000000000..9d7065ba4f2462 --- /dev/null +++ b/test/fixtures/tls-extra-ca-override.js @@ -0,0 +1,50 @@ +'use strict'; + +// Test script for overidding NODE_EXTRA_CA_CERTS with tls.setDefaultCACertificates(). + +const tls = require('tls'); +const assert = require('assert'); +const { assertEqualCerts, includesCert } = require('../common/tls'); + +// Assert that NODE_EXTRA_CA_CERTS is set +assert(process.env.NODE_EXTRA_CA_CERTS, 'NODE_EXTRA_CA_CERTS environment variable should be set'); + +// Get initial state with extra CA +const initialDefaults = tls.getCACertificates('default'); +const systemCerts = tls.getCACertificates('system'); +const bundledCerts = tls.getCACertificates('bundled'); +const extraCerts = tls.getCACertificates('extra'); + +// For this test to work the extra certs must not be in bundled certs +assert.notStrictEqual(bundledCerts.length, 0); +for (const cert of extraCerts) { + assert(!includesCert(bundledCerts, cert)); +} + +// Test setting it to initial defaults. +tls.setDefaultCACertificates(initialDefaults); +assertEqualCerts(tls.getCACertificates('default'), initialDefaults); +assertEqualCerts(tls.getCACertificates('default'), initialDefaults); + +// Test setting it to the bundled certificates. +tls.setDefaultCACertificates(bundledCerts); +assertEqualCerts(tls.getCACertificates('default'), bundledCerts); +assertEqualCerts(tls.getCACertificates('default'), bundledCerts); + +// Test setting it to just the extra certificates. 
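This fixture is designed to run in a child process with NODE_EXTRA_CA_CERTS pointing at a certificate that is not in the bundled store; a minimal, hypothetical driver sketch follows (all file paths are assumptions, the fixture itself performs the assertions).

```js
'use strict';
// Hypothetical driver for the tls-extra-ca-override.js fixture: the parent
// process only needs to provide NODE_EXTRA_CA_CERTS and check the exit code.
const { spawnSync } = require('child_process');
const path = require('path');

const fixture = path.join(__dirname, 'fixtures', 'tls-extra-ca-override.js');
const extraCA = path.join(__dirname, 'fixtures', 'keys', 'ca1-cert.pem');

const result = spawnSync(process.execPath, [fixture], {
  env: { ...process.env, NODE_EXTRA_CA_CERTS: extraCA },
  encoding: 'utf8',
});
if (result.status !== 0) {
  throw new Error(`fixture failed:\n${result.stderr}`);
}
```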
+tls.setDefaultCACertificates(extraCerts); +assertEqualCerts(tls.getCACertificates('default'), extraCerts); +assertEqualCerts(tls.getCACertificates('default'), extraCerts); + +// Test setting it to an empty array. +tls.setDefaultCACertificates([]); +assert.deepStrictEqual(tls.getCACertificates('default'), []); + +// Test bundled and extra certs are unaffected +assertEqualCerts(tls.getCACertificates('bundled'), bundledCerts); +assertEqualCerts(tls.getCACertificates('extra'), extraCerts); + +if (systemCerts.length > 0) { + // Test system certs are unaffected. + assertEqualCerts(tls.getCACertificates('system'), systemCerts); +} diff --git a/test/fixtures/wpt/README.md b/test/fixtures/wpt/README.md index 629528d15a4176..672f1653b91275 100644 --- a/test/fixtures/wpt/README.md +++ b/test/fixtures/wpt/README.md @@ -26,16 +26,16 @@ Last update: - interfaces: https://github.com/web-platform-tests/wpt/tree/e1b27be06b/interfaces - performance-timeline: https://github.com/web-platform-tests/wpt/tree/94caab7038/performance-timeline - resource-timing: https://github.com/web-platform-tests/wpt/tree/22d38586d0/resource-timing -- resources: https://github.com/web-platform-tests/wpt/tree/1e140d63ec/resources +- resources: https://github.com/web-platform-tests/wpt/tree/1d2c5fb36a/resources - streams: https://github.com/web-platform-tests/wpt/tree/bc9dcbbf1a/streams - url: https://github.com/web-platform-tests/wpt/tree/a23788b77a/url - user-timing: https://github.com/web-platform-tests/wpt/tree/5ae85bf826/user-timing - wasm/jsapi: https://github.com/web-platform-tests/wpt/tree/cde25e7e3c/wasm/jsapi - wasm/webapi: https://github.com/web-platform-tests/wpt/tree/fd1b23eeaa/wasm/webapi -- WebCryptoAPI: https://github.com/web-platform-tests/wpt/tree/591c95ce61/WebCryptoAPI -- webidl/ecmascript-binding/es-exceptions: https://github.com/web-platform-tests/wpt/tree/a370aad338/webidl/ecmascript-binding/es-exceptions +- WebCryptoAPI: https://github.com/web-platform-tests/wpt/tree/1d2c5fb36a/WebCryptoAPI +- webidl/ecmascript-binding/es-exceptions: https://github.com/web-platform-tests/wpt/tree/2f96fa1996/webidl/ecmascript-binding/es-exceptions - webmessaging/broadcastchannel: https://github.com/web-platform-tests/wpt/tree/6495c91853/webmessaging/broadcastchannel -- webstorage: https://github.com/web-platform-tests/wpt/tree/1291340aaa/webstorage +- webstorage: https://github.com/web-platform-tests/wpt/tree/1d2c5fb36a/webstorage [Web Platform Tests]: https://github.com/web-platform-tests/wpt [`git node wpt`]: https://github.com/nodejs/node-core-utils/blob/main/docs/git-node.md#git-node-wpt diff --git a/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/cfrg_curves_bits_curve448.https.any.js b/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/cfrg_curves_bits_curve448.tentative.https.any.js similarity index 100% rename from test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/cfrg_curves_bits_curve448.https.any.js rename to test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/cfrg_curves_bits_curve448.tentative.https.any.js diff --git a/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/cfrg_curves_keys_curve448.https.any.js b/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/cfrg_curves_keys_curve448.tentative.https.any.js similarity index 100% rename from test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/cfrg_curves_keys_curve448.https.any.js rename to test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/cfrg_curves_keys_curve448.tentative.https.any.js diff --git a/test/fixtures/wpt/WebCryptoAPI/generateKey/failures_Ed448.https.any.js 
b/test/fixtures/wpt/WebCryptoAPI/generateKey/failures_Ed448.tentative.https.any.js similarity index 100% rename from test/fixtures/wpt/WebCryptoAPI/generateKey/failures_Ed448.https.any.js rename to test/fixtures/wpt/WebCryptoAPI/generateKey/failures_Ed448.tentative.https.any.js diff --git a/test/fixtures/wpt/WebCryptoAPI/generateKey/failures_X448.https.any.js b/test/fixtures/wpt/WebCryptoAPI/generateKey/failures_X448.tentative.https.any.js similarity index 100% rename from test/fixtures/wpt/WebCryptoAPI/generateKey/failures_X448.https.any.js rename to test/fixtures/wpt/WebCryptoAPI/generateKey/failures_X448.tentative.https.any.js diff --git a/test/fixtures/wpt/WebCryptoAPI/generateKey/successes.js b/test/fixtures/wpt/WebCryptoAPI/generateKey/successes.js index a9a168e1adbf72..13c96b7c735fcb 100644 --- a/test/fixtures/wpt/WebCryptoAPI/generateKey/successes.js +++ b/test/fixtures/wpt/WebCryptoAPI/generateKey/successes.js @@ -75,7 +75,7 @@ function run_test(algorithmNames, slowTest) { }) .then(async function (result) { if (resultType === "CryptoKeyPair") { - await Promise.all([ + const [jwkPub,,, jwkPriv] = await Promise.all([ subtle.exportKey('jwk', result.publicKey), subtle.exportKey('spki', result.publicKey), result.publicKey.algorithm.name.startsWith('RSA') ? undefined : subtle.exportKey('raw', result.publicKey), @@ -84,6 +84,15 @@ function run_test(algorithmNames, slowTest) { subtle.exportKey('pkcs8', result.privateKey), ] : []) ]); + + if (extractable) { + // Test that the JWK public key is a superset of the JWK private key. + for (const [prop, value] of Object.entries(jwkPub)) { + if (prop !== 'key_ops') { + assert_equals(value, jwkPriv[prop], `Property ${prop} is equal in public and private JWK`); + } + } + } } else { if (extractable) { await Promise.all([ diff --git a/test/fixtures/wpt/WebCryptoAPI/generateKey/successes_Ed448.https.any.js b/test/fixtures/wpt/WebCryptoAPI/generateKey/successes_Ed448.tentative.https.any.js similarity index 100% rename from test/fixtures/wpt/WebCryptoAPI/generateKey/successes_Ed448.https.any.js rename to test/fixtures/wpt/WebCryptoAPI/generateKey/successes_Ed448.tentative.https.any.js diff --git a/test/fixtures/wpt/WebCryptoAPI/generateKey/successes_X448.https.any.js b/test/fixtures/wpt/WebCryptoAPI/generateKey/successes_X448.tentative.https.any.js similarity index 100% rename from test/fixtures/wpt/WebCryptoAPI/generateKey/successes_X448.https.any.js rename to test/fixtures/wpt/WebCryptoAPI/generateKey/successes_X448.tentative.https.any.js diff --git a/test/fixtures/wpt/WebCryptoAPI/getRandomValues.any.js b/test/fixtures/wpt/WebCryptoAPI/getRandomValues.any.js index 574134eb76dcd8..aecd38efd60bac 100644 --- a/test/fixtures/wpt/WebCryptoAPI/getRandomValues.any.js +++ b/test/fixtures/wpt/WebCryptoAPI/getRandomValues.any.js @@ -60,9 +60,9 @@ for (const array of arrays) { test(function() { const maxlength = 65536 / ctor.BYTES_PER_ELEMENT; - assert_throws_dom("QuotaExceededError", function() { - self.crypto.getRandomValues(new ctor(maxlength + 1)) - }, "crypto.getRandomValues length over 65536") + assert_throws_quotaexceedederror(() => { + self.crypto.getRandomValues(new ctor(maxlength + 1)); + }, null, null, "crypto.getRandomValues length over 65536"); }, "Large length: " + array); test(function() { diff --git a/test/fixtures/wpt/WebCryptoAPI/import_export/ec_importKey_failures_fixtures.js b/test/fixtures/wpt/WebCryptoAPI/import_export/ec_importKey_failures_fixtures.js index a2d25e816cbd73..dc0e11d551a922 100644 --- 
a/test/fixtures/wpt/WebCryptoAPI/import_export/ec_importKey_failures_fixtures.js +++ b/test/fixtures/wpt/WebCryptoAPI/import_export/ec_importKey_failures_fixtures.js @@ -20,11 +20,11 @@ function getMismatchedJWKKeyData(algorithm) { } function getMismatchedKtyField(algorithm) { - return mismatchedKtyField[algorithm.name]; + return mismatchedKtyField[algorithm.namedCurve]; } function getMismatchedCrvField(algorithm) { - return mismatchedCrvField[algorithm.name]; + return mismatchedCrvField[algorithm.namedCurve]; } var validKeyData = { diff --git a/test/fixtures/wpt/WebCryptoAPI/import_export/importKey_failures.js b/test/fixtures/wpt/WebCryptoAPI/import_export/importKey_failures.js index 39c70a85470dca..47d12f14500dff 100644 --- a/test/fixtures/wpt/WebCryptoAPI/import_export/importKey_failures.js +++ b/test/fixtures/wpt/WebCryptoAPI/import_export/importKey_failures.js @@ -243,7 +243,7 @@ function run_test(algorithmNames) { allAlgorithmSpecifiersFor(name).forEach(function(algorithm) { getValidKeyData(algorithm).forEach(function(test) { if (test.format === "jwk") { - var data = {crv: test.data.crv, kty: test.data.kty, d: test.data.d, x: test.data.x, d: test.data.d}; + var data = {crv: test.data.crv, kty: test.data.kty, d: test.data.d, x: test.data.x, y: test.data.y}; data.use = "invalid"; var usages = validUsages(vector, 'jwk', test.data); if (usages.length !== 0) diff --git a/test/fixtures/wpt/WebCryptoAPI/import_export/okp_importKey_Ed448.https.any.js b/test/fixtures/wpt/WebCryptoAPI/import_export/okp_importKey_Ed448.tentative.https.any.js similarity index 100% rename from test/fixtures/wpt/WebCryptoAPI/import_export/okp_importKey_Ed448.https.any.js rename to test/fixtures/wpt/WebCryptoAPI/import_export/okp_importKey_Ed448.tentative.https.any.js diff --git a/test/fixtures/wpt/WebCryptoAPI/import_export/okp_importKey_X448.https.any.js b/test/fixtures/wpt/WebCryptoAPI/import_export/okp_importKey_X448.tentative.https.any.js similarity index 100% rename from test/fixtures/wpt/WebCryptoAPI/import_export/okp_importKey_X448.https.any.js rename to test/fixtures/wpt/WebCryptoAPI/import_export/okp_importKey_X448.tentative.https.any.js diff --git a/test/fixtures/wpt/WebCryptoAPI/import_export/okp_importKey_failures_Ed448.https.any.js b/test/fixtures/wpt/WebCryptoAPI/import_export/okp_importKey_failures_Ed448.tentative.https.any.js similarity index 100% rename from test/fixtures/wpt/WebCryptoAPI/import_export/okp_importKey_failures_Ed448.https.any.js rename to test/fixtures/wpt/WebCryptoAPI/import_export/okp_importKey_failures_Ed448.tentative.https.any.js diff --git a/test/fixtures/wpt/WebCryptoAPI/import_export/okp_importKey_failures_X448.https.any.js b/test/fixtures/wpt/WebCryptoAPI/import_export/okp_importKey_failures_X448.tentative.https.any.js similarity index 100% rename from test/fixtures/wpt/WebCryptoAPI/import_export/okp_importKey_failures_X448.https.any.js rename to test/fixtures/wpt/WebCryptoAPI/import_export/okp_importKey_failures_X448.tentative.https.any.js diff --git a/test/fixtures/wpt/WebCryptoAPI/import_export/okp_importKey_failures_fixtures.js b/test/fixtures/wpt/WebCryptoAPI/import_export/okp_importKey_failures_fixtures.js index 9bedddc5c5944a..cac6db1dcac758 100644 --- a/test/fixtures/wpt/WebCryptoAPI/import_export/okp_importKey_failures_fixtures.js +++ b/test/fixtures/wpt/WebCryptoAPI/import_export/okp_importKey_failures_fixtures.js @@ -2,27 +2,27 @@ // helper functions that generate all possible test parameters for // different situations. 
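The fixture getters below are being relaxed to accept either an algorithm object or a bare algorithm name string; a small sketch of the lookup pattern, using hypothetical fixture data:

```js
// Sketch only: `algorithm.name || algorithm` lets one lookup table serve both
// specifier forms now produced by allAlgorithmSpecifiersFor(). The key and
// data below are hypothetical.
const validKeyData = { 'Ed25519': [{ format: 'raw', data: new Uint8Array(32) }] };

function getValidKeyData(algorithm) {
  return validKeyData[algorithm.name || algorithm];
}

getValidKeyData({ name: 'Ed25519' });  // object specifier
getValidKeyData('Ed25519');            // plain string specifier
```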
function getValidKeyData(algorithm) { - return validKeyData[algorithm.name]; + return validKeyData[algorithm.name || algorithm]; } function getBadKeyLengthData(algorithm) { - return badKeyLengthData[algorithm.name]; + return badKeyLengthData[algorithm.name || algorithm]; } function getMissingJWKFieldKeyData(algorithm) { - return missingJWKFieldKeyData[algorithm.name]; + return missingJWKFieldKeyData[algorithm.name || algorithm]; } function getMismatchedJWKKeyData(algorithm) { - return mismatchedJWKKeyData[algorithm.name]; + return mismatchedJWKKeyData[algorithm.name || algorithm]; } function getMismatchedKtyField(algorithm) { - return mismatchedKtyField[algorithm.name]; + return mismatchedKtyField[algorithm.name || algorithm]; } function getMismatchedCrvField(algorithm) { - return mismatchedCrvField[algorithm.name]; + return mismatchedCrvField[algorithm.name || algorithm]; } var validKeyData = { diff --git a/test/fixtures/wpt/WebCryptoAPI/sign_verify/eddsa_curve448.https.any.js b/test/fixtures/wpt/WebCryptoAPI/sign_verify/eddsa_curve448.tentative.https.any.js similarity index 100% rename from test/fixtures/wpt/WebCryptoAPI/sign_verify/eddsa_curve448.https.any.js rename to test/fixtures/wpt/WebCryptoAPI/sign_verify/eddsa_curve448.tentative.https.any.js diff --git a/test/fixtures/wpt/WebCryptoAPI/util/helpers.js b/test/fixtures/wpt/WebCryptoAPI/util/helpers.js index c60371dc6adac9..488eadf29c09da 100644 --- a/test/fixtures/wpt/WebCryptoAPI/util/helpers.js +++ b/test/fixtures/wpt/WebCryptoAPI/util/helpers.js @@ -93,6 +93,10 @@ function objectToString(obj) { // Is key a CryptoKey object with correct algorithm, extractable, and usages? // Is it a secret, private, or public kind of key? function assert_goodCryptoKey(key, algorithm, extractable, usages, kind) { + if (typeof algorithm === "string") { + algorithm = { name: algorithm }; + } + var correctUsages = []; var registeredAlgorithmName; @@ -203,6 +207,7 @@ function allAlgorithmSpecifiersFor(algorithmName) { results.push({name: algorithmName, namedCurve: curveName}); }); } else if (algorithmName.toUpperCase().substring(0, 1) === "X" || algorithmName.toUpperCase().substring(0, 2) === "ED") { + results.push(algorithmName); results.push({ name: algorithmName }); } diff --git a/test/fixtures/wpt/resources/channel.sub.js b/test/fixtures/wpt/resources/channel.sub.js index d93b3b3091947d..370d4f5905e3fa 100644 --- a/test/fixtures/wpt/resources/channel.sub.js +++ b/test/fixtures/wpt/resources/channel.sub.js @@ -511,7 +511,7 @@ * * @param {SendChannel|string} [dest] - Either a SendChannel * to the destination, or the UUID of the destination. If - * omitted, a new UUID is generated, which can be used when + * ommitted, a new UUID is generated, which can be used when * constructing the URL for the global. * */ diff --git a/test/fixtures/wpt/resources/check-layout-th.js b/test/fixtures/wpt/resources/check-layout-th.js index f14ca3246b8ea2..2965a25d146a1f 100644 --- a/test/fixtures/wpt/resources/check-layout-th.js +++ b/test/fixtures/wpt/resources/check-layout-th.js @@ -218,6 +218,7 @@ window.checkLayout = function(selectorList, callDone = true) nodes = Array.prototype.slice.call(nodes); var checkedLayout = false; Array.prototype.forEach.call(nodes, function(node) { + const title = node.title ? `: ${node.title}` : ''; test(function(t) { var container = node.parentNode.className == 'container' ? 
node.parentNode : node; var prefix = @@ -240,7 +241,7 @@ window.checkLayout = function(selectorList, callDone = true) } checkedLayout |= !passed; } - }, selectorList + ' ' + String(++testNumber)); + }, `${selectorList} ${++testNumber}${title}`); }); if (!checkedLayout) { console.error("No valid data-* attributes found in selector list : " + selectorList); diff --git a/test/fixtures/wpt/resources/check-layout.js b/test/fixtures/wpt/resources/check-layout.js deleted file mode 100644 index 8634481497d701..00000000000000 --- a/test/fixtures/wpt/resources/check-layout.js +++ /dev/null @@ -1,245 +0,0 @@ -(function() { - -function insertAfter(nodeToAdd, referenceNode) -{ - if (referenceNode == document.body) { - document.body.appendChild(nodeToAdd); - return; - } - - if (referenceNode.nextSibling) - referenceNode.parentNode.insertBefore(nodeToAdd, referenceNode.nextSibling); - else - referenceNode.parentNode.appendChild(nodeToAdd); -} - -function positionedAncestor(node) -{ - var ancestor = node.parentNode; - while (getComputedStyle(ancestor).position == 'static') - ancestor = ancestor.parentNode; - return ancestor; -} - -function checkSubtreeExpectedValues(parent, failures) -{ - var checkedLayout = checkExpectedValues(parent, failures); - Array.prototype.forEach.call(parent.childNodes, function(node) { - checkedLayout |= checkSubtreeExpectedValues(node, failures); - }); - return checkedLayout; -} - -function checkAttribute(output, node, attribute) -{ - var result = node.getAttribute && node.getAttribute(attribute); - output.checked |= !!result; - return result; -} - -function checkExpectedValues(node, failures) -{ - var output = { checked: false }; - var expectedWidth = checkAttribute(output, node, "data-expected-width"); - if (expectedWidth) { - if (isNaN(expectedWidth) || Math.abs(node.offsetWidth - expectedWidth) >= 1) - failures.push("Expected " + expectedWidth + " for width, but got " + node.offsetWidth + ". "); - } - - var expectedHeight = checkAttribute(output, node, "data-expected-height"); - if (expectedHeight) { - if (isNaN(expectedHeight) || Math.abs(node.offsetHeight - expectedHeight) >= 1) - failures.push("Expected " + expectedHeight + " for height, but got " + node.offsetHeight + ". "); - } - - var expectedOffset = checkAttribute(output, node, "data-offset-x"); - if (expectedOffset) { - if (isNaN(expectedOffset) || Math.abs(node.offsetLeft - expectedOffset) >= 1) - failures.push("Expected " + expectedOffset + " for offsetLeft, but got " + node.offsetLeft + ". "); - } - - var expectedOffset = checkAttribute(output, node, "data-offset-y"); - if (expectedOffset) { - if (isNaN(expectedOffset) || Math.abs(node.offsetTop - expectedOffset) >= 1) - failures.push("Expected " + expectedOffset + " for offsetTop, but got " + node.offsetTop + ". "); - } - - var expectedOffset = checkAttribute(output, node, "data-positioned-offset-x"); - if (expectedOffset) { - var actualOffset = node.getBoundingClientRect().left - positionedAncestor(node).getBoundingClientRect().left; - if (isNaN(expectedOffset) || Math.abs(actualOffset - expectedOffset) >= 1) - failures.push("Expected " + expectedOffset + " for getBoundingClientRect().left offset, but got " + actualOffset + ". 
"); - } - - var expectedOffset = checkAttribute(output, node, "data-positioned-offset-y"); - if (expectedOffset) { - var actualOffset = node.getBoundingClientRect().top - positionedAncestor(node).getBoundingClientRect().top; - if (isNaN(expectedOffset) || Math.abs(actualOffset - expectedOffset) >= 1) - failures.push("Expected " + expectedOffset + " for getBoundingClientRect().top offset, but got " + actualOffset + ". "); - } - - var expectedWidth = checkAttribute(output, node, "data-expected-client-width"); - if (expectedWidth) { - if (isNaN(expectedWidth) || Math.abs(node.clientWidth - expectedWidth) >= 1) - failures.push("Expected " + expectedWidth + " for clientWidth, but got " + node.clientWidth + ". "); - } - - var expectedHeight = checkAttribute(output, node, "data-expected-client-height"); - if (expectedHeight) { - if (isNaN(expectedHeight) || Math.abs(node.clientHeight - expectedHeight) >= 1) - failures.push("Expected " + expectedHeight + " for clientHeight, but got " + node.clientHeight + ". "); - } - - var expectedWidth = checkAttribute(output, node, "data-expected-scroll-width"); - if (expectedWidth) { - if (isNaN(expectedWidth) || Math.abs(node.scrollWidth - expectedWidth) >= 1) - failures.push("Expected " + expectedWidth + " for scrollWidth, but got " + node.scrollWidth + ". "); - } - - var expectedHeight = checkAttribute(output, node, "data-expected-scroll-height"); - if (expectedHeight) { - if (isNaN(expectedHeight) || Math.abs(node.scrollHeight - expectedHeight) >= 1) - failures.push("Expected " + expectedHeight + " for scrollHeight, but got " + node.scrollHeight + ". "); - } - - var expectedOffset = checkAttribute(output, node, "data-total-x"); - if (expectedOffset) { - var totalLeft = node.clientLeft + node.offsetLeft; - if (isNaN(expectedOffset) || Math.abs(totalLeft - expectedOffset) >= 1) - failures.push("Expected " + expectedOffset + " for clientLeft+offsetLeft, but got " + totalLeft + ", clientLeft: " + node.clientLeft + ", offsetLeft: " + node.offsetLeft + ". "); - } - - var expectedOffset = checkAttribute(output, node, "data-total-y"); - if (expectedOffset) { - var totalTop = node.clientTop + node.offsetTop; - if (isNaN(expectedOffset) || Math.abs(totalTop - expectedOffset) >= 1) - failures.push("Expected " + expectedOffset + " for clientTop+offsetTop, but got " + totalTop + ", clientTop: " + node.clientTop + ", + offsetTop: " + node.offsetTop + ". "); - } - - var expectedDisplay = checkAttribute(output, node, "data-expected-display"); - if (expectedDisplay) { - var actualDisplay = getComputedStyle(node).display; - if (actualDisplay != expectedDisplay) - failures.push("Expected " + expectedDisplay + " for display, but got " + actualDisplay + ". "); - } - - var expectedPaddingTop = checkAttribute(output, node, "data-expected-padding-top"); - if (expectedPaddingTop) { - var actualPaddingTop = getComputedStyle(node).paddingTop; - // Trim the unit "px" from the output. - actualPaddingTop = actualPaddingTop.substring(0, actualPaddingTop.length - 2); - if (actualPaddingTop != expectedPaddingTop) - failures.push("Expected " + expectedPaddingTop + " for padding-top, but got " + actualPaddingTop + ". "); - } - - var expectedPaddingBottom = checkAttribute(output, node, "data-expected-padding-bottom"); - if (expectedPaddingBottom) { - var actualPaddingBottom = getComputedStyle(node).paddingBottom; - // Trim the unit "px" from the output. 
- actualPaddingBottom = actualPaddingBottom.substring(0, actualPaddingBottom.length - 2); - if (actualPaddingBottom != expectedPaddingBottom) - failures.push("Expected " + expectedPaddingBottom + " for padding-bottom, but got " + actualPaddingBottom + ". "); - } - - var expectedPaddingLeft = checkAttribute(output, node, "data-expected-padding-left"); - if (expectedPaddingLeft) { - var actualPaddingLeft = getComputedStyle(node).paddingLeft; - // Trim the unit "px" from the output. - actualPaddingLeft = actualPaddingLeft.substring(0, actualPaddingLeft.length - 2); - if (actualPaddingLeft != expectedPaddingLeft) - failures.push("Expected " + expectedPaddingLeft + " for padding-left, but got " + actualPaddingLeft + ". "); - } - - var expectedPaddingRight = checkAttribute(output, node, "data-expected-padding-right"); - if (expectedPaddingRight) { - var actualPaddingRight = getComputedStyle(node).paddingRight; - // Trim the unit "px" from the output. - actualPaddingRight = actualPaddingRight.substring(0, actualPaddingRight.length - 2); - if (actualPaddingRight != expectedPaddingRight) - failures.push("Expected " + expectedPaddingRight + " for padding-right, but got " + actualPaddingRight + ". "); - } - - var expectedMarginTop = checkAttribute(output, node, "data-expected-margin-top"); - if (expectedMarginTop) { - var actualMarginTop = getComputedStyle(node).marginTop; - // Trim the unit "px" from the output. - actualMarginTop = actualMarginTop.substring(0, actualMarginTop.length - 2); - if (actualMarginTop != expectedMarginTop) - failures.push("Expected " + expectedMarginTop + " for margin-top, but got " + actualMarginTop + ". "); - } - - var expectedMarginBottom = checkAttribute(output, node, "data-expected-margin-bottom"); - if (expectedMarginBottom) { - var actualMarginBottom = getComputedStyle(node).marginBottom; - // Trim the unit "px" from the output. - actualMarginBottom = actualMarginBottom.substring(0, actualMarginBottom.length - 2); - if (actualMarginBottom != expectedMarginBottom) - failures.push("Expected " + expectedMarginBottom + " for margin-bottom, but got " + actualMarginBottom + ". "); - } - - var expectedMarginLeft = checkAttribute(output, node, "data-expected-margin-left"); - if (expectedMarginLeft) { - var actualMarginLeft = getComputedStyle(node).marginLeft; - // Trim the unit "px" from the output. - actualMarginLeft = actualMarginLeft.substring(0, actualMarginLeft.length - 2); - if (actualMarginLeft != expectedMarginLeft) - failures.push("Expected " + expectedMarginLeft + " for margin-left, but got " + actualMarginLeft + ". "); - } - - var expectedMarginRight = checkAttribute(output, node, "data-expected-margin-right"); - if (expectedMarginRight) { - var actualMarginRight = getComputedStyle(node).marginRight; - // Trim the unit "px" from the output. - actualMarginRight = actualMarginRight.substring(0, actualMarginRight.length - 2); - if (actualMarginRight != expectedMarginRight) - failures.push("Expected " + expectedMarginRight + " for margin-right, but got " + actualMarginRight + ". 
"); - } - - return output.checked; -} - -window.checkLayout = function(selectorList, outputContainer) -{ - var result = true; - if (!selectorList) { - document.body.appendChild(document.createTextNode("You must provide a CSS selector of nodes to check.")); - return; - } - var nodes = document.querySelectorAll(selectorList); - nodes = Array.prototype.slice.call(nodes); - nodes.reverse(); - var checkedLayout = false; - Array.prototype.forEach.call(nodes, function(node) { - var failures = []; - checkedLayout |= checkExpectedValues(node.parentNode, failures); - checkedLayout |= checkSubtreeExpectedValues(node, failures); - - var container = node.parentNode.className == 'container' ? node.parentNode : node; - - var pre = document.createElement('pre'); - if (failures.length) { - pre.className = 'FAIL'; - result = false; - } - pre.appendChild(document.createTextNode(failures.length ? "FAIL:\n" + failures.join('\n') + '\n\n' + container.outerHTML : "PASS")); - - var referenceNode = container; - if (outputContainer) { - if (!outputContainer.lastChild) { - // Inserting a text node so we have something to insertAfter. - outputContainer.textContent = " "; - } - referenceNode = outputContainer.lastChild; - } - insertAfter(pre, referenceNode); - }); - - if (!checkedLayout) { - document.body.appendChild(document.createTextNode("FAIL: No valid data-* attributes found in selector list : " + selectorList)); - return false; - } - - return result; -} - -})(); diff --git a/test/fixtures/wpt/resources/declarative-shadow-dom-polyfill.js b/test/fixtures/wpt/resources/declarative-shadow-dom-polyfill.js deleted file mode 100644 index 99a3e911eb6336..00000000000000 --- a/test/fixtures/wpt/resources/declarative-shadow-dom-polyfill.js +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Polyfill for attaching shadow trees for declarative Shadow DOM for - * implementations that do not support declarative Shadow DOM. - * - * Note: this polyfill will feature-detect the native feature, and do nothing - * if supported. - * - * See: https://github.com/whatwg/html/pull/5465 - * - * root: The root of the subtree in which to upgrade shadow roots - * - */ - -function polyfill_declarative_shadow_dom(root) { - if (HTMLTemplateElement.prototype.hasOwnProperty('shadowRootMode')) - return; - root.querySelectorAll("template[shadowrootmode]").forEach(template => { - const mode = template.getAttribute("shadowrootmode"); - const delegatesFocus = template.hasAttribute("shadowrootdelegatesfocus"); - const shadowRoot = template.parentNode.attachShadow({ mode, delegatesFocus }); - shadowRoot.appendChild(template.content); - template.remove(); - polyfill_declarative_shadow_dom(shadowRoot); - }); -} diff --git a/test/fixtures/wpt/resources/idlharness-shadowrealm.js b/test/fixtures/wpt/resources/idlharness-shadowrealm.js deleted file mode 100644 index 9484ca6f512ad0..00000000000000 --- a/test/fixtures/wpt/resources/idlharness-shadowrealm.js +++ /dev/null @@ -1,61 +0,0 @@ -// TODO: it would be nice to support `idl_array.add_objects` -function fetch_text(url) { - return fetch(url).then(function (r) { - if (!r.ok) { - throw new Error("Error fetching " + url + "."); - } - return r.text(); - }); -} - -/** - * idl_test_shadowrealm is a promise_test wrapper that handles the fetching of the IDL, and - * running the code in a `ShadowRealm`, avoiding repetitive boilerplate. - * - * @see https://github.com/tc39/proposal-shadowrealm - * @param {String[]} srcs Spec name(s) for source idl files (fetched from - * /interfaces/{name}.idl). 
- * @param {String[]} deps Spec name(s) for dependency idl files (fetched - * from /interfaces/{name}.idl). Order is important - dependencies from - * each source will only be included if they're already know to be a - * dependency (i.e. have already been seen). - */ -function idl_test_shadowrealm(srcs, deps) { - promise_setup(async t => { - const realm = new ShadowRealm(); - // https://github.com/web-platform-tests/wpt/issues/31996 - realm.evaluate("globalThis.self = globalThis; undefined;"); - - realm.evaluate(` - globalThis.self.GLOBAL = { - isWindow: function() { return false; }, - isWorker: function() { return false; }, - isShadowRealm: function() { return true; }, - }; undefined; - `); - const specs = await Promise.all(srcs.concat(deps).map(spec => { - return fetch_text("/interfaces/" + spec + ".idl"); - })); - const idls = JSON.stringify(specs); - await new Promise( - realm.evaluate(`(resolve,reject) => { - (async () => { - await import("/resources/testharness.js"); - await import("/resources/WebIDLParser.js"); - await import("/resources/idlharness.js"); - const idls = ${idls}; - const idl_array = new IdlArray(); - for (let i = 0; i < ${srcs.length}; i++) { - idl_array.add_idls(idls[i]); - } - for (let i = ${srcs.length}; i < ${srcs.length + deps.length}; i++) { - idl_array.add_dependency_idls(idls[i]); - } - idl_array.test(); - })().then(resolve, (e) => reject(e.toString())); - }`) - ); - await fetch_tests_from_shadow_realm(realm); - }); -} -// vim: set expandtab shiftwidth=4 tabstop=4 foldmarker=@{,@} foldmethod=marker: diff --git a/test/fixtures/wpt/resources/idlharness.js b/test/fixtures/wpt/resources/idlharness.js index 056fcdd4a1076a..2eb710c1827cda 100644 --- a/test/fixtures/wpt/resources/idlharness.js +++ b/test/fixtures/wpt/resources/idlharness.js @@ -566,6 +566,7 @@ IdlArray.prototype.is_json_type = function(type) case "Uint8ClampedArray": case "BigInt64Array": case "BigUint64Array": + case "Float16Array": case "Float32Array": case "Float64Array": case "ArrayBuffer": @@ -733,7 +734,7 @@ IdlArray.prototype.test = function() Object.getOwnPropertyNames(this.members).forEach(function(memberName) { var member = this.members[memberName]; - if (!(member instanceof IdlInterface)) { + if (!(member instanceof IdlInterface || member instanceof IdlNamespace)) { return; } @@ -782,6 +783,10 @@ IdlArray.prototype.merge_partials = function() } testedPartials.set(parsed_idl.name, partialTestCount); + if (!self.shouldRunSubTest(partialTestName)) { + return; + } + if (!parsed_idl.untested) { test(function () { assert_true(originalExists, `Original ${parsed_idl.type} should be defined`); @@ -809,7 +814,7 @@ IdlArray.prototype.merge_partials = function() { // Special-case "Exposed". Must be a subset of original interface's exposure. // Exposed on a partial is the equivalent of having the same Exposed on all nested members. - // See https://github.com/heycam/webidl/issues/154 for discrepancy between Exposed and + // See https://github.com/heycam/webidl/issues/154 for discrepency between Exposed and // other extended attributes on partial interfaces. 
const exposureAttr = parsed_idl.extAttrs.find(a => a.name === "Exposed"); if (exposureAttr) { @@ -870,6 +875,7 @@ IdlArray.prototype.merge_mixins = function() { const lhs = parsed_idl.target; const rhs = parsed_idl.includes; + const testName = lhs + " includes " + rhs + ": member names are unique"; var errStr = lhs + " includes " + rhs + ", but "; if (!(lhs in this.members)) throw errStr + lhs + " is undefined."; @@ -877,7 +883,7 @@ IdlArray.prototype.merge_mixins = function() if (!(rhs in this.members)) throw errStr + rhs + " is undefined."; if (!(this.members[rhs] instanceof IdlInterface)) throw errStr + rhs + " is not an interface."; - if (this.members[rhs].members.length) { + if (this.members[rhs].members.length && self.shouldRunSubTest(testName)) { test(function () { var clash = this.members[rhs].members.find(function(member) { return this.members[lhs].members.find(function(m) { @@ -891,7 +897,7 @@ IdlArray.prototype.merge_mixins = function() this.members[lhs].members.push(new IdlInterfaceMember(member)); }.bind(this)); assert_true(!clash, "member " + (clash && clash.name) + " is unique"); - }.bind(this), lhs + " includes " + rhs + ": member names are unique"); + }.bind(this), testName); } } this.includes = []; @@ -1420,7 +1426,7 @@ IdlInterface.prototype.test = function() if (!this.untested) { subsetTestByKey(this.name, test, function() { - assert_false(this.name in self); + assert_false(this.name in self, this.name + " interface should not exist"); }.bind(this), this.name + " interface: existence and properties of interface object"); } return; @@ -3450,6 +3456,17 @@ IdlNamespace.prototype.test_self = function () IdlNamespace.prototype.test = function () { + // If the namespace object is not exposed, only test that. Members can't be + // tested either + if (!this.exposed) { + if (!this.untested) { + subsetTestByKey(this.name, test, function() { + assert_false(this.name in self, this.name + " namespace should not exist"); + }.bind(this), this.name + " namespace: existence and properties of namespace object"); + } + return; + } + if (!this.untested) { this.test_self(); } @@ -3497,7 +3514,7 @@ function idl_test(srcs, deps, idl_setup_func) { "require-exposed" ]; return Promise.all( - srcs.concat(deps).map(fetch_spec)) + srcs.concat(deps).map(globalThis.fetch_spec)) .then(function(results) { const astArray = results.map(result => WebIDL2.parse(result.idl, { sourceName: result.spec }) @@ -3538,9 +3555,11 @@ function idl_test(srcs, deps, idl_setup_func) { }); }, 'idl_test setup'); } +globalThis.idl_test = idl_test; /** * fetch_spec is a shorthand for a Promise that fetches the spec's content. + * Note: ShadowRealm-specific implementation in testharness-shadowrealm-inner.js */ function fetch_spec(spec) { var url = '/interfaces/' + spec + '.idl'; diff --git a/test/fixtures/wpt/resources/out-of-scope-test.js b/test/fixtures/wpt/resources/out-of-scope-test.js new file mode 100644 index 00000000000000..ce24124646dc83 --- /dev/null +++ b/test/fixtures/wpt/resources/out-of-scope-test.js @@ -0,0 +1,5 @@ +// Testing that the resolution is correct using `resolve`, as you can't import +// the same module twice. 
+window.outscope_test_result = import.meta.resolve("a"); +window.outscope_test_result2 = import.meta.resolve("../resources/log.sub.js?name=E"); + diff --git a/test/fixtures/wpt/resources/testdriver-actions.js b/test/fixtures/wpt/resources/testdriver-actions.js index e550ff0b1dc903..edb4759954d4c3 100644 --- a/test/fixtures/wpt/resources/testdriver-actions.js +++ b/test/fixtures/wpt/resources/testdriver-actions.js @@ -32,7 +32,7 @@ * await actions.send(); * * @param {number} [defaultTickDuration] - The default duration of a - * tick. Be default this is set to 16ms, which is one frame time + * tick. Be default this is set ot 16ms, which is one frame time * based on 60Hz display. */ function Actions(defaultTickDuration=16) { @@ -290,7 +290,7 @@ }, /** - * Create a keyDown event for the current default key source + * Create a keyUp event for the current default key source * * @param {String} key - Key to release * @param {String?} sourceName - Named key source to use or null for the default key source diff --git a/test/fixtures/wpt/resources/testdriver-actions.js.headers b/test/fixtures/wpt/resources/testdriver-actions.js.headers new file mode 100644 index 00000000000000..5e8f640c6659d1 --- /dev/null +++ b/test/fixtures/wpt/resources/testdriver-actions.js.headers @@ -0,0 +1,2 @@ +Content-Type: text/javascript; charset=utf-8 +Cache-Control: max-age=3600 diff --git a/test/fixtures/wpt/resources/testdriver.js b/test/fixtures/wpt/resources/testdriver.js index ddf723cb3ee8a5..5b390dedeb72bb 100644 --- a/test/fixtures/wpt/resources/testdriver.js +++ b/test/fixtures/wpt/resources/testdriver.js @@ -3,6 +3,26 @@ var idCounter = 0; let testharness_context = null; + const features = (() => { + function getFeatures(scriptSrc) { + try { + const url = new URL(scriptSrc); + return url.searchParams.getAll('feature'); + } catch (e) { + return []; + } + } + + return getFeatures(document?.currentScript?.src ?? ''); + })(); + + function assertBidiIsEnabled(){ + if (!features.includes('bidi')) { + throw new Error( + "`?feature=bidi` is missing when importing testdriver.js but the test is using WebDriver BiDi APIs"); + } + } + function getInViewCenterPoint(rect) { var left = Math.max(0, rect.left); var right = Math.min(window.innerWidth, rect.right); @@ -49,6 +69,924 @@ * @namespace {test_driver} */ window.test_driver = { + /** + Represents `WebDriver BiDi `_ protocol. + */ + bidi: { + /** + * @typedef {(String|WindowProxy)} Context A browsing context. Can + * be specified by its ID (a string) or using a `WindowProxy` + * object. + */ + /** + * `bluetooth `_ module. + */ + bluetooth: { + /** + * Handle a bluetooth device prompt with the given params. Matches the + * `bluetooth.handleRequestDevicePrompt + * `_ + * WebDriver BiDi command. + * + * @example + * await test_driver.bidi.bluetooth.handleRequestDevicePrompt({ + * prompt: "pmt-e0a234b", + * accept: true, + * device: "dvc-9b3b872" + * }); + * + * @param {object} params - Parameters for the command. + * @param {string} params.prompt - The id of a bluetooth device prompt. + * Matches the + * `bluetooth.HandleRequestDevicePromptParameters:prompt `_ + * value. + * @param {bool} params.accept - Whether to accept a bluetooth device prompt. + * Matches the + * `bluetooth.HandleRequestDevicePromptAcceptParameters:accept `_ + * value. + * @param {string} params.device - The device id from a bluetooth device + * prompt to be accepted. Matches the + * `bluetooth.HandleRequestDevicePromptAcceptParameters:device `_ + * value. 
+ * @param {Context} [params.context] The optional context parameter specifies in + * which browsing context the bluetooth device prompt should be handled. If not + * provided, the current browsing context is used. + * @returns {Promise} fulfilled after the bluetooth device prompt + * is handled, or rejected if the operation fails. + */ + handle_request_device_prompt: function(params) { + return window.test_driver_internal.bidi.bluetooth + .handle_request_device_prompt(params); + }, + /** + * Creates a simulated bluetooth adapter with the given params. Matches the + * `bluetooth.simulateAdapter `_ + * WebDriver BiDi command. + * + * @example + * await test_driver.bidi.bluetooth.simulate_adapter({ + * state: "powered-on" + * }); + * + * @param {object} params - Parameters for the command. + * @param {string} params.state The state of the simulated bluetooth adapter. + * Matches the + * `bluetooth.SimulateAdapterParameters:state `_ + * value. + * @param {Context} [params.context] The optional context parameter specifies in + * which browsing context the simulated bluetooth adapter should be set. If not + * provided, the current browsing context is used. + * @returns {Promise} fulfilled after the simulated bluetooth adapter is created + * and set, or rejected if the operation fails. + */ + simulate_adapter: function (params) { + return window.test_driver_internal.bidi.bluetooth.simulate_adapter(params); + }, + /** + * Disables the bluetooth simulation with the given params. Matches the + * `bluetooth.disableSimulation `_ + * WebDriver BiDi command. + * + * @example + * await test_driver.bidi.bluetooth.disable_simulation(); + * + * @param {object} params - Parameters for the command. + * @param {Context} [params.context] The optional context parameter specifies in + * which browsing context to disable the simulation for. If not provided, the + * current browsing context is used. + * @returns {Promise} fulfilled after the simulation is disabled, or rejected if + * the operation fails. + */ + disable_simulation: function (params) { + return window.test_driver_internal.bidi.bluetooth.disable_simulation(params); + }, + /** + * Creates a simulated bluetooth peripheral with the given params. + * Matches the + * `bluetooth.simulatePreconnectedPeripheral `_ + * WebDriver BiDi command. + * + * @example + * await test_driver.bidi.bluetooth.simulatePreconnectedPeripheral({ + * "address": "09:09:09:09:09:09", + * "name": "Some Device", + * "manufacturerData": [{key: 17, data: "AP8BAX8="}], + * "knownServiceUuids": [ + * "12345678-1234-5678-9abc-def123456789", + * ], + * }); + * + * @param {object} params - Parameters for the command. + * @param {string} params.address - The address of the simulated + * bluetooth peripheral. Matches the + * `bluetooth.SimulatePreconnectedPeripheralParameters:address `_ + * value. + * @param {string} params.name - The name of the simulated bluetooth + * peripheral. Matches the + * `bluetooth.SimulatePreconnectedPeripheralParameters:name `_ + * value. + * @param {Array.ManufacturerData} params.manufacturerData - The manufacturerData of the + * simulated bluetooth peripheral. Matches the + * `bluetooth.SimulatePreconnectedPeripheralParameters:manufacturerData `_ + * value. + * @param {string} params.knownServiceUuids - The knownServiceUuids of + * the simulated bluetooth peripheral. Matches the + * `bluetooth.SimulatePreconnectedPeripheralParameters:knownServiceUuids `_ + * value. 
+ * @param {Context} [params.context] The optional context parameter + * specifies in which browsing context the simulated bluetooth peripheral should be + * set. If not provided, the current browsing context is used. + * @returns {Promise} fulfilled after the simulated bluetooth peripheral is created + * and set, or rejected if the operation fails. + */ + simulate_preconnected_peripheral: function(params) { + return window.test_driver_internal.bidi.bluetooth + .simulate_preconnected_peripheral(params); + }, + /** + * Simulates a GATT connection response for a given peripheral. + * Matches the `bluetooth.simulateGattConnectionResponse + * `_ + * WebDriver BiDi command. + * + * @example + * await test_driver.bidi.bluetooth.simulate_gatt_connection_response({ + * "address": "09:09:09:09:09:09", + * "code": 0x0 + * }); + * + * @param {object} params - Parameters for the command. + * @param {string} params.address - The address of the simulated + * bluetooth peripheral. Matches the + * `bluetooth.SimulateGattConnectionResponseParameters:peripheral `_ + * value. + * @param {number} params.code - The response code for a GATT connection attempted. + * Matches the + * `bluetooth.SimulateGattConnectionResponseParameters:code `_ + * value. + * @param {Context} [params.context] The optional context parameter specifies in + * which browsing context the GATT connection response should be simulated. If not + * provided, the current browsing context is used. + * @returns {Promise} fulfilled after the GATT connection response + * is simulated, or rejected if the operation fails. + */ + simulate_gatt_connection_response: function(params) { + return window.test_driver_internal.bidi.bluetooth + .simulate_gatt_connection_response(params); + }, + /** + * Simulates a GATT disconnection for a given peripheral. + * Matches the `bluetooth.simulateGattDisconnection + * `_ + * WebDriver BiDi command. + * + * @example + * await test_driver.bidi.bluetooth.simulate_gatt_disconnection({ + * "address": "09:09:09:09:09:09", + * }); + * + * @param {object} params - Parameters for the command. + * @param {string} params.address - The address of the simulated + * bluetooth peripheral. Matches the + * `bluetooth.SimulateGattDisconnectionParameters:address `_ + * value. + * @param {Context} [params.context] The optional context parameter specifies in + * which browsing context the GATT disconnection should be simulated. If not + * provided, the current browsing context is used. + * @returns {Promise} fulfilled after the GATT disconnection + * is simulated, or rejected if the operation fails. + */ + simulate_gatt_disconnection: function(params) { + return window.test_driver_internal.bidi.bluetooth + .simulate_gatt_disconnection(params); + }, + /** + * Simulates a GATT service. + * Matches the `bluetooth.simulateService + * `_ + * WebDriver BiDi command. + * + * @example + * await test_driver.bidi.bluetooth.simulate_service({ + * "address": "09:09:09:09:09:09", + * "uuid": "0000180d-0000-1000-8000-00805f9b34fb", + * "type": "add" + * }); + * + * @param {object} params - Parameters for the command. + * @param {string} params.address - The address of the simulated bluetooth peripheral this service belongs to. + * Matches the + * `bluetooth.SimulateServiceParameters:address `_ + * value. + * @param {string} params.uuid - The uuid of the simulated GATT service. + * Matches the + * `bluetooth.SimulateServiceParameters:address `_ + * value. 
+ * @param {string} params.type - The type of the GATT service simulation, either "add" or "remove". + * Matches the + * `bluetooth.SimulateServiceParameters:address `_ + * value. + * @param {Context} [params.context] The optional context parameter specifies in + * which browsing context the GATT service should be simulated. If not + * provided, the current browsing context is used. + * @returns {Promise} fulfilled after the GATT service + * is simulated, or rejected if the operation fails. + */ + simulate_service: function(params) { + return window.test_driver_internal.bidi.bluetooth + .simulate_service(params); + }, + /** + * Simulates a GATT characteristic. + * Matches the `bluetooth.simulateCharacteristic + * `_ + * WebDriver BiDi command. + * + * @example + * await test_driver.bidi.bluetooth.simulate_characteristic({ + * "address": "09:09:09:09:09:09", + * "serviceUuid": "0000180d-0000-1000-8000-00805f9b34fb", + * "characteristicUuid": "00002a21-0000-1000-8000-00805f9b34fb", + * "characteristicProperties": { + * "read": true, + * "write": true, + * "notify": true + * }, + * "type": "add" + * }); + * + * @param {object} params - Parameters for the command. + * @param {string} params.address - The address of the simulated bluetooth peripheral the characterisitc belongs to. + * Matches the + * `bluetooth.SimulateCharacteristicParameters:address `_ + * value. + * @param {string} params.serviceUuid - The uuid of the simulated GATT service the characterisitc belongs to. + * Matches the + * `bluetooth.SimulateCharacteristicParameters:address `_ + * value. + * @param {string} params.characteristicUuid - The uuid of the simulated GATT characteristic. + * Matches the + * `bluetooth.SimulateCharacteristicParameters:address `_ + * value. + * @param {string} params.characteristicProperties - The properties of the simulated GATT characteristic. + * Matches the + * `bluetooth.SimulateCharacteristicParameters:address `_ + * value. + * @param {string} params.type - The type of the GATT characterisitc simulation, either "add" or "remove". + * Matches the + * `bluetooth.SimulateCharacteristicParameters:address `_ + * value. + * @param {Context} [params.context] The optional context parameter specifies in + * which browsing context the GATT characteristic should be simulated. If not + * provided, the current browsing context is used. + * @returns {Promise} fulfilled after the GATT characteristic + * is simulated, or rejected if the operation fails. + */ + simulate_characteristic: function(params) { + return window.test_driver_internal.bidi.bluetooth + .simulate_characteristic(params); + }, + /** + * Simulates a GATT characteristic response. + * Matches the `bluetooth.simulateCharacteristicResponse + * `_ + * WebDriver BiDi command. + * + * @example + * await test_driver.bidi.bluetooth.simulate_characteristic({ + * "address": "09:09:09:09:09:09", + * "serviceUuid": "0000180d-0000-1000-8000-00805f9b34fb", + * "characteristicUuid": "00002a21-0000-1000-8000-00805f9b34fb", + * "type": "read", + * "code": 0, + * "data": [1, 2] + * }); + * + * @param {object} params - Parameters for the command. + * @param {string} params.address - The address of the simulated + * bluetooth peripheral. Matches the + * `bluetooth.SimulateCharacteristicResponseParameters:address `_ + * value. + * @param {string} params.serviceUuid - The uuid of the simulated GATT service the characterisitc belongs to. + * Matches the + * `bluetooth.SimulateCharacteristicResponseParameters:address `_ + * value. 
+ * @param {string} params.characteristicUuid - The uuid of the simulated characteristic. + * Matches the + * `bluetooth.SimulateCharacteristicResponseParameters:address `_ + * value. + * @param {string} params.type - The type of the simulated GATT characteristic operation." + * Can be "read", "write", "subscribe-to-notifications" or "unsubscribe-from-notifications". + * Matches the + * `bluetooth.SimulateCharacteristicResponseParameters:address `_ + * value. + * @param {string} params.code - The simulated GATT characteristic response code. + * Matches the + * `bluetooth.SimulateCharacteristicResponseParameters:address `_ + * value.* + * @param {string} params.data - The data along with the simulated GATT characteristic response. + * Matches the + * `bluetooth.SimulateCharacteristicResponseParameters:address `_ + * value.** + * @param {Context} [params.context] The optional context parameter specifies in + * which browsing context the GATT characteristic belongs to. If not + * provided, the current browsing context is used. + * @returns {Promise} fulfilled after the GATT characteristic + * is simulated, or rejected if the operation fails. + */ + simulate_characteristic_response: function(params) { + return window.test_driver_internal.bidi.bluetooth + .simulate_characteristic_response(params); + }, + /** + * Simulates a GATT descriptor. + * Matches the `bluetooth.simulateDescriptor + * `_ + * WebDriver BiDi command. + * + * @example + * await test_driver.bidi.bluetooth.simulate_descriptor({ + * "address": "09:09:09:09:09:09", + * "serviceUuid": "0000180d-0000-1000-8000-00805f9b34fb", + * "characteristicUuid": "00002a21-0000-1000-8000-00805f9b34fb", + * "descriptorUuid": "00002901-0000-1000-8000-00805f9b34fb", + * "type": "add" + * }); + * + * @param {object} params - Parameters for the command. + * @param {string} params.address - The address of the simulated bluetooth peripheral the descriptor belongs to. + * Matches the + * `bluetooth.SimulateDescriptorParameters:address `_ + * value. + * @param {string} params.serviceUuid - The uuid of the simulated GATT service the descriptor belongs to. + * Matches the + * `bluetooth.SimulateDescriptorParameters:address `_ + * value. + * @param {string} params.characteristicUuid - The uuid of the simulated GATT characterisitc the descriptor belongs to. + * Matches the + * `bluetooth.SimulateDescriptorParameters:address `_ + * value. + * @param {string} params.descriptorUuid - The uuid of the simulated GATT descriptor. + * Matches the + * `bluetooth.SimulateDescriptorParameters:address `_ + * value.* + * @param {string} params.type - The type of the GATT descriptor simulation, either "add" or "remove". + * Matches the + * `bluetooth.SimulateDescriptorParameters:address `_ + * value. + * @param {Context} [params.context] The optional context parameter specifies in + * which browsing context the GATT descriptor should be simulated. If not + * provided, the current browsing context is used. + * @returns {Promise} fulfilled after the GATT descriptor + * is simulated, or rejected if the operation fails. + */ + simulate_descriptor: function(params) { + return window.test_driver_internal.bidi.bluetooth + .simulate_descriptor(params); + }, + /** + * Simulates a GATT descriptor response. + * Matches the `bluetooth.simulateDescriptorResponse + * `_ + * WebDriver BiDi command. 
+ * + * @example + * await test_driver.bidi.bluetooth.simulate_descriptor_response({ + * "address": "09:09:09:09:09:09", + * "serviceUuid": "0000180d-0000-1000-8000-00805f9b34fb", + * "characteristicUuid": "00002a21-0000-1000-8000-00805f9b34fb", + * "descriptorUuid": "00002901-0000-1000-8000-00805f9b34fb", + * "type": "read", + * "code": 0, + * "data": [1, 2] + * }); + * + * @param {object} params - Parameters for the command. + * @param {string} params.address - The address of the simulated bluetooth peripheral the descriptor belongs to. + * Matches the + * `bluetooth.SimulateDescriptorResponseParameters:address `_ + * value. + * @param {string} params.serviceUuid - The uuid of the simulated GATT service the descriptor belongs to. + * Matches the + * `bluetooth.SimulateDescriptorResponseParameters:address `_ + * value. + * @param {string} params.characteristicUuid - The uuid of the simulated GATT characterisitc the descriptor belongs to. + * Matches the + * `bluetooth.SimulateDescriptorResponseParameters:address `_ + * value. + * @param {string} params.descriptorUuid - The uuid of the simulated GATT descriptor. + * Matches the + * `bluetooth.SimulateDescriptorResponseParameters:address `_ + * value. + * @param {string} params.type - The type of the simulated GATT descriptor operation. + * Matches the + * `bluetooth.SimulateDescriptorResponseParameters:address `_ + * value. + * @param {string} params.code - The simulated GATT descriptor response code. + * Matches the + * `bluetooth.SimulateDescriptorResponseParameters:address `_ + * value.* + * @param {string} params.data - The data along with the simulated GATT descriptor response. + * Matches the + * `bluetooth.SimulateDescriptorResponseParameters:address `_ + * value.** + * @param {Context} [params.context] The optional context parameter specifies in + * which browsing context the GATT descriptor belongs to. If not + * provided, the current browsing context is used. + * @returns {Promise} fulfilled after the GATT descriptor response + * is simulated, or rejected if the operation fails. + */ + simulate_descriptor_response: function(params) { + return window.test_driver_internal.bidi.bluetooth + .simulate_descriptor_response(params); + }, + /** + * `bluetooth.RequestDevicePromptUpdatedParameters `_ + * event. + */ + request_device_prompt_updated: { + /** + * @typedef {object} RequestDevicePromptUpdated + * `bluetooth.RequestDevicePromptUpdatedParameters `_ + * event. + */ + + /** + * Subscribes to the event. Events will be emitted only if + * there is a subscription for the event. This method does + * not add actual listeners. To listen to the event, use the + * `on` or `once` methods. The buffered events will be + * emitted before the command promise is resolved. + * + * @param {object} [params] Parameters for the subscription. + * @param {null|Array.<(Context)>} [params.contexts] The + * optional contexts parameter specifies which browsing + * contexts to subscribe to the event on. It should be + * either an array of Context objects, or null. If null, the + * event will be subscribed to globally. If omitted, the + * event will be subscribed to on the current browsing + * context. + * @returns {Promise<(function(): Promise)>} Callback + * for unsubscribing from the created subscription. + */ + subscribe: async function(params = {}) { + assertBidiIsEnabled(); + return window.test_driver_internal.bidi.bluetooth + .request_device_prompt_updated.subscribe(params); + }, + /** + * Adds an event listener for the event. 
+ * + * @param {function(RequestDevicePromptUpdated): void} callback The + * callback to be called when the event is emitted. The + * callback is called with the event object as a parameter. + * @returns {function(): void} A function that removes the + * added event listener when called. + */ + on: function(callback) { + assertBidiIsEnabled(); + return window.test_driver_internal.bidi.bluetooth + .request_device_prompt_updated.on(callback); + }, + /** + * Adds an event listener for the event that is only called + * once and removed afterward. + * + * @return {Promise} The promise which + * is resolved with the event object when the event is emitted. + */ + once: function() { + assertBidiIsEnabled(); + return new Promise(resolve => { + const remove_handler = + window.test_driver_internal.bidi.bluetooth + .request_device_prompt_updated.on(event => { + resolve(event); + remove_handler(); + }); + }); + }, + }, + /** + * `bluetooth.GattConnectionAttemptedParameters `_ + * event. + */ + gatt_connection_attempted: { + /** + * @typedef {object} GattConnectionAttempted + * `bluetooth.GattConnectionAttempted `_ + * event. + */ + + /** + * Subscribes to the event. Events will be emitted only if + * there is a subscription for the event. This method does + * not add actual listeners. To listen to the event, use the + * `on` or `once` methods. The buffered events will be + * emitted before the command promise is resolved. + * + * @param {object} [params] Parameters for the subscription. + * @param {null|Array.<(Context)>} [params.contexts] The + * optional contexts parameter specifies which browsing + * contexts to subscribe to the event on. It should be + * either an array of Context objects, or null. If null, the + * event will be subscribed to globally. If omitted, the + * event will be subscribed to on the current browsing + * context. + * @returns {Promise<(function(): Promise)>} Callback + * for unsubscribing from the created subscription. + */ + subscribe: async function(params = {}) { + assertBidiIsEnabled(); + return window.test_driver_internal.bidi.bluetooth + .gatt_connection_attempted.subscribe(params); + }, + /** + * Adds an event listener for the event. + * + * @param {function(GattConnectionAttempted): void} callback The + * callback to be called when the event is emitted. The + * callback is called with the event object as a parameter. + * @returns {function(): void} A function that removes the + * added event listener when called. + */ + on: function(callback) { + assertBidiIsEnabled(); + return window.test_driver_internal.bidi.bluetooth + .gatt_connection_attempted.on(callback); + }, + /** + * Adds an event listener for the event that is only called + * once and removed afterward. + * + * @return {Promise} The promise which + * is resolved with the event object when the event is emitted. + */ + once: function() { + assertBidiIsEnabled(); + return new Promise(resolve => { + const remove_handler = + window.test_driver_internal.bidi.bluetooth + .gatt_connection_attempted.on(event => { + resolve(event); + remove_handler(); + }); + }); + }, + }, + /** + * `bluetooth.CharacteristicEventGeneratedParameters `_ + * event. + */ + characteristic_event_generated: { + /** + * @typedef {object} CharacteristicEventGenerated + * `bluetooth.CharacteristicEventGenerated `_ + * event. + */ + + /** + * Subscribes to the event. Events will be emitted only if + * there is a subscription for the event. This method does + * not add actual listeners. To listen to the event, use the + * `on` or `once` methods. 
The buffered events will be + * emitted before the command promise is resolved. + * + * @param {object} [params] Parameters for the subscription. + * @param {null|Array.<(Context)>} [params.contexts] The + * optional contexts parameter specifies which browsing + * contexts to subscribe to the event on. It should be + * either an array of Context objects, or null. If null, the + * event will be subscribed to globally. If omitted, the + * event will be subscribed to on the current browsing + * context. + * @returns {Promise<(function(): Promise)>} Callback + * for unsubscribing from the created subscription. + */ + subscribe: async function(params = {}) { + assertBidiIsEnabled(); + return window.test_driver_internal.bidi.bluetooth + .characteristic_event_generated.subscribe(params); + }, + /** + * Adds an event listener for the event. + * + * @param {function(CharacteristicEventGenerated): void} callback The + * callback to be called when the event is emitted. The + * callback is called with the event object as a parameter. + * @returns {function(): void} A function that removes the + * added event listener when called. + */ + on: function(callback) { + assertBidiIsEnabled(); + return window.test_driver_internal.bidi.bluetooth + .characteristic_event_generated.on(callback); + }, + /** + * Adds an event listener for the event that is only called + * once and removed afterward. + * + * @return {Promise} The promise which + * is resolved with the event object when the event is emitted. + */ + once: function() { + assertBidiIsEnabled(); + return new Promise(resolve => { + const remove_handler = + window.test_driver_internal.bidi.bluetooth + .characteristic_event_generated.on(event => { + resolve(event); + remove_handler(); + }); + }); + }, + }, + /** + * `bluetooth.DescriptorEventGeneratedParameters `_ + * event. + */ + descriptor_event_generated: { + /** + * @typedef {object} DescriptorEventGenerated + * `bluetooth.DescriptorEventGenerated `_ + * event. + */ + + /** + * Subscribes to the event. Events will be emitted only if + * there is a subscription for the event. This method does + * not add actual listeners. To listen to the event, use the + * `on` or `once` methods. The buffered events will be + * emitted before the command promise is resolved. + * + * @param {object} [params] Parameters for the subscription. + * @param {null|Array.<(Context)>} [params.contexts] The + * optional contexts parameter specifies which browsing + * contexts to subscribe to the event on. It should be + * either an array of Context objects, or null. If null, the + * event will be subscribed to globally. If omitted, the + * event will be subscribed to on the current browsing + * context. + * @returns {Promise<(function(): Promise)>} Callback + * for unsubscribing from the created subscription. + */ + subscribe: async function(params = {}) { + assertBidiIsEnabled(); + return window.test_driver_internal.bidi.bluetooth + .descriptor_event_generated.subscribe(params); + }, + /** + * Adds an event listener for the event. + * + * @param {function(DescriptorEventGenerated): void} callback The + * callback to be called when the event is emitted. The + * callback is called with the event object as a parameter. + * @returns {function(): void} A function that removes the + * added event listener when called. 
+ */ + on: function(callback) { + assertBidiIsEnabled(); + return window.test_driver_internal.bidi.bluetooth + .descriptor_event_generated.on(callback); + }, + /** + * Adds an event listener for the event that is only called + * once and removed afterward. + * + * @return {Promise} The promise which + * is resolved with the event object when the event is emitted. + */ + once: function() { + assertBidiIsEnabled(); + return new Promise(resolve => { + const remove_handler = + window.test_driver_internal.bidi.bluetooth + .descriptor_event_generated.on(event => { + resolve(event); + remove_handler(); + }); + }); + }, + } + }, + /** + * `emulation `_ module. + */ + emulation: { + /** + * Overrides the geolocation coordinates for the specified + * browsing contexts. + * Matches the `emulation.setGeolocationOverride + * `_ + * WebDriver BiDi command. + * + * @example + * await test_driver.bidi.emulation.set_geolocation_override({ + * coordinates: { + * latitude: 52.51, + * longitude: 13.39, + * accuracy: 0.5, + * altitude: 34, + * altitudeAccuracy: 0.75, + * heading: 180, + * speed: 2.77 + * } + * }); + * + * @param {object} params - Parameters for the command. + * @param {null|object} params.coordinates - The optional + * geolocation coordinates to set. Matches the + * `emulation.GeolocationCoordinates `_ + * value. If null or omitted and the `params.error` is set, the + * emulation will be removed. Mutually exclusive with + * `params.error`. + * @param {object} params.error - The optional + * geolocation error to emulate. Matches the + * `emulation.GeolocationPositionError `_ + * value. Mutually exclusive with `params.coordinates`. + * @param {null|Array.<(Context)>} [params.contexts] The + * optional contexts parameter specifies which browsing contexts + * to set the geolocation override on. It should be either an + * array of Context objects (window or browsing context id), or + * null. If null or omitted, the override will be set on the + * current browsing context. + * @returns {Promise} Resolves when the geolocation + * override is successfully set. + */ + set_geolocation_override: function (params) { + // Ensure the bidi feature is enabled before calling the internal method + assertBidiIsEnabled(); + return window.test_driver_internal.bidi.emulation.set_geolocation_override( + params); + }, + /** + * Overrides the locale for the specified browsing contexts. + * Matches the `emulation.setLocaleOverride + * `_ + * WebDriver BiDi command. + * + * @example + * await test_driver.bidi.emulation.set_locale_override({ + * locale: 'de-DE' + * }); + * + * @param {object} params - Parameters for the command. + * @param {null|string} params.locale - The optional + * locale to set. + * @param {null|Array.<(Context)>} [params.contexts] The + * optional contexts parameter specifies which browsing contexts + * to set the locale override on. It should be either an array + * of Context objects (window or browsing context id), or null. + * If null or omitted, the override will be set on the current + * browsing context. + * @returns {Promise} Resolves when the locale override + * is successfully set. + */ + set_locale_override: function (params) { + assertBidiIsEnabled(); + return window.test_driver_internal.bidi.emulation.set_locale_override( + params); + }, + /** + * Overrides the screen orientation for the specified browsing + * contexts. + * Matches the `emulation.setScreenOrientationOverride + * `_ + * WebDriver BiDi command. 
+ * + * @example + * await test_driver.bidi.emulation.set_screen_orientation_override({ + * screenOrientation: { + * natural: 'portrait', + * type: 'landscape-secondary' + * } + * }); + * + * @param {object} params - Parameters for the command. + * @param {null|object} params.screenOrientation - The optional + * screen orientation. Matches the + * `emulation.ScreenOrientation `_ + * type. If null or omitted, the override will be removed. + * @param {null|Array.<(Context)>} [params.contexts] The + * optional contexts parameter specifies which browsing contexts + * to set the screen orientation override on. It should be + * either an array of Context objects (window or browsing + * context id), or null. If null or omitted, the override will + * be set on the current browsing context. + * @returns {Promise} Resolves when the screen orientation + * override is successfully set. + */ + set_screen_orientation_override: function (params) { + // Ensure the bidi feature is enabled before calling the internal method + assertBidiIsEnabled(); + return window.test_driver_internal.bidi.emulation.set_screen_orientation_override( + params); + }, + }, + /** + * `log `_ module. + */ + log: { + entry_added: { + /** + * @typedef {object} LogEntryAdded `log.entryAdded `_ event. + */ + + /** + * Subscribes to the event. Events will be emitted only if + * there is a subscription for the event. This method does + * not add actual listeners. To listen to the event, use the + * `on` or `once` methods. The buffered events will be + * emitted before the command promise is resolved. + * + * @param {object} [params] Parameters for the subscription. + * @param {null|Array.<(Context)>} [params.contexts] The + * optional contexts parameter specifies which browsing + * contexts to subscribe to the event on. It should be + * either an array of Context objects, or null. If null, the + * event will be subscribed to globally. If omitted, the + * event will be subscribed to on the current browsing + * context. + * @returns {Promise<(function(): Promise)>} Callback + * for unsubscribing from the created subscription. + */ + subscribe: async function (params = {}) { + assertBidiIsEnabled(); + return window.test_driver_internal.bidi.log.entry_added.subscribe(params); + }, + /** + * Adds an event listener for the event. + * + * @param {function(LogEntryAdded): void} callback The + * callback to be called when the event is emitted. The + * callback is called with the event object as a parameter. + * @returns {function(): void} A function that removes the + * added event listener when called. + */ + on: function (callback) { + assertBidiIsEnabled(); + return window.test_driver_internal.bidi.log.entry_added.on(callback); + }, + /** + * Adds an event listener for the event that is only called + * once and removed afterward. + * + * @return {Promise} The promise which is resolved + * with the event object when the event is emitted. + */ + once: function () { + assertBidiIsEnabled(); + return new Promise(resolve => { + const remove_handler = window.test_driver_internal.bidi.log.entry_added.on( + event => { + resolve(event); + remove_handler(); + }); + }); + }, + } + }, + /** + * `permissions `_ module. + */ + permissions: { + /** + * Sets the state of a permission + * + * This function causes permission requests and queries for the status + * of a certain permission type (e.g. "push", or "background-fetch") to + * always return ``state`` for the specific origin. 
+ * + * Matches the `permissions.setPermission `_ + * WebDriver BiDi command. + * + * @example + * await test_driver.bidi.permissions.set_permission({ + * {name: "geolocation"}, + * state: "granted", + * }); + * + * @param {object} params - Parameters for the command. + * @param {PermissionDescriptor} params.descriptor - a `PermissionDescriptor + * `_ + * or derived object. + * @param {PermissionState} params.state - a `PermissionState + * `_ + * value. + * @param {string} [params.origin] - an optional `origin` string to set the + * permission for. If omitted, the permission is set for the + * current window's origin. + * @returns {Promise} fulfilled after the permission is set, or rejected if setting + * the permission fails. + */ + set_permission: function (params) { + assertBidiIsEnabled(); + return window.test_driver_internal.bidi.permissions.set_permission( + params); + } + } + }, + /** * Set the context in which testharness.js is loaded * @@ -112,8 +1050,8 @@ let wait_click = new Promise(resolve => button.addEventListener("click", resolve)); return test_driver.click(button) - .then(wait_click) - .then(function() { + .then(() => wait_click) + .then(() => { button.remove(); if (typeof action === "function") { @@ -124,11 +1062,10 @@ }, /** - * Triggers a user-initiated click + * Triggers a user-initiated mouse click. * - * If ``element`` isn't inside the - * viewport, it will be scrolled into view before the click - * occurs. + * If ``element`` isn't inside the viewport, it will be + * scrolled into view before the click occurs. * * If ``element`` is from a different browsing context, the * command will be run in that context. @@ -322,7 +1259,7 @@ /** * Minimizes the browser window. * - * Matches the the behaviour of the `Minimize + * Matches the behaviour of the `Minimize * `_ * WebDriver command * @@ -357,6 +1294,25 @@ return window.test_driver_internal.set_window_rect(rect, context); }, + /** + * Gets a rect with the size and position on the screen from the current window state. + * + * Matches the behaviour of the `Get Window Rect + * `_ + * WebDriver command + * + * @param {WindowProxy} context - Browsing context in which + * to run the call, or null for the current + * browsing context. + * + * @returns {Promise} fulfilled after the window rect is returned, or rejected + * in cases the WebDriver command returns errors. Returns a + * `WindowRect `_ + */ + get_window_rect: function(context=null) { + return window.test_driver_internal.get_window_rect(context); + }, + /** * Send a sequence of actions * @@ -647,7 +1603,7 @@ * * This function places `Secure Payment * Confirmation `_ into - * an automated 'autoaccept' or 'autoreject' mode, to allow testing + * an automated 'autoAccept' or 'autoReject' mode, to allow testing * without user interaction with the transaction UX prompt. * * Matches the `Set SPC Transaction Mode @@ -667,8 +1623,8 @@ * @param {String} mode - The `transaction mode * `_ * to set. Must be one of "``none``", - * "``autoaccept``", or - * "``autoreject``". + * "``autoAccept``", or + * "``autoReject``". * @param {WindowProxy} context - Browsing context in which * to run the call, or null for the current * browsing context. @@ -680,6 +1636,42 @@ return window.test_driver_internal.set_spc_transaction_mode(mode, context); }, + /** + * Sets the current registration automation mode for Register Protocol Handlers. 
+ * + * This function places `Register Protocol Handlers + * `_ into + * an automated 'autoAccept' or 'autoReject' mode, to allow testing + * without user interaction with the transaction UX prompt. + * + * Matches the `Set Register Protocol Handler Mode + * `_ + * WebDriver command. + * + * @example + * await test_driver.set_rph_registration_mode("autoAccept"); + * test.add_cleanup(() => { + * return test_driver.set_rph_registration_mode("none"); + * }); + * + * navigator.registerProtocolHandler('web+soup', 'soup?url=%s'); + * + * @param {String} mode - The `registration mode + * `_ + * to set. Must be one of "``none``", + * "``autoAccept``", or + * "``autoReject``". + * @param {WindowProxy} context - Browsing context in which + * to run the call, or null for the current + * browsing context. + * + * @returns {Promise} Fulfilled after the transaction mode has been set, + * or rejected if setting the mode fails. + */ + set_rph_registration_mode: function(mode, context=null) { + return window.test_driver_internal.set_rph_registration_mode(mode, context); + }, + /** * Cancels the Federated Credential Management dialog * @@ -968,6 +1960,215 @@ */ get_virtual_sensor_information: function(sensor_type, context=null) { return window.test_driver_internal.get_virtual_sensor_information(sensor_type, context); + }, + + /** + * Overrides device posture set by hardware. + * + * Matches the `Set device posture + * `_ + * WebDriver command. + * + * @param {String} posture - A `DevicePostureType + * `_ + * either "continuous" or "folded". + * @param {WindowProxy} [context=null] - Browsing context in which to + * run the call, or null for the + * current browsing context. + * + * @returns {Promise} Fulfilled when device posture is set. + * Rejected in case the WebDriver command errors out + * (including if a device posture of the given type + * does not exist). + */ + set_device_posture: function(posture, context=null) { + return window.test_driver_internal.set_device_posture(posture, context); + }, + + /** + * Removes device posture override and returns device posture control + * back to hardware. + * + * Matches the `Clear device posture + * `_ + * WebDriver command. + * + * @param {WindowProxy} [context=null] - Browsing context in which to + * run the call, or null for the + * current browsing context. + * + * @returns {Promise} Fulfilled after the device posture override has + * been removed. Rejected in case the WebDriver + * command errors out. + */ + clear_device_posture: function(context=null) { + return window.test_driver_internal.clear_device_posture(context); + }, + + /** + * Runs the `bounce tracking timer algorithm + * `_, + * which removes all hosts from the stateful bounce tracking map, without + * regard for the bounce tracking grace period and returns a list of the + * deleted hosts. + * + * Matches the `Run Bounce Tracking Mitigations + * `_ + * WebDriver command. + * + * @param {WindowProxy} [context=null] - Browsing context in which to + * run the call, or null for the + * current browsing context. + * @returns {Promise} Fulfilled after the bounce tracking timer + * algorithm has finished running. Returns an array + * of all hosts that were in the stateful bounce + * tracking map before deletion occurred. + */ + run_bounce_tracking_mitigations: function (context = null) { + return window.test_driver_internal.run_bounce_tracking_mitigations(context); + }, + + /** + * Creates a virtual pressure source. + * + * Matches the `Create virtual pressure source + * `_ + * WebDriver command. 
+ * + * @param {String} source_type - A `virtual pressure source type + * `_ + * such as "cpu". + * @param {Object} [metadata={}] - Optional parameters described + * in `Create virtual pressure source + * `_. + * @param {WindowProxy} [context=null] - Browsing context in which to + * run the call, or null for the + * current browsing context. + * + * @returns {Promise} Fulfilled when virtual pressure source is created. + * Rejected in case the WebDriver command errors out + * (including if a virtual pressure source of the + * same type already exists). + */ + create_virtual_pressure_source: function(source_type, metadata={}, context=null) { + return window.test_driver_internal.create_virtual_pressure_source(source_type, metadata, context); + }, + + /** + * Causes a virtual pressure source to report a new reading. + * + * Matches the `Update virtual pressure source + * `_ + * WebDriver command. + * + * @param {String} source_type - A `virtual pressure source type + * `_ + * such as "cpu". + * @param {String} sample - A `virtual pressure state + * `_ + * such as "critical". + * @param {number} own_contribution_estimate - Optional, A `virtual own contribution estimate` + * `_ + * @param {WindowProxy} [context=null] - Browsing context in which to + * run the call, or null for the + * current browsing context. + * + * @returns {Promise} Fulfilled after the reading update reaches the + * virtual pressure source. Rejected in case the + * WebDriver command errors out (including if a + * virtual pressure source of the given type does not + * exist). + */ + update_virtual_pressure_source: function(source_type, sample, own_contribution_estimate, context=null) { + return window.test_driver_internal.update_virtual_pressure_source(source_type, sample, own_contribution_estimate, context); + }, + + /** + * Removes created virtual pressure source. + * + * Matches the `Delete virtual pressure source + * `_ + * WebDriver command. + * + * @param {String} source_type - A `virtual pressure source type + * `_ + * such as "cpu". + * @param {WindowProxy} [context=null] - Browsing context in which to + * run the call, or null for the + * current browsing context. + * + * @returns {Promise} Fulfilled after the virtual pressure source has + * been removed or if a pressure source of the given + * type does not exist. Rejected in case the + * WebDriver command errors out. + */ + remove_virtual_pressure_source: function(source_type, context=null) { + return window.test_driver_internal.remove_virtual_pressure_source(source_type, context); + }, + + /** + * Sets which hashes are considered k-anonymous for the Protected + * Audience interest group with specified `owner` and `name`. + * + * Matches the `Set Protected Audience K-Anonymity + * + * WebDriver command. + * + * @param {String} owner - Origin of the owner of the interest group + * to modify + * @param {String} name - Name of the interest group to modify + * @param {Array} hashes - An array of strings, each of which is a + * base64 ecoded hash to consider k-anonymous. + * + * @returns {Promise} Fulfilled after the k-anonymity status for the + * specified Protected Audience interest group has + * been updated. + * + */ + set_protected_audience_k_anonymity: function(owner, name, hashes, context = null) { + return window.test_driver_internal.set_protected_audience_k_anonymity(owner, name, hashes, context); + }, + + /** + * Overrides the display features provided by the hardware so the viewport segments + * can be emulated. 
+ * + * Matches the `Set display features + * `_ + * WebDriver command. + * + * @param {Array} features - An array of `DisplayFeatureOverride + * `. + * @param {WindowProxy} [context=null] - Browsing context in which to + * run the call, or null for the + * current browsing context. + * + * @returns {Promise} Fulfilled when the display features are set. + * Rejected in case the WebDriver command errors out + * (including if the array is malformed). + */ + set_display_features: function(features, context=null) { + return window.test_driver_internal.set_display_features(features, context); + }, + + /** + * Removes display features override and returns the control + * back to hardware. + * + * Matches the `Clear display features + * `_ + * WebDriver command. + * + * @param {WindowProxy} [context=null] - Browsing context in which to + * run the call, or null for the + * current browsing context. + * + * @returns {Promise} Fulfilled after the display features override has + * been removed. Rejected in case the WebDriver + * command errors out. + */ + clear_display_features: function(context=null) { + return window.test_driver_internal.clear_display_features(context); } }; @@ -980,6 +2181,99 @@ */ in_automation: false, + bidi: { + bluetooth: { + handle_request_device_prompt: function() { + throw new Error( + 'bidi.bluetooth.handle_request_device_prompt is not implemented by testdriver-vendor.js'); + }, + simulate_adapter: function () { + throw new Error( + "bidi.bluetooth.simulate_adapter is not implemented by testdriver-vendor.js"); + }, + disable_simulation: function () { + throw new Error( + "bidi.bluetooth.disable_simulation is not implemented by testdriver-vendor.js"); + }, + simulate_preconnected_peripheral: function() { + throw new Error( + 'bidi.bluetooth.simulate_preconnected_peripheral is not implemented by testdriver-vendor.js'); + }, + request_device_prompt_updated: { + async subscribe() { + throw new Error( + 'bidi.bluetooth.request_device_prompt_updated.subscribe is not implemented by testdriver-vendor.js'); + }, + on() { + throw new Error( + 'bidi.bluetooth.request_device_prompt_updated.on is not implemented by testdriver-vendor.js'); + } + }, + gatt_connection_attempted: { + async subscribe() { + throw new Error( + 'bidi.bluetooth.gatt_connection_attempted.subscribe is not implemented by testdriver-vendor.js'); + }, + on() { + throw new Error( + 'bidi.bluetooth.gatt_connection_attempted.on is not implemented by testdriver-vendor.js'); + } + }, + characteristic_event_generated: { + async subscribe() { + throw new Error( + 'bidi.bluetooth.characteristic_event_generated.subscribe is not implemented by testdriver-vendor.js'); + }, + on() { + throw new Error( + 'bidi.bluetooth.characteristic_event_generated.on is not implemented by testdriver-vendor.js'); + } + }, + descriptor_event_generated: { + async subscribe() { + throw new Error( + 'bidi.bluetooth.descriptor_event_generated.subscribe is not implemented by testdriver-vendor.js'); + }, + on() { + throw new Error( + 'bidi.bluetooth.descriptor_event_generated.on is not implemented by testdriver-vendor.js'); + } + } + }, + emulation: { + set_geolocation_override: function (params) { + throw new Error( + "bidi.emulation.set_geolocation_override is not implemented by testdriver-vendor.js"); + }, + set_locale_override: function (params) { + throw new Error( + "bidi.emulation.set_locale_override is not implemented by testdriver-vendor.js"); + }, + set_screen_orientation_override: function (params) { + throw new Error( + 
"bidi.emulation.set_screen_orientation_override is not implemented by testdriver-vendor.js"); + } + }, + log: { + entry_added: { + async subscribe() { + throw new Error( + "bidi.log.entry_added.subscribe is not implemented by testdriver-vendor.js"); + }, + on() { + throw new Error( + "bidi.log.entry_added.on is not implemented by testdriver-vendor.js"); + } + } + }, + permissions: { + async set_permission() { + throw new Error( + "bidi.permissions.set_permission() is not implemented by testdriver-vendor.js"); + } + } + }, + async click(element, coords) { if (this.in_automation) { throw new Error("click() is not implemented by testdriver-vendor.js"); @@ -1002,6 +2296,14 @@ throw new Error("get_named_cookie() is not implemented by testdriver-vendor.js"); }, + async get_computed_role(element) { + throw new Error("get_computed_role is a testdriver.js function which cannot be run in this context."); + }, + + async get_computed_name(element) { + throw new Error("get_computed_name is a testdriver.js function which cannot be run in this context."); + }, + async send_keys(element, keys) { if (this.in_automation) { throw new Error("send_keys() is not implemented by testdriver-vendor.js"); @@ -1046,6 +2348,10 @@ throw new Error("set_window_rect() is not implemented by testdriver-vendor.js"); }, + async get_window_rect(context=null) { + throw new Error("get_window_rect() is not implemented by testdriver-vendor.js"); + }, + async action_sequence(actions, context=null) { throw new Error("action_sequence() is not implemented by testdriver-vendor.js"); }, @@ -1094,6 +2400,10 @@ throw new Error("set_spc_transaction_mode() is not implemented by testdriver-vendor.js"); }, + set_rph_registration_mode: function(mode, context=null) { + return Promise.reject(new Error("unimplemented")); + }, + async cancel_fedcm_dialog(context=null) { throw new Error("cancel_fedcm_dialog() is not implemented by testdriver-vendor.js"); }, @@ -1140,6 +2450,42 @@ async get_virtual_sensor_information(sensor_type, context=null) { throw new Error("get_virtual_sensor_information() is not implemented by testdriver-vendor.js"); + }, + + async set_device_posture(posture, context=null) { + throw new Error("set_device_posture() is not implemented by testdriver-vendor.js"); + }, + + async clear_device_posture(context=null) { + throw new Error("clear_device_posture() is not implemented by testdriver-vendor.js"); + }, + + async run_bounce_tracking_mitigations(context=null) { + throw new Error("run_bounce_tracking_mitigations() is not implemented by testdriver-vendor.js"); + }, + + async create_virtual_pressure_source(source_type, metadata={}, context=null) { + throw new Error("create_virtual_pressure_source() is not implemented by testdriver-vendor.js"); + }, + + async update_virtual_pressure_source(source_type, sample, own_contribution_estimate, context=null) { + throw new Error("update_virtual_pressure_source() is not implemented by testdriver-vendor.js"); + }, + + async remove_virtual_pressure_source(source_type, context=null) { + throw new Error("remove_virtual_pressure_source() is not implemented by testdriver-vendor.js"); + }, + + async set_protected_audience_k_anonymity(owner, name, hashes, context=null) { + throw new Error("set_protected_audience_k_anonymity() is not implemented by testdriver-vendor.js"); + }, + + async set_display_features(features, context=null) { + throw new Error("set_display_features() is not implemented by testdriver-vendor.js"); + }, + + async clear_display_features(context=null) { + throw new 
Error("clear_display_features() is not implemented by testdriver-vendor.js"); } }; })(); diff --git a/test/fixtures/wpt/resources/testharness-shadowrealm-audioworkletprocessor.js b/test/fixtures/wpt/resources/testharness-shadowrealm-audioworkletprocessor.js new file mode 100644 index 00000000000000..a87d9130908f86 --- /dev/null +++ b/test/fixtures/wpt/resources/testharness-shadowrealm-audioworkletprocessor.js @@ -0,0 +1,52 @@ +/** + * AudioWorkletProcessor intended for hosting a ShadowRealm and running a test + * inside of that ShadowRealm. + */ +globalThis.TestRunner = class TestRunner extends AudioWorkletProcessor { + constructor() { + super(); + this.createShadowRealmAndStartTests(); + } + + /** + * Fetch adaptor function intended as a drop-in replacement for fetchAdaptor() + * (see testharness-shadowrealm-outer.js), but it does not assume fetch() is + * present in the realm. Instead, it relies on setupFakeFetchOverMessagePort() + * having been called on the port on the other side of this.port's channel. + */ + fetchOverPortExecutor(resource) { + return (resolve, reject) => { + const listener = (event) => { + if (typeof event.data !== "string" || !event.data.startsWith("fetchResult::")) { + return; + } + + const result = event.data.slice("fetchResult::".length); + if (result.startsWith("success::")) { + resolve(result.slice("success::".length)); + } else { + reject(result.slice("fail::".length)); + } + + this.port.removeEventListener("message", listener); + } + this.port.addEventListener("message", listener); + this.port.start(); + this.port.postMessage(`fetchRequest::${resource}`); + } + } + + /** + * Async method, which is patched over in + * (test).any.audioworklet-shadowrealm.js; see serve.py + */ + async createShadowRealmAndStartTests() { + throw new Error("Forgot to overwrite this method!"); + } + + /** Overrides AudioWorkletProcessor.prototype.process() */ + process() { + return false; + } +}; +registerProcessor("test-runner", TestRunner); diff --git a/test/fixtures/wpt/resources/testharness-shadowrealm-inner.js b/test/fixtures/wpt/resources/testharness-shadowrealm-inner.js new file mode 100644 index 00000000000000..a9bdf9fc76ceea --- /dev/null +++ b/test/fixtures/wpt/resources/testharness-shadowrealm-inner.js @@ -0,0 +1,38 @@ +// testharness file with ShadowRealm utilities to be imported inside ShadowRealm + +/** + * Set up all properties on the ShadowRealm's global object that tests will + * expect to be present. 
+ * + * @param {string} queryString - string to use as value for location.search, + * used for subsetting some tests + * @param {function} fetchAdaptor - a function that takes a resource URI and + * returns a function which itself takes a (resolve, reject) pair from the + * hosting realm, and calls resolve with the text result of fetching the + * resource, or reject with a string indicating the error that occurred + */ +globalThis.setShadowRealmGlobalProperties = function (queryString, fetchAdaptor) { + globalThis.fetch_json = (resource) => { + const executor = fetchAdaptor(resource); + return new Promise(executor).then((s) => JSON.parse(s)); + }; + + // Used only by idlharness.js + globalThis.fetch_spec = (spec) => { + const resource = `/interfaces/${spec}.idl`; + const executor = fetchAdaptor(resource); + return new Promise(executor).then( + idl => ({ spec, idl }), + () => { + throw new IdlHarnessError(`Error fetching ${resource}.`); + }); + } + + globalThis.location = { search: queryString }; +}; + +globalThis.GLOBAL = { + isWindow: function() { return false; }, + isWorker: function() { return false; }, + isShadowRealm: function() { return true; }, +}; diff --git a/test/fixtures/wpt/resources/testharness-shadowrealm-outer.js b/test/fixtures/wpt/resources/testharness-shadowrealm-outer.js new file mode 100644 index 00000000000000..1affa72c2c56a2 --- /dev/null +++ b/test/fixtures/wpt/resources/testharness-shadowrealm-outer.js @@ -0,0 +1,151 @@ +// testharness file with ShadowRealm utilities to be imported in the realm +// hosting the ShadowRealm + +/** + * Convenience function for evaluating some async code in the ShadowRealm and + * waiting for the result. + * + * In case of error, this function intentionally exposes the stack trace (if it + * is available) to the hosting realm, for debugging purposes. + * + * @param {ShadowRealm} realm - the ShadowRealm to evaluate the code in + * @param {string} asyncBody - the code to evaluate; will be put in the body of + * an async function, and must return a value explicitly if a value is to be + * returned to the hosting realm. + */ +globalThis.shadowRealmEvalAsync = function (realm, asyncBody) { + return new Promise(realm.evaluate(` + (resolve, reject) => { + (async () => { + ${asyncBody} + })().then(resolve, (e) => reject(e.toString() + "\\n" + (e.stack || ""))); + } + `)); +}; + +/** + * Convenience adaptor function for fetch() that can be passed to + * setShadowRealmGlobalProperties() (see testharness-shadowrealm-inner.js). + * Used to adapt the hosting realm's fetch(), if present, to fetch a resource + * and pass its text through the callable boundary to the ShadowRealm. + */ +globalThis.fetchAdaptor = (resource) => (resolve, reject) => { + fetch(resource) + .then(res => res.text()) + .then(resolve, (e) => reject(e.toString())); +}; + +let workerMessagePortPromise; +/** + * Used when the hosting realm is a worker. This value is a Promise that + * resolves to a function that posts a message to the worker's message port, + * just like postMessage(). The message port is only available asynchronously in + * SharedWorkers and ServiceWorkers. 
+ */ +globalThis.getPostMessageFunc = async function () { + if (typeof postMessage === "function") { + return postMessage; // postMessage available directly in dedicated worker + } + + if (workerMessagePortPromise) { + return await workerMessagePortPromise; + } + + throw new Error("getPostMessageFunc is intended for Worker scopes"); +} + +// Port available asynchronously in shared worker, but not via an async func +let savedResolver; +if (globalThis.constructor.name === "SharedWorkerGlobalScope") { + workerMessagePortPromise = new Promise((resolve) => { + savedResolver = resolve; + }); + addEventListener("connect", function (event) { + const port = event.ports[0]; + savedResolver(port.postMessage.bind(port)); + }); +} else if (globalThis.constructor.name === "ServiceWorkerGlobalScope") { + workerMessagePortPromise = new Promise((resolve) => { + savedResolver = resolve; + }); + addEventListener("message", (e) => { + if (typeof e.data === "object" && e.data !== null && e.data.type === "connect") { + const client = e.source; + savedResolver(client.postMessage.bind(client)); + } + }); +} + +/** + * Used when the hosting realm does not permit dynamic import, e.g. in + * ServiceWorkers or AudioWorklets. Requires an adaptor function such as + * fetchAdaptor() above, or an equivalent if fetch() is not present in the + * hosting realm. + * + * @param {ShadowRealm} realm - the ShadowRealm in which to setup a + * fakeDynamicImport() global function. + * @param {function} adaptor - an adaptor function that does what fetchAdaptor() + * does. + */ +globalThis.setupFakeDynamicImportInShadowRealm = function(realm, adaptor) { + function fetchModuleTextExecutor(url) { + return (resolve, reject) => { + new Promise(adaptor(url)) + .then(text => realm.evaluate(text + ";\nundefined")) + .then(resolve, (e) => reject(e.toString())); + } + } + + realm.evaluate(` + (fetchModuleTextExecutor) => { + globalThis.fakeDynamicImport = function (url) { + return new Promise(fetchModuleTextExecutor(url)); + } + } + `)(fetchModuleTextExecutor); +}; + +/** + * Used when the hosting realm does not expose fetch(), i.e. in worklets. The + * port on the other side of the channel needs to send messages starting with + * 'fetchRequest::' and listen for messages starting with 'fetchResult::'. See + * testharness-shadowrealm-audioworkletprocessor.js. + * + * @param {port} MessagePort - the message port on which to listen for fetch + * requests + */ +globalThis.setupFakeFetchOverMessagePort = function (port) { + port.addEventListener("message", (event) => { + if (typeof event.data !== "string" || !event.data.startsWith("fetchRequest::")) { + return; + } + + fetch(event.data.slice("fetchRequest::".length)) + .then(res => res.text()) + .then( + text => port.postMessage(`fetchResult::success::${text}`), + error => port.postMessage(`fetchResult::fail::${error}`), + ); + }); + port.start(); +} + +/** + * Returns a message suitable for posting with postMessage() that will signal to + * the test harness that the tests are finished and there was an error in the + * setup code. + * + * @param {message} any - error + */ +globalThis.createSetupErrorResult = function (message) { + return { + type: "complete", + tests: [], + asserts: [], + status: { + status: 1, // TestsStatus.ERROR, + message: String(message), + stack: typeof message === "object" && message !== null && "stack" in message ? 
message.stack : undefined, + }, + }; +}; diff --git a/test/fixtures/wpt/resources/testharness.js b/test/fixtures/wpt/resources/testharness.js index 126ae96423bc83..f495b62458ba75 100644 --- a/test/fixtures/wpt/resources/testharness.js +++ b/test/fixtures/wpt/resources/testharness.js @@ -88,10 +88,15 @@ status: harness_status.structured_clone(), asserts: asserts.map(assert => assert.structured_clone())}); }] - } + }; on_event(window, 'load', function() { + setTimeout(() => { this_obj.all_loaded = true; + if (tests.all_done()) { + tests.complete(); + } + },0); }); on_event(window, 'message', function(event) { @@ -198,7 +203,7 @@ } }); this.message_events = new_events; - } + }; WindowTestEnvironment.prototype.next_default_test_name = function() { var suffix = this.name_counter > 0 ? " " + this.name_counter : ""; @@ -220,8 +225,8 @@ WindowTestEnvironment.prototype.test_timeout = function() { var metas = document.getElementsByTagName("meta"); for (var i = 0; i < metas.length; i++) { - if (metas[i].name == "timeout") { - if (metas[i].content == "long") { + if (metas[i].name === "timeout") { + if (metas[i].content === "long") { return settings.harness_timeout.long; } break; @@ -482,7 +487,7 @@ this.all_loaded = false; this.on_loaded_callback = null; Promise.resolve().then(function() { - this.all_loaded = true + this.all_loaded = true; if (this.on_loaded_callback) { this.on_loaded_callback(); } @@ -558,7 +563,7 @@ // The worker object may be from another execution context, // so do not use instanceof here. return 'ServiceWorker' in global_scope && - Object.prototype.toString.call(worker) == '[object ServiceWorker]'; + Object.prototype.toString.call(worker) === '[object ServiceWorker]'; } var seen_func_name = Object.create(null); @@ -600,7 +605,7 @@ /** * @callback TestFunction - * @param {Test} test - The test currently being run. + * @param {Test} test - The test currnetly being run. * @param {Any[]} args - Additional args to pass to function. * */ @@ -805,7 +810,7 @@ return bring_promise_to_current_realm(promise) .then(test.unreached_func("Should have rejected: " + description)) .catch(function(e) { - assert_throws_js_impl(constructor, function() { throw e }, + assert_throws_js_impl(constructor, function() { throw e; }, description, "promise_rejects_js"); }); } @@ -852,16 +857,64 @@ promise = promiseOrConstructor; description = descriptionOrPromise; assert(maybeDescription === undefined, - "Too many args pased to no-constructor version of promise_rejects_dom"); + "Too many args passed to no-constructor version of promise_rejects_dom, or accidentally explicitly passed undefined"); } return bring_promise_to_current_realm(promise) .then(test.unreached_func("Should have rejected: " + description)) .catch(function(e) { - assert_throws_dom_impl(type, function() { throw e }, description, + assert_throws_dom_impl(type, function() { throw e; }, description, "promise_rejects_dom", constructor); }); } +/** + * Assert that a `Promise` is rejected with a `QuotaExceededError` with the + * expected values. + * + * For the remaining arguments, there are two ways of calling + * `promise_rejects_quotaexceedederror`: + * + * 1) If the `QuotaExceededError` is expected to come from the + * current global, the second argument should be the promise + * expected to reject, the third and a fourth the expected + * `requested` and `quota` property values, and the fifth, + * optional, argument is the assertion description. 
+ * + * 2) If the `QuotaExceededError` is expected to come from some + * other global, the second argument should be the + * `QuotaExceededError` constructor from that global, the third + * argument should be the promise expected to reject, the fourth + * and fifth the expected `requested` and `quota` property + * values, and the sixth, optional, argument is the assertion + * description. + * + */ + function promise_rejects_quotaexceedederror(test, promiseOrConstructor, requestedOrPromise, quotaOrRequested, descriptionOrQuota, maybeDescription) + { + let constructor, promise, requested, quota, description; + if (typeof promiseOrConstructor === "function" && + promiseOrConstructor.name === "QuotaExceededError") { + constructor = promiseOrConstructor; + promise = requestedOrPromise; + requested = quotaOrRequested; + quota = descriptionOrQuota; + description = maybeDescription; + } else { + constructor = self.QuotaExceededError; + promise = promiseOrConstructor; + requested = requestedOrPromise; + quota = quotaOrRequested; + description = descriptionOrQuota; + assert(maybeDescription === undefined, + "Too many args passed to no-constructor version of promise_rejects_quotaexceedederror"); + } + return bring_promise_to_current_realm(promise) + .then(test.unreached_func("Should have rejected: " + description)) + .catch(function(e) { + assert_throws_quotaexceedederror_impl(function() { throw e; }, requested, quota, description, "promise_rejects_quotaexceedederror", constructor); + }); + } + /** * Assert that a Promise is rejected with the provided value. * @@ -876,7 +929,7 @@ return bring_promise_to_current_realm(promise) .then(test.unreached_func("Should have rejected: " + description)) .catch(function(e) { - assert_throws_exactly_impl(exception, function() { throw e }, + assert_throws_exactly_impl(exception, function() { throw e; }, description, "promise_rejects_exactly"); }); } @@ -902,7 +955,7 @@ */ function EventWatcher(test, watchedNode, eventTypes, timeoutPromise) { - if (typeof eventTypes == 'string') { + if (typeof eventTypes === 'string') { eventTypes = [eventTypes]; } @@ -967,7 +1020,7 @@ if (waitingFor) { return Promise.reject('Already waiting for an event or events'); } - if (typeof types == 'string') { + if (typeof types === 'string') { types = [types]; } if (options && options.record && options.record === 'all') { @@ -982,7 +1035,7 @@ // This should always fail, otherwise we should have // resolved the promise. - assert_true(waitingFor.types.length == 0, + assert_true(waitingFor.types.length === 0, 'Timed out waiting for ' + waitingFor.types.join(', ')); var result = recordedEvents; recordedEvents = null; @@ -1006,13 +1059,13 @@ /** * Stop listening for events */ - function stop_watching() { + this.stop_watching = function() { for (var i = 0; i < eventTypes.length; i++) { watchedNode.removeEventListener(eventTypes[i], eventHandler, false); } }; - test._add_cleanup(stop_watching); + test._add_cleanup(this.stop_watching); return this; } @@ -1089,7 +1142,7 @@ { if (typeof func !== "function") { tests.set_status(tests.status.ERROR, - "promise_test invoked without a function"); + "`promise_setup` invoked without a function"); tests.complete(); return; } @@ -1127,7 +1180,7 @@ * * Typically this function is called implicitly on page load; it's * only necessary for users to call this when either the - * ``explicit_done`` or ``single_page`` properties have been set + * ``explicit_done`` or ``single_test`` properties have been set * via the :js:func:`setup` function. 
* * For single page tests this marks the test as complete and sets its status. @@ -1196,6 +1249,23 @@ object.addEventListener(event, callback, false); } + // Internal helper function to provide timeout-like functionality in + // environments where there is no setTimeout(). (No timeout ID or + // clearTimeout().) + function fake_set_timeout(callback, delay) { + var p = Promise.resolve(); + var start = Date.now(); + var end = start + delay; + function check() { + if ((end - Date.now()) > 0) { + p.then(check); + } else { + callback(); + } + } + p.then(check); + } + /** * Global version of :js:func:`Test.step_timeout` for use in single page tests. * @@ -1207,7 +1277,8 @@ function step_timeout(func, timeout) { var outer_this = this; var args = Array.prototype.slice.call(arguments, 2); - return setTimeout(function() { + var local_set_timeout = typeof global_scope.setTimeout === "undefined" ? fake_set_timeout : setTimeout; + return local_set_timeout(function() { func.apply(outer_this, args); }, timeout * tests.timeout_multiplier); } @@ -1217,6 +1288,7 @@ expose(promise_test, 'promise_test'); expose(promise_rejects_js, 'promise_rejects_js'); expose(promise_rejects_dom, 'promise_rejects_dom'); + expose(promise_rejects_quotaexceedederror, 'promise_rejects_quotaexceedederror'); expose(promise_rejects_exactly, 'promise_rejects_exactly'); expose(generate_tests, 'generate_tests'); expose(setup, 'setup'); @@ -1307,6 +1379,15 @@ "0xffff": "uffff", }; + const formatEscapeMap = { + "\\": "\\\\", + '"': '\\"' + }; + for (const p in replacements) { + formatEscapeMap[String.fromCharCode(p)] = "\\" + replacements[p]; + } + const formatEscapePattern = new RegExp(`[${Object.keys(formatEscapeMap).map(k => k === "\\" ? "\\\\" : k).join("")}]`, "g"); + /** * Convert a value to a nice, human-readable string * @@ -1357,12 +1438,7 @@ switch (typeof val) { case "string": - val = val.replace(/\\/g, "\\\\"); - for (var p in replacements) { - var replace = "\\" + replacements[p]; - val = val.replace(RegExp(String.fromCharCode(p), "g"), replace); - } - return '"' + val.replace(/"/g, '\\"') + '"'; + return '"' + val.replace(formatEscapePattern, match => formatEscapeMap[match]) + '"'; case "boolean": case "undefined": return String(val); @@ -1373,6 +1449,8 @@ return "-0"; } return String(val); + case "bigint": + return String(val) + 'n'; case "object": if (val === null) { return "null"; @@ -1396,11 +1474,11 @@ case Node.COMMENT_NODE: return "Comment node "; case Node.DOCUMENT_NODE: - return "Document node with " + val.childNodes.length + (val.childNodes.length == 1 ? " child" : " children"); + return "Document node with " + val.childNodes.length + (val.childNodes.length === 1 ? " child" : " children"); case Node.DOCUMENT_TYPE_NODE: return "DocumentType node"; case Node.DOCUMENT_FRAGMENT_NODE: - return "DocumentFragment node with " + val.childNodes.length + (val.childNodes.length == 1 ? " child" : " children"); + return "DocumentFragment node with " + val.childNodes.length + (val.childNodes.length === 1 ? " child" : " children"); default: return "Node object of unknown type"; } @@ -1744,20 +1822,25 @@ /** * Assert that ``actual`` is a number less than ``expected``. * - * @param {number} actual - Test value. - * @param {number} expected - Number that ``actual`` must be less than. + * @param {number|bigint} actual - Test value. + * @param {number|bigint} expected - Value that ``actual`` must be less than. * @param {string} [description] - Description of the condition being tested. 
*/ function assert_less_than(actual, expected, description) { /* - * Test if a primitive number is less than another + * Test if a primitive number (or bigint) is less than another */ - assert(typeof actual === "number", + assert(typeof actual === "number" || typeof actual === "bigint", "assert_less_than", description, "expected a number but got a ${type_actual}", {type_actual:typeof actual}); + assert(typeof actual === typeof expected, + "assert_less_than", description, + "expected a ${type_expected} but got a ${type_actual}", + {type_expected:typeof expected, type_actual:typeof actual}); + assert(actual < expected, "assert_less_than", description, "expected a number less than ${expected} but got ${actual}", @@ -1768,20 +1851,25 @@ /** * Assert that ``actual`` is a number greater than ``expected``. * - * @param {number} actual - Test value. - * @param {number} expected - Number that ``actual`` must be greater than. + * @param {number|bigint} actual - Test value. + * @param {number|bigint} expected - Value that ``actual`` must be greater than. * @param {string} [description] - Description of the condition being tested. */ function assert_greater_than(actual, expected, description) { /* - * Test if a primitive number is greater than another + * Test if a primitive number (or bigint) is greater than another */ - assert(typeof actual === "number", + assert(typeof actual === "number" || typeof actual === "bigint", "assert_greater_than", description, "expected a number but got a ${type_actual}", {type_actual:typeof actual}); + assert(typeof actual === typeof expected, + "assert_greater_than", description, + "expected a ${type_expected} but got a ${type_actual}", + {type_expected:typeof expected, type_actual:typeof actual}); + assert(actual > expected, "assert_greater_than", description, "expected a number greater than ${expected} but got ${actual}", @@ -1793,21 +1881,31 @@ * Assert that ``actual`` is a number greater than ``lower`` and less * than ``upper`` but not equal to either. * - * @param {number} actual - Test value. - * @param {number} lower - Number that ``actual`` must be greater than. - * @param {number} upper - Number that ``actual`` must be less than. + * @param {number|bigint} actual - Test value. + * @param {number|bigint} lower - Value that ``actual`` must be greater than. + * @param {number|bigint} upper - Value that ``actual`` must be less than. * @param {string} [description] - Description of the condition being tested. */ function assert_between_exclusive(actual, lower, upper, description) { /* - * Test if a primitive number is between two others + * Test if a primitive number (or bigint) is between two others */ - assert(typeof actual === "number", + assert(typeof lower === typeof upper, + "assert_between_exclusive", description, + "expected lower (${type_lower}) and upper (${type_upper}) types to match (test error)", + {type_lower:typeof lower, type_upper:typeof upper}); + + assert(typeof actual === "number" || typeof actual === "bigint", "assert_between_exclusive", description, "expected a number but got a ${type_actual}", {type_actual:typeof actual}); + assert(typeof actual === typeof lower, + "assert_between_exclusive", description, + "expected a ${type_lower} but got a ${type_actual}", + {type_lower:typeof lower, type_actual:typeof actual}); + assert(actual > lower && actual < upper, "assert_between_exclusive", description, "expected a number greater than ${lower} " + @@ -1819,21 +1917,26 @@ /** * Assert that ``actual`` is a number less than or equal to ``expected``. 
* - * @param {number} actual - Test value. - * @param {number} expected - Number that ``actual`` must be less + * @param {number|bigint} actual - Test value. + * @param {number|bigint} expected - Value that ``actual`` must be less * than or equal to. * @param {string} [description] - Description of the condition being tested. */ function assert_less_than_equal(actual, expected, description) { /* - * Test if a primitive number is less than or equal to another + * Test if a primitive number (or bigint) is less than or equal to another */ - assert(typeof actual === "number", + assert(typeof actual === "number" || typeof actual === "bigint", "assert_less_than_equal", description, "expected a number but got a ${type_actual}", {type_actual:typeof actual}); + assert(typeof actual === typeof expected, + "assert_less_than_equal", description, + "expected a ${type_expected} but got a ${type_actual}", + {type_expected:typeof expected, type_actual:typeof actual}); + assert(actual <= expected, "assert_less_than_equal", description, "expected a number less than or equal to ${expected} but got ${actual}", @@ -1844,21 +1947,26 @@ /** * Assert that ``actual`` is a number greater than or equal to ``expected``. * - * @param {number} actual - Test value. - * @param {number} expected - Number that ``actual`` must be greater + * @param {number|bigint} actual - Test value. + * @param {number|bigint} expected - Value that ``actual`` must be greater * than or equal to. * @param {string} [description] - Description of the condition being tested. */ function assert_greater_than_equal(actual, expected, description) { /* - * Test if a primitive number is greater than or equal to another + * Test if a primitive number (or bigint) is greater than or equal to another */ - assert(typeof actual === "number", + assert(typeof actual === "number" || typeof actual === "bigint", "assert_greater_than_equal", description, "expected a number but got a ${type_actual}", {type_actual:typeof actual}); + assert(typeof actual === typeof expected, + "assert_greater_than_equal", description, + "expected a ${type_expected} but got a ${type_actual}", + {type_expected:typeof expected, type_actual:typeof actual}); + assert(actual >= expected, "assert_greater_than_equal", description, "expected a number greater than or equal to ${expected} but got ${actual}", @@ -1870,21 +1978,31 @@ * Assert that ``actual`` is a number greater than or equal to ``lower`` and less * than or equal to ``upper``. * - * @param {number} actual - Test value. - * @param {number} lower - Number that ``actual`` must be greater than or equal to. - * @param {number} upper - Number that ``actual`` must be less than or equal to. + * @param {number|bigint} actual - Test value. + * @param {number|bigint} lower - Value that ``actual`` must be greater than or equal to. + * @param {number|bigint} upper - Value that ``actual`` must be less than or equal to. * @param {string} [description] - Description of the condition being tested. 
*/ function assert_between_inclusive(actual, lower, upper, description) { /* - * Test if a primitive number is between to two others or equal to either of them + * Test if a primitive number (or bigint) is between to two others or equal to either of them */ - assert(typeof actual === "number", + assert(typeof lower === typeof upper, + "assert_between_inclusive", description, + "expected lower (${type_lower}) and upper (${type_upper}) types to match (test error)", + {type_lower:typeof lower, type_upper:typeof upper}); + + assert(typeof actual === "number" || typeof actual === "bigint", "assert_between_inclusive", description, "expected a number but got a ${type_actual}", {type_actual:typeof actual}); + assert(typeof actual === typeof lower, + "assert_between_inclusive", description, + "expected a ${type_lower} but got a ${type_actual}", + {type_lower:typeof lower, type_actual:typeof actual}); + assert(actual >= lower && actual <= upper, "assert_between_inclusive", description, "expected a number greater than or equal to ${lower} " + @@ -2009,30 +2127,46 @@ /** - * Assert that ``object`` has a property named ``property_name`` and that the property is readonly. + * Assert that ``object`` has a property named ``property_name`` and that the property is not writable or has no setter. * - * Note: The implementation tries to update the named property, so - * any side effects of updating will be triggered. Users are - * encouraged to instead inspect the property descriptor of ``property_name`` on ``object``. - * - * @param {Object} object - Object that should have the given property in its prototype chain. + * @param {Object} object - Object that should have the given (not necessarily own) property. * @param {string} property_name - Expected property name. * @param {string} [description] - Description of the condition being tested. */ function assert_readonly(object, property_name, description) { - var initial_value = object[property_name]; - try { - //Note that this can have side effects in the case where - //the property has PutForwards - object[property_name] = initial_value + "a"; //XXX use some other value here? - assert(same_value(object[property_name], initial_value), - "assert_readonly", description, - "changing property ${p} succeeded", - {p:property_name}); - } finally { - object[property_name] = initial_value; - } + assert(property_name in object, + "assert_readonly", description, + "property ${p} not found", + {p:property_name}); + + let desc; + while (object && (desc = Object.getOwnPropertyDescriptor(object, property_name)) === undefined) { + object = Object.getPrototypeOf(object); + } + + assert(desc !== undefined, + "assert_readonly", description, + "could not find a descriptor for property ${p}", + {p:property_name}); + + if (desc.hasOwnProperty("value")) { + // We're a data property descriptor + assert(desc.writable === false, "assert_readonly", description, + "descriptor [[Writable]] expected false got ${actual}", {actual:desc.writable}); + } else if (desc.hasOwnProperty("get") || desc.hasOwnProperty("set")) { + // We're an accessor property descriptor + assert(desc.set === undefined, "assert_readonly", description, + "property ${p} is an accessor property with a [[Set]] attribute, cannot test readonly-ness", + {p:property_name}); + } else { + // We're a generic property descriptor + // This shouldn't happen, because Object.getOwnPropertyDescriptor + // forwards the return value of [[GetOwnProperty]] (P), which must + // be a fully populated Property Descriptor or Undefined. 
+ assert(false, "assert_readonly", description, + "Object.getOwnPropertyDescriptor must return a fully populated property descriptor"); + } } expose_assert(assert_readonly, "assert_readonly"); @@ -2078,7 +2212,7 @@ {func:func}); // Basic sanity-check on the passed-in constructor - assert(typeof constructor == "function", + assert(typeof constructor === "function", assertion_type, description, "${constructor} is not a constructor", {constructor:constructor}); @@ -2151,9 +2285,9 @@ func = funcOrConstructor; description = descriptionOrFunc; assert(maybeDescription === undefined, - "Too many args pased to no-constructor version of assert_throws_dom"); + "Too many args passed to no-constructor version of assert_throws_dom, or accidentally explicitly passed undefined"); } - assert_throws_dom_impl(type, func, description, "assert_throws_dom", constructor) + assert_throws_dom_impl(type, func, description, "assert_throws_dom", constructor); } expose_assert(assert_throws_dom, "assert_throws_dom"); @@ -2186,8 +2320,8 @@ {func:func}); // Sanity-check our type - assert(typeof type == "number" || - typeof type == "string", + assert(typeof type === "number" || + typeof type === "string", assertion_type, description, "${type} is not a number or string", {type:type}); @@ -2211,7 +2345,6 @@ NETWORK_ERR: 'NetworkError', ABORT_ERR: 'AbortError', URL_MISMATCH_ERR: 'URLMismatchError', - QUOTA_EXCEEDED_ERR: 'QuotaExceededError', TIMEOUT_ERR: 'TimeoutError', INVALID_NODE_TYPE_ERR: 'InvalidNodeTypeError', DATA_CLONE_ERR: 'DataCloneError' @@ -2236,7 +2369,6 @@ NetworkError: 19, AbortError: 20, URLMismatchError: 21, - QuotaExceededError: 22, TimeoutError: 23, InvalidNodeTypeError: 24, DataCloneError: 25, @@ -2267,12 +2399,19 @@ if (typeof type === "number") { if (type === 0) { throw new AssertionError('Test bug: ambiguous DOMException code 0 passed to assert_throws_dom()'); - } else if (!(type in code_name_map)) { + } + if (type === 22) { + throw new AssertionError('Test bug: QuotaExceededError needs to be tested for using assert_throws_quotaexceedederror()'); + } + if (!(type in code_name_map)) { throw new AssertionError('Test bug: unrecognized DOMException code "' + type + '" passed to assert_throws_dom()'); } name = code_name_map[type]; required_props.code = type; } else if (typeof type === "string") { + if (name === "QuotaExceededError") { + throw new AssertionError('Test bug: QuotaExceededError needs to be tested for using assert_throws_quotaexceedederror()'); + } name = type in codename_name_map ? codename_name_map[type] : type; if (!(name in name_code_map)) { throw new AssertionError('Test bug: unrecognized DOMException code name or name "' + type + '" passed to assert_throws_dom()'); @@ -2307,6 +2446,137 @@ } } + /** + * Assert a `QuotaExceededError` with the expected values is thrown. + * + * There are two ways of calling `assert_throws_quotaexceedederror`: + * + * 1) If the `QuotaExceededError` is expected to come from the + * current global, the first argument should be the function + * expected to throw, the second and a third the expected + * `requested` and `quota` property values, and the fourth, + * optional, argument is the assertion description. 
+ * + * 2) If the `QuotaExceededError` is expected to come from some + * other global, the first argument should be the + * `QuotaExceededError` constructor from that global, the second + * argument should be the function expected to throw, the third + * and fourth the expected `requested` and `quota` property + * values, and the fifth, optional, argument is the assertion + * description. + * + * For the `requested` and `quota` values, instead of `null` or a + * number, the caller can provide a function which determines + * whether the value is acceptable by returning a boolean. + * + */ + function assert_throws_quotaexceedederror(funcOrConstructor, requestedOrFunc, quotaOrRequested, descriptionOrQuota, maybeDescription) + { + let constructor, func, requested, quota, description; + if (funcOrConstructor.name === "QuotaExceededError") { + constructor = funcOrConstructor; + func = requestedOrFunc; + requested = quotaOrRequested; + quota = descriptionOrQuota; + description = maybeDescription; + } else { + constructor = self.QuotaExceededError; + func = funcOrConstructor; + requested = requestedOrFunc; + quota = quotaOrRequested; + description = descriptionOrQuota; + assert(maybeDescription === undefined, + "Too many args passed to no-constructor version of assert_throws_quotaexceedederror"); + } + assert_throws_quotaexceedederror_impl(func, requested, quota, description, "assert_throws_quotaexceedederror", constructor); + } + expose_assert(assert_throws_quotaexceedederror, "assert_throws_quotaexceedederror"); + + /** + * Similar to `assert_throws_quotaexceedederror` but allows + * specifying the assertion type + * (`"assert_throws_quotaexceedederror"` or + * `"promise_rejects_quotaexceedederror"`, in practice). The + * `constructor` argument must be the `QuotaExceededError` + * constructor from the global we expect the exception to come from. + */ + function assert_throws_quotaexceedederror_impl(func, requested, quota, description, assertion_type, constructor) + { + try { + func.call(this); + assert(false, assertion_type, description, "${func} did not throw", + {func}); + } catch (e) { + if (e instanceof AssertionError) { + throw e; + } + + // Basic sanity-checks on the thrown exception. + assert(typeof e === "object", + assertion_type, description, + "${func} threw ${e} with type ${type}, not an object", + {func, e, type:typeof e}); + + assert(e !== null, + assertion_type, description, + "${func} threw null, not an object", + {func}); + + // Sanity-check our requested and quota. 
+ assert(requested === null || + typeof requested === "number" || + typeof requested === "function", + assertion_type, description, + "${requested} is not null, a number, or a function", + {requested}); + assert(quota === null || + typeof quota === "number" || + typeof quota === "function", + assertion_type, description, + "${quota} is not null or a number", + {quota}); + + const required_props = { + code: 22, + name: "QuotaExceededError" + }; + if (typeof requested !== "function") { + required_props.requested = requested; + } + if (typeof quota !== "function") { + required_props.quota = quota; + } + + for (const [prop, expected] of Object.entries(required_props)) { + assert(prop in e && e[prop] == expected, + assertion_type, description, + "${func} threw ${e} that is not a correct QuotaExceededError: property ${prop} is equal to ${actual}, expected ${expected}", + {func, e, prop, actual:e[prop], expected}); + } + + if (typeof requested === "function") { + assert(requested(e.requested), + assertion_type, description, + "${func} threw ${e} that is not a correct QuotaExceededError: requested value ${requested} did not pass the requested predicate", + {func, e, requested}); + } + if (typeof quota === "function") { + assert(quota(e.quota), + assertion_type, description, + "${func} threw ${e} that is not a correct QuotaExceededError: quota value ${quota} did not pass the quota predicate", + {func, e, quota}); + } + + // Check that the exception is from the right global. This check is last + // so more specific, and more informative, checks on the properties can + // happen in case a totally incorrect exception is thrown. + assert(e.constructor === constructor, + assertion_type, description, + "${func} threw an exception from the wrong global", + {func}); + } + } + /** * Assert the provided value is thrown. * @@ -2416,7 +2686,7 @@ function assert_implements(condition, description) { assert(!!condition, "assert_implements", description); } - expose_assert(assert_implements, "assert_implements") + expose_assert(assert_implements, "assert_implements"); /** * Assert that an optional feature is implemented, based on a 'truthy' condition. @@ -2530,11 +2800,11 @@ 2: "Timeout", 3: "Not Run", 4: "Optional Feature Unsupported", - } + }; Test.prototype.format_status = function() { return this.status_formats[this.status]; - } + }; Test.prototype.structured_clone = function() { @@ -2715,7 +2985,8 @@ Test.prototype.step_timeout = function(func, timeout) { var test_this = this; var args = Array.prototype.slice.call(arguments, 2); - return setTimeout(this.step_func(function() { + var local_set_timeout = typeof global_scope.setTimeout === "undefined" ? fake_set_timeout : setTimeout; + return local_set_timeout(this.step_func(function() { return func.apply(test_this, args); }), timeout * tests.timeout_multiplier); }; @@ -2746,6 +3017,7 @@ var timeout_full = timeout * tests.timeout_multiplier; var remaining = Math.ceil(timeout_full / interval); var test_this = this; + var local_set_timeout = typeof global_scope.setTimeout === 'undefined' ? fake_set_timeout : setTimeout; const step = test_this.step_func((result) => { if (result) { @@ -2756,7 +3028,7 @@ "Timed out waiting on condition"); } remaining--; - setTimeout(wait_for_inner, interval); + local_set_timeout(wait_for_inner, interval); } }); @@ -2842,7 +3114,7 @@ return new Promise(resolve => { this.step_wait_func(cond, resolve, description, timeout, interval); }); - } + }; /* * Private method for registering cleanup functions. 
`testharness.js` @@ -3075,7 +3347,7 @@ throw new Error("AbortController is not supported in this browser"); } return this._abortController.signal; - } + }; /** * A RemoteTest object mirrors a Test object on a remote worker. The @@ -3151,11 +3423,11 @@ function(callback) { callback(); }); - } + }; RemoteTest.prototype.format_status = function() { return Test.prototype.status_formats[this.status]; - } + }; /* * A RemoteContext listens for test events from a remote test context, such @@ -3427,7 +3699,7 @@ this.all_done_callbacks = []; this.hide_test_state = false; - this.pending_remotes = []; + this.remotes = []; this.current_test = null; this.asserts_run = []; @@ -3469,26 +3741,26 @@ for (var p in properties) { if (properties.hasOwnProperty(p)) { var value = properties[p]; - if (p == "allow_uncaught_exception") { + if (p === "allow_uncaught_exception") { this.allow_uncaught_exception = value; - } else if (p == "explicit_done" && value) { + } else if (p === "explicit_done" && value) { this.wait_for_finish = true; - } else if (p == "explicit_timeout" && value) { + } else if (p === "explicit_timeout" && value) { this.timeout_length = null; if (this.timeout_id) { clearTimeout(this.timeout_id); } - } else if (p == "single_test" && value) { + } else if (p === "single_test" && value) { this.set_file_is_test(); - } else if (p == "timeout_multiplier") { + } else if (p === "timeout_multiplier") { this.timeout_multiplier = value; if (this.timeout_length) { this.timeout_length *= this.timeout_multiplier; } - } else if (p == "hide_test_state") { + } else if (p === "hide_test_state") { this.hide_test_state = value; - } else if (p == "output") { + } else if (p === "output") { this.output = value; } else if (p === "debug") { settings.debug = value; @@ -3581,11 +3853,14 @@ Tests.prototype.push = function(test) { + if (this.phase === this.phases.COMPLETE) { + return; + } if (this.phase < this.phases.HAVE_TESTS) { this.start(); } this.num_pending++; - test.index = this.tests.push(test); + test.index = this.tests.push(test) - 1; this.notify_test_state(test); }; @@ -3598,11 +3873,11 @@ }; Tests.prototype.all_done = function() { - return (this.tests.length > 0 || this.pending_remotes.length > 0) && + return (this.tests.length > 0 || this.remotes.length > 0) && test_environment.all_loaded && (this.num_pending === 0 || this.is_aborted) && !this.wait_for_finish && !this.processing_callbacks && - !this.pending_remotes.some(function(w) { return w.running; }); + !this.remotes.some(function(w) { return w.running; }); }; Tests.prototype.start = function() { @@ -3671,7 +3946,8 @@ function(test, testDone) { if (test.phase === test.phases.INITIAL) { - test.phase = test.phases.COMPLETE; + test.phase = test.phases.HAS_RESULT; + test.done(); testDone(); } else { add_test_done_callback(test, testDone); @@ -3682,14 +3958,14 @@ }; Tests.prototype.set_assert = function(assert_name, args) { - this.asserts_run.push(new AssertRecord(this.current_test, assert_name, args)) - } + this.asserts_run.push(new AssertRecord(this.current_test, assert_name, args)); + }; Tests.prototype.set_assert_status = function(index, status, stack) { let assert_record = this.asserts_run[index]; assert_record.status = status; assert_record.stack = stack; - } + }; /** * Update the harness status to reflect an unrecoverable harness error that @@ -3831,7 +4107,7 @@ } var remoteContext = this.create_remote_worker(worker); - this.pending_remotes.push(remoteContext); + this.remotes.push(remoteContext); return remoteContext.done; }; @@ -3854,7 +4130,7 @@ } var 
remoteContext = this.create_remote_window(remote); - this.pending_remotes.push(remoteContext); + this.remotes.push(remoteContext); return remoteContext.done; }; @@ -4066,8 +4342,8 @@ } else { var root = output_document.documentElement; var is_html = (root && - root.namespaceURI == "http://www.w3.org/1999/xhtml" && - root.localName == "html"); + root.namespaceURI === "http://www.w3.org/1999/xhtml" && + root.localName === "html"); var is_svg = (output_document.defaultView && "SVGSVGElement" in output_document.defaultView && root instanceof output_document.defaultView.SVGSVGElement); @@ -4169,11 +4445,7 @@ status ], ], - ["button", - {"onclick": "let evt = new Event('__test_restart'); " + - "let canceled = !window.dispatchEvent(evt);" + - "if (!canceled) { location.reload() }"}, - "Rerun"] + ["button", {"id":"rerun"}, "Rerun"] ]]; if (harness_status.status === harness_status.ERROR) { @@ -4205,6 +4477,13 @@ log.appendChild(render(summary_template, {num_tests:tests.length}, output_document)); + output_document.getElementById("rerun").addEventListener("click", + function() { + let evt = new Event('__test_restart'); + let canceled = !window.dispatchEvent(evt); + if (!canceled) { location.reload(); } + }); + forEach(output_document.querySelectorAll("section#summary label"), function(element) { @@ -4229,18 +4508,6 @@ }); }); - // This use of innerHTML plus manual escaping is not recommended in - // general, but is necessary here for performance. Using textContent - // on each individual adds tens of seconds of execution time for - // large test suites (tens of thousands of tests). - function escape_html(s) - { - return s.replace(/\&/g, "&") - .replace(/ { - var output_fn = "" + escape_html(assert.assert_name) + "("; - var prefix_len = output_fn.length; - var output_args = assert.args; - var output_len = output_args.reduce((prev, current) => prev+current, prefix_len); - if (output_len[output_len.length - 1] > 50) { - output_args = output_args.map((x, i) => - (i > 0 ? " ".repeat(prefix_len) : "" )+ x + (i < output_args.length - 1 ? ",\n" : "")); - } else { - output_args = output_args.map((x, i) => x + (i < output_args.length - 1 ? ", " : "")); - } - output_fn += escape_html(output_args.join("")); - output_fn += ')'; - var output_location; + + const table = asserts_output.querySelector("table"); + for (const assert of asserts) { + const status_class_name = status_class(Test.prototype.status_formats[assert.status]); + var output_fn = "(" + assert.args.join(", ") + ")"; if (assert.stack) { - output_location = assert.stack.split("\n", 1)[0].replace(/@?\w+:\/\/[^ "\/]+(?::\d+)?/g, " "); + output_fn += "\n"; + output_fn += assert.stack.split("\n", 1)[0].replace(/@?\w+:\/\/[^ "\/]+(?::\d+)?/g, " "); } - return "" + - "" + - Test.prototype.status_formats[assert.status] + "" + - "
" +
-                    output_fn +
-                    (output_location ? "\n" + escape_html(output_location) : "") +
-                    "
"; + table.appendChild(render( + ["tr", {"class":"overall-" + status_class_name}, + ["td", {"class":status_class_name}, Test.prototype.status_formats[assert.status]], + ["td", {}, ["pre", {}, ["strong", {}, assert.assert_name], output_fn]] ])); } - ).join("\n"); - rv += ""; - return rv; + return asserts_output; } - log.appendChild(document.createElementNS(xhtml_ns, "section")); var assertions = has_assertions(); - var html = "

Details

" + - "" + - (assertions ? "" : "") + - "" + - ""; - for (var i = 0; i < tests.length; i++) { - var test = tests[i]; - html += '' + - '"; - } - html += "
ResultTest NameAssertionMessage
' + - test.format_status() + - "</td><td>" + - escape_html(test.name) + - "</td><td>" + - (assertions ? escape_html(get_assertion(test)) + "</td><td>" : "") + - escape_html(test.message ? tests[i].message : " ") + - (tests[i].stack ? "<pre>" +
-                 escape_html(tests[i].stack) +
-                 "</pre>
": ""); + const section = render( + ["section", {}, + ["h2", {}, "Details"], + ["table", {"id":"results", "class":(assertions ? "assertions" : "")}, + ["thead", {}, + ["tr", {}, + ["th", {}, "Result"], + ["th", {}, "Test Name"], + (assertions ? ["th", {}, "Assertion"] : ""), + ["th", {}, "Message" ]]], + ["tbody", {}]]]); + + const tbody = section.querySelector("tbody"); + for (const test of tests) { + const status = test.format_status(); + const status_class_name = status_class(status); + tbody.appendChild(render( + ["tr", {"class":"overall-" + status_class_name}, + ["td", {"class":status_class_name}, status], + ["td", {}, test.name], + (assertions ? ["td", {}, get_assertion(test)] : ""), + ["td", {}, + test.message ?? "", + ["pre", {}, test.stack ?? ""]]])); if (!(test instanceof RemoteTest)) { - html += "
<details><summary>Asserts run</summary>" + get_asserts_output(test) + "</details>
" + tbody.lastChild.lastChild.appendChild(get_asserts_output(test)); } - html += "
"; - try { - log.lastChild.innerHTML = html; - } catch (e) { - log.appendChild(document.createElementNS(xhtml_ns, "p")) - .textContent = "Setting innerHTML for the log threw an exception."; - log.appendChild(document.createElementNS(xhtml_ns, "pre")) - .textContent = html; } + log.appendChild(section); }; /* @@ -4410,13 +4659,20 @@ { var substitution_re = /\$\{([^ }]*)\}/g; - function do_substitution(input) { + function do_substitution(input) + { var components = input.split(substitution_re); var rv = []; - for (var i = 0; i < components.length; i += 2) { - rv.push(components[i]); - if (components[i + 1]) { - rv.push(String(substitutions[components[i + 1]])); + if (components.length === 1) { + rv = components; + } else if (substitutions) { + for (var i = 0; i < components.length; i += 2) { + if (components[i]) { + rv.push(components[i]); + } + if (substitutions[components[i + 1]]) { + rv.push(String(substitutions[components[i + 1]])); + } } } return rv; @@ -4532,7 +4788,7 @@ */ function AssertionError(message) { - if (typeof message == "string") { + if (typeof message === "string") { message = sanitize_unpaired_surrogates(message); } this.message = message; @@ -4578,7 +4834,7 @@ } return lines.slice(i).join("\n"); - } + }; function OptionalFeatureUnsupportedError(message) { @@ -4767,11 +5023,21 @@ return META_TITLE; } if ('location' in global_scope && 'pathname' in location) { - return location.pathname.substring(location.pathname.lastIndexOf('/') + 1, location.pathname.indexOf('.')); + var filename = location.pathname.substring(location.pathname.lastIndexOf('/') + 1); + return filename.substring(0, filename.indexOf('.')); } return "Untitled"; } + /** Fetches a JSON resource and parses it */ + async function fetch_json(resource) { + const response = await fetch(resource); + return await response.json(); + } + if (!global_scope.GLOBAL || !global_scope.GLOBAL.isShadowRealm()) { + expose(fetch_json, 'fetch_json'); + } + /** * Setup globals */ diff --git a/test/fixtures/wpt/resources/testharnessreport.js b/test/fixtures/wpt/resources/testharnessreport.js index e5cb40fe0ef652..405a2d8b06f00f 100644 --- a/test/fixtures/wpt/resources/testharnessreport.js +++ b/test/fixtures/wpt/resources/testharnessreport.js @@ -14,31 +14,6 @@ * parameters they are called with see testharness.js */ -function dump_test_results(tests, status) { - var results_element = document.createElement("script"); - results_element.type = "text/json"; - results_element.id = "__testharness__results__"; - var test_results = tests.map(function(x) { - return {name:x.name, status:x.status, message:x.message, stack:x.stack} - }); - var data = {test:window.location.href, - tests:test_results, - status: status.status, - message: status.message, - stack: status.stack}; - results_element.textContent = JSON.stringify(data); - - // To avoid a HierarchyRequestError with XML documents, ensure that 'results_element' - // is inserted at a location that results in a valid document. - var parent = document.body - ? document.body // is required in XHTML documents - : document.documentElement; // fallback for optional in HTML5, SVG, etc. - - parent.appendChild(results_element); -} - -add_completion_callback(dump_test_results); - /* If the parent window has a testharness_properties object, * we use this to provide the test settings. 
This is used by the * default in-browser runner to configure the timeout and the diff --git a/test/fixtures/wpt/resources/web-bluetooth-bidi-test.js b/test/fixtures/wpt/resources/web-bluetooth-bidi-test.js new file mode 100644 index 00000000000000..3283fef43fc7d2 --- /dev/null +++ b/test/fixtures/wpt/resources/web-bluetooth-bidi-test.js @@ -0,0 +1,408 @@ +'use strict' + +// Convert `manufacturerData` to an array of bluetooth.BluetoothManufacturerData +// defined in +// https://webbluetoothcg.github.io/web-bluetooth/#bluetooth-bidi-definitions. +function convertToBidiManufacturerData(manufacturerData) { + const bidiManufacturerData = []; + for (const key in manufacturerData) { + bidiManufacturerData.push({ + key: parseInt(key), + data: btoa(String.fromCharCode(...manufacturerData[key])) + }) + } + return bidiManufacturerData; +} + +function ArrayToMojoCharacteristicProperties(arr) { + const struct = {}; + arr.forEach(property => { + struct[property] = true; + }); + return struct; +} + +class FakeBluetooth { + constructor() { + this.fake_central_ = null; + } + + // Returns a promise that resolves with a FakeCentral that clients can use + // to simulate events that a device in the Central/Observer role would + // receive as well as monitor the operations performed by the device in the + // Central/Observer role. + // + // A "Central" object would allow its clients to receive advertising events + // and initiate connections to peripherals i.e. operations of two roles + // defined by the Bluetooth Spec: Observer and Central. + // See Bluetooth 4.2 Vol 3 Part C 2.2.2 "Roles when Operating over an + // LE Physical Transport". + async simulateCentral({state}) { + if (this.fake_central_) { + throw 'simulateCentral() should only be called once'; + } + + await test_driver.bidi.bluetooth.simulate_adapter({state: state}); + this.fake_central_ = new FakeCentral(); + return this.fake_central_; + } +} + +// FakeCentral allows clients to simulate events that a device in the +// Central/Observer role would receive as well as monitor the operations +// performed by the device in the Central/Observer role. +class FakeCentral { + constructor() { + this.peripherals_ = new Map(); + } + + // Simulates a peripheral with |address|, |name|, |manufacturerData| and + // |known_service_uuids| that has already been connected to the system. If the + // peripheral existed already it updates its name, manufacturer data, and + // known UUIDs. |known_service_uuids| should be an array of + // BluetoothServiceUUIDs + // https://webbluetoothcg.github.io/web-bluetooth/#typedefdef-bluetoothserviceuuid + // + // Platforms offer methods to retrieve devices that have already been + // connected to the system or weren't connected through the UA e.g. a user + // connected a peripheral through the system's settings. This method is + // intended to simulate peripherals that those methods would return. + async simulatePreconnectedPeripheral( + {address, name, manufacturerData = {}, knownServiceUUIDs = []}) { + await test_driver.bidi.bluetooth.simulate_preconnected_peripheral({ + address: address, + name: name, + manufacturerData: convertToBidiManufacturerData(manufacturerData), + knownServiceUuids: + knownServiceUUIDs.map(uuid => BluetoothUUID.getService(uuid)) + }); + + return this.fetchOrCreatePeripheral_(address); + } + + // Create a fake_peripheral object from the given address. 
+ fetchOrCreatePeripheral_(address) { + let peripheral = this.peripherals_.get(address); + if (peripheral === undefined) { + peripheral = new FakePeripheral(address); + this.peripherals_.set(address, peripheral); + } + return peripheral; + } +} + +class FakePeripheral { + constructor(address) { + this.address = address; + } + + // Adds a fake GATT Service with |uuid| to be discovered when discovering + // the peripheral's GATT Attributes. Returns a FakeRemoteGATTService + // corresponding to this service. |uuid| should be a BluetoothServiceUUIDs + // https://webbluetoothcg.github.io/web-bluetooth/#typedefdef-bluetoothserviceuuid + async addFakeService({uuid}) { + const service_uuid = BluetoothUUID.getService(uuid); + await test_driver.bidi.bluetooth.simulate_service({ + address: this.address, + uuid: service_uuid, + type: 'add', + }); + return new FakeRemoteGATTService(service_uuid, this.address); + } + + // Sets the next GATT Connection request response to |code|. |code| could be + // an HCI Error Code from BT 4.2 Vol 2 Part D 1.3 List Of Error Codes or a + // number outside that range returned by specific platforms e.g. Android + // returns 0x101 to signal a GATT failure + // https://developer.android.com/reference/android/bluetooth/BluetoothGatt.html#GATT_FAILURE + async setNextGATTConnectionResponse({code}) { + const remove_handler = + test_driver.bidi.bluetooth.gatt_connection_attempted.on((event) => { + if (event.address != this.address) { + return; + } + remove_handler(); + test_driver.bidi.bluetooth.simulate_gatt_connection_response({ + address: event.address, + code, + }); + }); + } + + async setNextGATTDiscoveryResponse({code}) { + // No-op for Web Bluetooth Bidi test, it will be removed when migration + // completes. + return Promise.resolve(); + } + + // Simulates a GATT connection response with |code| from the peripheral. + async simulateGATTConnectionResponse(code) { + await test_driver.bidi.bluetooth.simulate_gatt_connection_response( + {address: this.address, code}); + } + + // Simulates a GATT disconnection in the peripheral. + async simulateGATTDisconnection() { + await test_driver.bidi.bluetooth.simulate_gatt_disconnection( + {address: this.address}); + } +} + +class FakeRemoteGATTService { + constructor(service_uuid, peripheral_address) { + this.service_uuid_ = service_uuid; + this.peripheral_address_ = peripheral_address; + } + + // Adds a fake GATT Characteristic with |uuid| and |properties| + // to this fake service. The characteristic will be found when discovering + // the peripheral's GATT Attributes. Returns a FakeRemoteGATTCharacteristic + // corresponding to the added characteristic. + async addFakeCharacteristic({uuid, properties}) { + const characteristic_uuid = BluetoothUUID.getCharacteristic(uuid); + await test_driver.bidi.bluetooth.simulate_characteristic({ + address: this.peripheral_address_, + serviceUuid: this.service_uuid_, + characteristicUuid: characteristic_uuid, + characteristicProperties: ArrayToMojoCharacteristicProperties(properties), + type: 'add' + }); + return new FakeRemoteGATTCharacteristic( + characteristic_uuid, this.service_uuid_, this.peripheral_address_); + } + + // Removes the fake GATT service from its fake peripheral. 
+ async remove() { + await test_driver.bidi.bluetooth.simulate_service({ + address: this.peripheral_address_, + uuid: this.service_uuid_, + type: 'remove' + }); + } +} + +class FakeRemoteGATTCharacteristic { + constructor(characteristic_uuid, service_uuid, peripheral_address) { + this.characteristic_uuid_ = characteristic_uuid; + this.service_uuid_ = service_uuid; + this.peripheral_address_ = peripheral_address; + this.last_written_value_ = {lastValue: null, lastWriteType: 'none'}; + } + + // Adds a fake GATT Descriptor with |uuid| to be discovered when + // discovering the peripheral's GATT Attributes. Returns a + // FakeRemoteGATTDescriptor corresponding to this descriptor. |uuid| should + // be a BluetoothDescriptorUUID + // https://webbluetoothcg.github.io/web-bluetooth/#typedefdef-bluetoothdescriptoruuid + async addFakeDescriptor({uuid}) { + const descriptor_uuid = BluetoothUUID.getDescriptor(uuid); + await test_driver.bidi.bluetooth.simulate_descriptor({ + address: this.peripheral_address_, + serviceUuid: this.service_uuid_, + characteristicUuid: this.characteristic_uuid_, + descriptorUuid: descriptor_uuid, + type: 'add' + }); + return new FakeRemoteGATTDescriptor( + descriptor_uuid, this.characteristic_uuid_, this.service_uuid_, + this.peripheral_address_); + } + + // Simulate a characteristic for operation |type| with response |code| and + // |data|. + async simulateResponse(type, code, data) { + await test_driver.bidi.bluetooth.simulate_characteristic_response({ + address: this.peripheral_address_, + serviceUuid: this.service_uuid_, + characteristicUuid: this.characteristic_uuid_, + type, + code, + data, + }); + } + + // Simulate a characteristic response for read operation with response |code| + // and |data|. + async simulateReadResponse(code, data) { + await this.simulateResponse('read', code, data); + } + + // Simulate a characteristic response for write operation with response + // |code|. + async simulateWriteResponse(code) { + await this.simulateResponse('write', code); + } + + // Sets the next read response for characteristic to |code| and |value|. + // |code| could be a GATT Error Response from + // BT 4.2 Vol 3 Part F 3.4.1.1 Error Response or a number outside that range + // returned by specific platforms e.g. Android returns 0x101 to signal a GATT + // failure. + // https://developer.android.com/reference/android/bluetooth/BluetoothGatt.html#GATT_FAILURE + async setNextReadResponse(gatt_code, value = null) { + if (gatt_code === 0 && value === null) { + throw '|value| can\'t be null if read should success.'; + } + if (gatt_code !== 0 && value !== null) { + throw '|value| must be null if read should fail.'; + } + + const remove_handler = + test_driver.bidi.bluetooth.characteristic_event_generated.on( + (event) => { + if (event.address != this.peripheral_address_) { + return; + } + remove_handler(); + this.simulateReadResponse(gatt_code, value); + }); + } + + // Sets the next write response for this characteristic to |code|. If + // writing to a characteristic that only supports 'write-without-response' + // the set response will be ignored. + // |code| could be a GATT Error Response from + // BT 4.2 Vol 3 Part F 3.4.1.1 Error Response or a number outside that range + // returned by specific platforms e.g. Android returns 0x101 to signal a GATT + // failure. 
+ async setNextWriteResponse(gatt_code) { + const remove_handler = + test_driver.bidi.bluetooth.characteristic_event_generated.on( + (event) => { + if (event.address != this.peripheral_address_) { + return; + } + this.last_written_value_ = { + lastValue: event.data, + lastWriteType: event.type + }; + remove_handler(); + if (event.type == 'write-with-response') { + this.simulateWriteResponse(gatt_code); + } + }); + } + + // Gets the last successfully written value to the characteristic and its + // write type. Write type is one of 'none', 'default-deprecated', + // 'with-response', 'without-response'. Returns {lastValue: null, + // lastWriteType: 'none'} if no value has yet been written to the + // characteristic. + async getLastWrittenValue() { + return this.last_written_value_; + } + + // Removes the fake GATT Characteristic from its fake service. + async remove() { + await test_driver.bidi.bluetooth.simulate_characteristic({ + address: this.peripheral_address_, + serviceUuid: this.service_uuid_, + characteristicUuid: this.characteristic_uuid_, + characteristicProperties: undefined, + type: 'remove' + }); + } +} + +class FakeRemoteGATTDescriptor { + constructor( + descriptor_uuid, characteristic_uuid, service_uuid, peripheral_address) { + this.descriptor_uuid_ = descriptor_uuid; + this.characteristic_uuid_ = characteristic_uuid; + this.service_uuid_ = service_uuid; + this.peripheral_address_ = peripheral_address; + this.last_written_value_ = null; + } + + // Simulate a descriptor for operation |type| with response |code| and + // |data|. + async simulateResponse(type, code, data) { + await test_driver.bidi.bluetooth.simulate_descriptor_response({ + address: this.peripheral_address_, + serviceUuid: this.service_uuid_, + characteristicUuid: this.characteristic_uuid_, + descriptorUuid: this.descriptor_uuid_, + type, + code, + data, + }); + } + + // Simulate a descriptor response for read operation with response |code| and + // |data|. + async simulateReadResponse(code, data) { + await this.simulateResponse('read', code, data); + } + + // Simulate a descriptor response for write operation with response |code|. + async simulateWriteResponse(code) { + await this.simulateResponse('write', code); + } + + // Sets the next read response for descriptor to |code| and |value|. + // |code| could be a GATT Error Response from + // BT 4.2 Vol 3 Part F 3.4.1.1 Error Response or a number outside that range + // returned by specific platforms e.g. Android returns 0x101 to signal a GATT + // failure. + // https://developer.android.com/reference/android/bluetooth/BluetoothGatt.html#GATT_FAILURE + async setNextReadResponse(gatt_code, value = null) { + if (gatt_code === 0 && value === null) { + throw '|value| can\'t be null if read should success.'; + } + if (gatt_code !== 0 && value !== null) { + throw '|value| must be null if read should fail.'; + } + + const remove_handler = + test_driver.bidi.bluetooth.descriptor_event_generated.on((event) => { + if (event.address != this.peripheral_address_) { + return; + } + remove_handler(); + this.simulateReadResponse(gatt_code, value); + }); + } + + // Sets the next write response for this descriptor to |code|. + // |code| could be a GATT Error Response from + // BT 4.2 Vol 3 Part F 3.4.1.1 Error Response or a number outside that range + // returned by specific platforms e.g. Android returns 0x101 to signal a GATT + // failure. 
+ async setNextWriteResponse(gatt_code) { + const remove_handler = + test_driver.bidi.bluetooth.descriptor_event_generated.on((event) => { + if (event.address != this.peripheral_address_) { + return; + } + this.last_written_value_ = { + lastValue: event.data, + lastWriteType: event.type + }; + remove_handler(); + if (event.type == 'write-with-response') { + this.simulateWriteResponse(gatt_code); + } + }); + } + + // Gets the last successfully written value to the descriptor. + // Returns null if no value has yet been written to the descriptor. + async getLastWrittenValue() { + return this.last_written_value_; + } + + // Removes the fake GATT Descriptor from its fake characteristic. + async remove() { + await test_driver.bidi.bluetooth.simulate_descriptor({ + address: this.peripheral_address_, + serviceUuid: this.service_uuid_, + characteristicUuid: this.characteristic_uuid_, + descriptorUuid: this.descriptor_uuid_, + type: 'remove' + }); + } +} + +function initializeBluetoothBidiResources() { + navigator.bluetooth.test = new FakeBluetooth(); +} diff --git a/test/fixtures/wpt/versions.json b/test/fixtures/wpt/versions.json index c52725e388d334..666b1d9deaa780 100644 --- a/test/fixtures/wpt/versions.json +++ b/test/fixtures/wpt/versions.json @@ -64,7 +64,7 @@ "path": "resource-timing" }, "resources": { - "commit": "1e140d63ec885703ce24b3798abd81912696bb85", + "commit": "1d2c5fb36a6e477c8f915bde7eca027be6abe792", "path": "resources" }, "streams": { @@ -88,7 +88,7 @@ "path": "wasm/webapi" }, "WebCryptoAPI": { - "commit": "591c95ce6174690b92833cd92859ce2807714591", + "commit": "1d2c5fb36a6e477c8f915bde7eca027be6abe792", "path": "WebCryptoAPI" }, "webidl/ecmascript-binding/es-exceptions": { @@ -100,7 +100,7 @@ "path": "webmessaging/broadcastchannel" }, "webstorage": { - "commit": "1291340aaaa6e73db43b412e47401eca3830c556", + "commit": "1d2c5fb36a6e477c8f915bde7eca027be6abe792", "path": "webstorage" } } diff --git a/test/fixtures/wpt/webstorage/storage_local_setitem_quotaexceedederr.window.js b/test/fixtures/wpt/webstorage/storage_local_setitem_quotaexceedederr.window.js index fff7d6444a039e..f2f3c4d6887542 100644 --- a/test/fixtures/wpt/webstorage/storage_local_setitem_quotaexceedederr.window.js +++ b/test/fixtures/wpt/webstorage/storage_local_setitem_quotaexceedederr.window.js @@ -1,16 +1,18 @@ -test(function() { +test(t => { localStorage.clear(); var index = 0; var key = "name"; var val = "x".repeat(1024); - assert_throws_dom("QUOTA_EXCEEDED_ERR", function() { + t.add_cleanup(() => { + localStorage.clear(); + }); + + assert_throws_quotaexceedederror(() => { while (true) { index++; localStorage.setItem("" + key + index, "" + val + index); } - }); - - localStorage.clear(); + }, null, null); }, "Throws QuotaExceededError when the quota has been exceeded"); diff --git a/test/fixtures/wpt/webstorage/storage_session_setitem_quotaexceedederr.window.js b/test/fixtures/wpt/webstorage/storage_session_setitem_quotaexceedederr.window.js index 42a895470efa62..693c98d29f99fd 100644 --- a/test/fixtures/wpt/webstorage/storage_session_setitem_quotaexceedederr.window.js +++ b/test/fixtures/wpt/webstorage/storage_session_setitem_quotaexceedederr.window.js @@ -1,16 +1,18 @@ -test(function() { +test(t => { sessionStorage.clear(); var index = 0; var key = "name"; var val = "x".repeat(1024); - assert_throws_dom("QUOTA_EXCEEDED_ERR", function() { + t.add_cleanup(() => { + sessionStorage.clear(); + }); + + assert_throws_quotaexceedederror(() => { while (true) { index++; sessionStorage.setItem("" + key + index, "" + 
val + index); } - }); - - sessionStorage.clear(); + }, null, null); }, "Throws QuotaExceededError when the quota has been exceeded"); diff --git a/test/fixtures/wpt/webstorage/symbol-props.window.js b/test/fixtures/wpt/webstorage/symbol-props.window.js index 61dd8f83dc4f5b..8f598d7076909d 100644 --- a/test/fixtures/wpt/webstorage/symbol-props.window.js +++ b/test/fixtures/wpt/webstorage/symbol-props.window.js @@ -39,10 +39,10 @@ Object.defineProperty(storage, key, { "value": "test", "configurable": false }); assert_equals(storage[key], "test"); var desc = Object.getOwnPropertyDescriptor(storage, key); - assert_true(desc.configurable, "configurable"); + assert_false(desc.configurable, "configurable"); - assert_true(delete storage[key]); - assert_equals(storage[key], undefined); + assert_false(delete storage[key]); + assert_equals(storage[key], "test"); }, name + ": defineProperty not configurable"); test(function() { diff --git a/test/js-native-api/test_object/test.js b/test/js-native-api/test_object/test.js index 670ae200a35bec..a745d45e6f75e1 100644 --- a/test/js-native-api/test_object/test.js +++ b/test/js-native-api/test_object/test.js @@ -140,7 +140,7 @@ assert.strictEqual(newObject.test_string, 'test string'); test_object.Wrap(wrapper); assert(test_object.Unwrap(wrapper)); - assert(wrapper.protoA); + assert.strictEqual(wrapper.protoA, true); } { @@ -155,8 +155,8 @@ assert.strictEqual(newObject.test_string, 'test string'); Object.setPrototypeOf(wrapper, protoB); assert(test_object.Unwrap(wrapper)); - assert(wrapper.protoA, true); - assert(wrapper.protoB, true); + assert.strictEqual(wrapper.protoA, true); + assert.strictEqual(wrapper.protoB, true); } { diff --git a/test/known_issues/test-fs-cp-sync-dereference.js b/test/known_issues/test-fs-cp-sync-dereference.js new file mode 100644 index 00000000000000..fbb07a8f781520 --- /dev/null +++ b/test/known_issues/test-fs-cp-sync-dereference.js @@ -0,0 +1,39 @@ +'use strict'; + +// Refs: https://github.com/nodejs/node/issues/58939 +// +// The cpSync function is not correctly handling the `dereference` option. +// In this test, both the cp and cpSync functions are attempting to copy +// a file over a symlinked directory. In the cp case it works fine. In the +// cpSync case it fails with an error. + +const common = require('../common'); + +const { + cp, + cpSync, + mkdirSync, + symlinkSync, + writeFileSync, +} = require('fs'); + +const { + join, +} = require('path'); + +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); + +const pathA = join(tmpdir.path, 'a'); +const pathB = join(tmpdir.path, 'b'); +const pathC = join(tmpdir.path, 'c'); +const pathD = join(tmpdir.path, 'd'); + +writeFileSync(pathA, 'file a'); +mkdirSync(pathB); +symlinkSync(pathB, pathC, 'dir'); +symlinkSync(pathB, pathD, 'dir'); + +cp(pathA, pathD, { dereference: false }, common.mustSucceed()); + +cpSync(pathA, pathC, { dereference: false }); diff --git a/test/message/assert_throws_stack.out b/test/message/assert_throws_stack.out index b1f3ea2108ba9c..1ecda64889e07f 100644 --- a/test/message/assert_throws_stack.out +++ b/test/message/assert_throws_stack.out @@ -9,6 +9,7 @@ AssertionError [ERR_ASSERTION]: Expected values to be strictly deep-equal: - Comparison { - bar: true - } + at Object. 
(*assert_throws_stack.js:*:*) at * at * @@ -32,7 +33,8 @@ AssertionError [ERR_ASSERTION]: Expected values to be strictly deep-equal: at * at *, expected: { bar: true }, - operator: 'throws' + operator: 'throws', + diff: 'simple' } Node.js * diff --git a/test/module-hooks/test-module-hooks-custom-conditions-cjs.js b/test/module-hooks/test-module-hooks-custom-conditions-cjs.js new file mode 100644 index 00000000000000..4dcb9c3a20f19e --- /dev/null +++ b/test/module-hooks/test-module-hooks-custom-conditions-cjs.js @@ -0,0 +1,57 @@ +// Similar to test-module-hooks-custom-conditions.mjs, but checking the +// real require() instead of the re-invented one for imported CJS. +'use strict'; +const common = require('../common'); +const { registerHooks } = require('node:module'); +const assert = require('node:assert'); +const { cjs, esm } = require('../fixtures/es-modules/custom-condition/load.cjs'); + +(async () => { + // Without hooks, the default condition is used. + assert.strictEqual(cjs('foo').result, 'default'); + assert.strictEqual((await esm('foo')).result, 'default'); + + // Prepending 'foo' to the conditions array in the resolve hook should + // allow a CJS to be resolved with that condition. + { + const hooks = registerHooks({ + resolve(specifier, context, nextResolve) { + assert(Array.isArray(context.conditions)); + context.conditions = ['foo', ...context.conditions]; + return nextResolve(specifier, context); + }, + }); + assert.strictEqual(cjs('foo/second').result, 'foo'); + assert.strictEqual((await esm('foo/second')).result, 'foo'); + hooks.deregister(); + } + + // Prepending 'foo-esm' to the conditions array in the resolve hook should + // allow a ESM to be resolved with that condition. + { + const hooks = registerHooks({ + resolve(specifier, context, nextResolve) { + assert(Array.isArray(context.conditions)); + context.conditions = ['foo-esm', ...context.conditions]; + return nextResolve(specifier, context); + }, + }); + assert.strictEqual(cjs('foo/third').result, 'foo-esm'); + assert.strictEqual((await esm('foo/third')).result, 'foo-esm'); + hooks.deregister(); + } + + // Duplicating the 'foo' condition in the resolve hook should not change the result. + { + const hooks = registerHooks({ + resolve(specifier, context, nextResolve) { + assert(Array.isArray(context.conditions)); + context.conditions = ['foo', ...context.conditions, 'foo']; + return nextResolve(specifier, context); + }, + }); + assert.strictEqual(cjs('foo/fourth').result, 'foo'); + assert.strictEqual((await esm('foo/fourth')).result, 'foo'); + hooks.deregister(); + } +})().then(common.mustCall()); diff --git a/test/module-hooks/test-module-hooks-custom-conditions-special-values.js b/test/module-hooks/test-module-hooks-custom-conditions-special-values.js new file mode 100644 index 00000000000000..9a84c788c77d05 --- /dev/null +++ b/test/module-hooks/test-module-hooks-custom-conditions-special-values.js @@ -0,0 +1,70 @@ +// Check various special values of `conditions` in the context object +// when using synchronous module hooks to override the loaders in a +// CJS module. +'use strict'; +const common = require('../common'); +const { registerHooks } = require('node:module'); +const assert = require('node:assert'); +const { cjs, esm } = require('../fixtures/es-modules/custom-condition/load.cjs'); + +(async () => { + // Setting it to undefined would lead to the default conditions being used. 
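These condition-driven expectations only make sense against a package whose `exports` map keys on the custom conditions. The `custom-condition` fixture itself is not shown in this change, so the following is only an assumed shape (subpath, file names, and the `result` convention are hypothetical): each target module is presumed to export a `result` string naming the condition that selected it.

// Assumed shape of the fixture's "foo" package.json (illustrative only):
{
  "name": "foo",
  "exports": {
    "./second": {
      "foo": "./foo.cjs",
      "foo-esm": "./foo-esm.mjs",
      "default": "./default.cjs"
    }
  }
}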
+ { + const hooks = registerHooks({ + resolve(specifier, context, nextResolve) { + context.conditions = undefined; + return nextResolve(specifier, context); + }, + }); + assert.strictEqual(cjs('foo').result, 'default'); + assert.strictEqual((await esm('foo')).result, 'default'); + hooks.deregister(); + } + + // Setting it to an empty array would lead to the default conditions being used. + { + const hooks = registerHooks({ + resolve(specifier, context, nextResolve) { + context.conditions = []; + return nextResolve(specifier, context); + }, + }); + assert.strictEqual(cjs('foo/second').result, 'default'); + assert.strictEqual((await esm('foo/second')).result, 'default'); + hooks.deregister(); + } + + // If the exports have no default export, it should error. + { + const hooks = registerHooks({ + resolve(specifier, context, nextResolve) { + context.conditions = []; + return nextResolve(specifier, context); + }, + }); + assert.throws(() => cjs('foo/no-default'), { + code: 'ERR_PACKAGE_PATH_NOT_EXPORTED', + }); + await assert.rejects(esm('foo/no-default'), { + code: 'ERR_PACKAGE_PATH_NOT_EXPORTED', + }); + hooks.deregister(); + } + + // If the exports have no default export, it should error. + { + const hooks = registerHooks({ + resolve(specifier, context, nextResolve) { + context.conditions = 'invalid'; + return nextResolve(specifier, context); + }, + }); + assert.throws(() => cjs('foo/third'), { + code: 'ERR_INVALID_ARG_VALUE', + }); + await assert.rejects(esm('foo/third'), { + code: 'ERR_INVALID_ARG_VALUE', + }); + hooks.deregister(); + } +})().then(common.mustCall()); diff --git a/test/module-hooks/test-module-hooks-custom-conditions.mjs b/test/module-hooks/test-module-hooks-custom-conditions.mjs new file mode 100644 index 00000000000000..ef2022cf190902 --- /dev/null +++ b/test/module-hooks/test-module-hooks-custom-conditions.mjs @@ -0,0 +1,53 @@ +// This tests that custom conditions can be used in module resolution hooks. +import '../common/index.mjs'; +import { registerHooks } from 'node:module'; +import assert from 'node:assert'; +import { cjs, esm } from '../fixtures/es-modules/custom-condition/load.cjs'; + +// Without hooks, the default condition is used. +assert.strictEqual(cjs('foo').result, 'default'); +assert.strictEqual((await esm('foo')).result, 'default'); + +// Prepending 'foo' to the conditions array in the resolve hook should +// allow a CJS to be resolved with that condition. +{ + const hooks = registerHooks({ + resolve(specifier, context, nextResolve) { + assert(Array.isArray(context.conditions)); + context.conditions = ['foo', ...context.conditions]; + return nextResolve(specifier, context); + }, + }); + assert.strictEqual(cjs('foo/second').result, 'foo'); + assert.strictEqual((await esm('foo/second')).result, 'foo'); + hooks.deregister(); +} + +// Prepending 'foo-esm' to the conditions array in the resolve hook should +// allow a ESM to be resolved with that condition. +{ + const hooks = registerHooks({ + resolve(specifier, context, nextResolve) { + assert(Array.isArray(context.conditions)); + context.conditions = ['foo-esm', ...context.conditions]; + return nextResolve(specifier, context); + }, + }); + assert.strictEqual(cjs('foo/third').result, 'foo-esm'); + assert.strictEqual((await esm('foo/third')).result, 'foo-esm'); + hooks.deregister(); +} + +// Duplicating the 'foo' condition in the resolve hook should not change the result. 
+{ + const hooks = registerHooks({ + resolve(specifier, context, nextResolve) { + assert(Array.isArray(context.conditions)); + context.conditions = ['foo', ...context.conditions, 'foo']; + return nextResolve(specifier, context); + }, + }); + assert.strictEqual(cjs('foo/fourth').result, 'foo'); + assert.strictEqual((await esm('foo/fourth')).result, 'foo'); + hooks.deregister(); +} diff --git a/test/module-hooks/test-module-hooks-import-wasm.mjs b/test/module-hooks/test-module-hooks-import-wasm.mjs index f2c357cd50390c..00e6cd41265077 100644 --- a/test/module-hooks/test-module-hooks-import-wasm.mjs +++ b/test/module-hooks/test-module-hooks-import-wasm.mjs @@ -1,6 +1,5 @@ -// Flags: --no-experimental-wasm-modules // This tests that module.registerHooks() can be used to support unknown formats, like -// import(wasm) (without --experimental-wasm-modules). +// import(wasm) import '../common/index.mjs'; import assert from 'node:assert'; diff --git a/test/module-hooks/test-module-hooks-load-esm-mock.js b/test/module-hooks/test-module-hooks-load-esm-mock.js index 88941b5d685f07..26bb01385014f7 100644 --- a/test/module-hooks/test-module-hooks-load-esm-mock.js +++ b/test/module-hooks/test-module-hooks-load-esm-mock.js @@ -48,4 +48,4 @@ function hook(code, filename) { assert.deepStrictEqual({ ...bar }, { $key: 'bar-esm' }); } -})().catch(common.mustNotCall()); +})().then(common.mustCall()); diff --git a/test/module-hooks/test-module-hooks-load-esm.js b/test/module-hooks/test-module-hooks-load-esm.js index 88941b5d685f07..26bb01385014f7 100644 --- a/test/module-hooks/test-module-hooks-load-esm.js +++ b/test/module-hooks/test-module-hooks-load-esm.js @@ -48,4 +48,4 @@ function hook(code, filename) { assert.deepStrictEqual({ ...bar }, { $key: 'bar-esm' }); } -})().catch(common.mustNotCall()); +})().then(common.mustCall()); diff --git a/test/module-hooks/test-module-hooks-require-wasm.js b/test/module-hooks/test-module-hooks-require-wasm.js index b4276bcc749a01..1cf7da6451f218 100644 --- a/test/module-hooks/test-module-hooks-require-wasm.js +++ b/test/module-hooks/test-module-hooks-require-wasm.js @@ -1,8 +1,7 @@ -// Flags: --no-experimental-wasm-modules 'use strict'; // This tests that module.registerHooks() can be used to support unknown formats, like -// require(wasm) and import(wasm) (without --experimental-wasm-modules). 
+// require(wasm) and import(wasm) const common = require('../common'); const assert = require('assert'); diff --git a/test/nop/nop.c b/test/nop/nop.c new file mode 100644 index 00000000000000..03b2213bb9a36c --- /dev/null +++ b/test/nop/nop.c @@ -0,0 +1,3 @@ +int main(void) { + return 0; +} diff --git a/test/parallel/parallel.status b/test/parallel/parallel.status index 6303908ce180db..0546a81ef11ec7 100644 --- a/test/parallel/parallel.status +++ b/test/parallel/parallel.status @@ -143,11 +143,12 @@ test-tls-write-error: PASS, FLAKY # https://github.com/nodejs/node/issues/48047 test-http-pipeline-flood: SKIP # https://github.com/nodejs/node/issues/58582 -test-http-proxy-fetch: PASS, FLAKY -test-https-proxy-fetch: PASS, FLAKY -test-inspector-network-fetch: PASS, FLAKY -test-fetch: PASS, FLAKY -test-without-async-context-frame: PASS, FLAKY +test-http-proxy-fetch: SKIP +test-https-proxy-fetch: SKIP +test-inspector-network-fetch: SKIP +test-inspector-network-content-type: SKIP +test-fetch: SKIP +test-without-async-context-frame: SKIP test-process-cpuUsage: PASS, FLAKY diff --git a/test/parallel/test-assert-class-destructuring.js b/test/parallel/test-assert-class-destructuring.js new file mode 100644 index 00000000000000..eb353f095d2da2 --- /dev/null +++ b/test/parallel/test-assert-class-destructuring.js @@ -0,0 +1,133 @@ +'use strict'; + +require('../common'); +const assert = require('assert'); +const { Assert } = require('assert'); +const { test } = require('node:test'); + +// Disable colored output to prevent color codes from breaking assertion +// message comparisons. This should only be an issue when process.stdout +// is a TTY. +if (process.stdout.isTTY) { + process.env.NODE_DISABLE_COLORS = '1'; +} + +test('Assert class destructuring behavior - diff option', () => { + const assertInstanceFull = new Assert({ diff: 'full' }); + const assertInstanceSimple = new Assert({ diff: 'simple' }); + + assertInstanceFull.throws( + () => assertInstanceFull.strictEqual({ a: 1 }, { a: 2 }), + (err) => { + assert.strictEqual(err.diff, 'full'); + return true; + } + ); + + assertInstanceSimple.throws( + () => assertInstanceSimple.strictEqual({ a: 1 }, { a: 2 }), + (err) => { + assert.strictEqual(err.diff, 'simple'); + return true; + } + ); + + const { strictEqual: strictEqualSimple } = assertInstanceSimple; + const { strictEqual: strictEqualFull } = assertInstanceFull; + const { deepStrictEqual: deepStrictEqualFull } = assertInstanceFull; + const { equal: equalFull } = assertInstanceFull; + + assert.throws( + () => strictEqualSimple({ a: 1 }, { a: 2 }), + (err) => { + assert.strictEqual(err.diff, 'simple'); + return true; + } + ); + + assert.throws( + () => strictEqualFull({ a: 1 }, { a: 2 }), + (err) => { + assert.strictEqual(err.diff, 'simple'); + return true; + } + ); + + assert.throws( + () => deepStrictEqualFull({ a: 1 }, { a: 2 }), + (err) => { + assert.strictEqual(err.diff, 'simple'); + return true; + } + ); + + assert.throws( + () => equalFull({ a: 1 }, { a: 2 }), + (err) => { + assert.strictEqual(err.diff, 'simple'); + return true; + } + ); +}); + +test('Assert class destructuring behavior - strict option', () => { + const assertInstanceNonStrict = new Assert({ strict: false }); + const assertInstanceStrict = new Assert({ strict: true }); + + assertInstanceNonStrict.equal(2, '2'); + + assert.throws( + () => assertInstanceStrict.equal(2, '2'), + assert.AssertionError + ); + + const { equal: equalNonStrict } = assertInstanceNonStrict; + const { equal: equalStrict } = assertInstanceStrict; + + 
equalNonStrict(2, '2'); + assert.throws( + () => equalStrict(2, '2'), + assert.AssertionError + ); +}); + +test('Assert class destructuring behavior - comprehensive methods', () => { + const myAssert = new Assert({ diff: 'full', strict: false }); + + const { + fail, + equal, + strictEqual, + deepStrictEqual, + throws, + match, + doesNotMatch + } = myAssert; + + assert.throws(() => fail('test message'), (err) => { + assert.strictEqual(err.diff, 'simple'); + assert.strictEqual(err.message, 'test message'); + return true; + }); + + assert.throws(() => equal({ a: 1 }, { a: 2 }), (err) => { + assert.strictEqual(err.diff, 'simple'); + return true; + }); + + assert.throws(() => strictEqual({ a: 1 }, { a: 2 }), (err) => { + assert.strictEqual(err.diff, 'simple'); + return true; + }); + + assert.throws(() => deepStrictEqual({ a: 1 }, { a: 2 }), (err) => { + assert.strictEqual(err.diff, 'simple'); + return true; + }); + + throws(() => { throw new Error('test'); }, Error); + + match('hello world', /world/); + + doesNotMatch('hello world', /xyz/); +}); diff --git a/test/parallel/test-assert-class.js b/test/parallel/test-assert-class.js new file mode 100644 index 00000000000000..91b4ce8feac12b --- /dev/null +++ b/test/parallel/test-assert-class.js @@ -0,0 +1,480 @@ +'use strict'; + +require('../common'); +const assert = require('assert'); +const { Assert } = require('assert'); +const { inspect } = require('util'); +const { test } = require('node:test'); + +// Disable colored output to prevent color codes from breaking assertion +// message comparisons. This should only be an issue when process.stdout +// is a TTY. +if (process.stdout.isTTY) { + process.env.NODE_DISABLE_COLORS = '1'; +} + +test('Assert constructor requires new', () => { + assert.throws(() => Assert(), { + code: 'ERR_CONSTRUCT_CALL_REQUIRED', + name: 'TypeError', + }); +}); + +test('Assert class non strict', () => { + const assertInstance = new Assert({ diff: undefined, strict: false }); + + assertInstance.ok( + assert.AssertionError.prototype instanceof Error, + 'assert.AssertionError instanceof Error' + ); + assert.strictEqual(typeof assertInstance.ok, 'function'); + assert.strictEqual(assertInstance.ok.strictEqual, undefined); + assert.strictEqual(typeof assertInstance.strictEqual, 'function'); + assertInstance.ok(true); + assertInstance.throws( + () => { + assertInstance.fail(); + }, + { + code: 'ERR_ASSERTION', + name: 'AssertionError', + message: 'Failed', + operator: 'fail', + actual: undefined, + expected: undefined, + generatedMessage: true, + stack: /Failed/, + } + ); + assertInstance.equal(undefined, undefined); + assertInstance.equal(null, undefined); + assertInstance.equal(2, '2'); + assertInstance.notEqual(true, false); + assertInstance.throws(() => assertInstance.deepEqual(/a/), { + code: 'ERR_MISSING_ARGS', + }); + assertInstance.throws(() => assertInstance.notDeepEqual('test'), { + code: 'ERR_MISSING_ARGS', + }); + assertInstance.notStrictEqual(2, '2'); + assertInstance.throws( + () => assertInstance.strictEqual(2, '2'), + assertInstance.AssertionError, + "strictEqual(2, '2')" + ); + assertInstance.throws( + () => { + assertInstance.partialDeepStrictEqual( + { a: true }, + { a: false }, + 'custom message' + ); + }, + { + code: 'ERR_ASSERTION', + name: 'AssertionError', + message: + 'custom message\n+ actual - expected\n\n {\n+ a: true\n- a: false\n }\n', + } + ); + assertInstance.throws(() => assertInstance.match(/abc/, 'string'), { + code: 'ERR_INVALID_ARG_TYPE', + message: + 'The "regexp" argument must be an instance 
of RegExp. ' + + "Received type string ('string')", + }); + assertInstance.throws(() => assertInstance.doesNotMatch(/abc/, 'string'), { + code: 'ERR_INVALID_ARG_TYPE', + message: + 'The "regexp" argument must be an instance of RegExp. ' + + "Received type string ('string')", + }); + + /* eslint-disable no-restricted-syntax */ + { + function thrower(errorConstructor) { + throw new errorConstructor({}); + } + + let threw = false; + try { + assertInstance.doesNotThrow( + () => thrower(TypeError), + assertInstance.AssertionError + ); + } catch (e) { + threw = true; + assertInstance.ok(e instanceof TypeError); + } + assertInstance.ok( + threw, + 'assertInstance.doesNotThrow with an explicit error is eating extra errors' + ); + } + { + let threw = false; + const rangeError = new RangeError('my range'); + + try { + assertInstance.doesNotThrow( + () => { + throw new TypeError('wrong type'); + }, + TypeError, + rangeError + ); + } catch (e) { + threw = true; + assertInstance.ok(e.message.includes(rangeError.message)); + assertInstance.ok(e instanceof assertInstance.AssertionError); + assertInstance.ok(!e.stack.includes('doesNotThrow'), e); + } + assertInstance.ok(threw); + } + /* eslint-enable no-restricted-syntax */ +}); + +test('Assert class strict', () => { + const assertInstance = new Assert(); + + assertInstance.equal(assertInstance.equal, assertInstance.strictEqual); + assertInstance.equal( + assertInstance.deepEqual, + assertInstance.deepStrictEqual + ); + assertInstance.equal(assertInstance.notEqual, assertInstance.notStrictEqual); + assertInstance.equal( + assertInstance.notDeepEqual, + assertInstance.notDeepStrictEqual + ); +}); + +test('Assert class with invalid diff option', () => { + assert.throws(() => new Assert({ diff: 'invalid' }), { + code: 'ERR_INVALID_ARG_VALUE', + name: 'TypeError', + message: "The property 'options.diff' must be one of: 'simple', 'full'. 
Received 'invalid'", + }); +}); + +const longLinesOfAs = 'A\n'.repeat(100); +const longLinesOFBs = 'B\n'.repeat(100); +const truncatedAs = 'A\\n'.repeat(10) + '...'; +const truncatedBs = 'B\\n'.repeat(10) + '...'; + +const longStringOfAs = 'A'.repeat(10_000); +const longStringOfBs = 'B'.repeat(10_000); + +const longLinesOfAsWithEllipsis = longStringOfAs.substring(0, 9_488) + '...'; +const longLinesOFBsWithEllipsis = longStringOfBs.substring(0, 9_488) + '...'; +test('Assert class non strict with full diff', () => { + const assertInstance = new Assert({ diff: 'full', strict: false }); + + // long strings + { + assertInstance.throws( + () => { + assertInstance.strictEqual(longStringOfAs, longStringOfBs); + }, + (err) => { + assertInstance.strictEqual(err.code, 'ERR_ASSERTION'); + assertInstance.strictEqual(err.operator, 'strictEqual'); + assertInstance.strictEqual(err.diff, 'full'); + assertInstance.strictEqual(err.actual, longStringOfAs); + assertInstance.strictEqual(err.expected, longStringOfBs); + + assertInstance.strictEqual( + err.message, + `Expected values to be strictly equal:\n+ actual - expected\n\n` + + `+ '${longStringOfAs}'\n- '${longStringOfBs}'\n` + ); + assertInstance.ok( + inspect(err).includes(`actual: '${longLinesOfAsWithEllipsis}'`) + ); + assertInstance.ok( + inspect(err).includes(`expected: '${longLinesOFBsWithEllipsis}'`) + ); + return true; + } + ); + + assertInstance.throws( + () => { + assertInstance.notStrictEqual(longStringOfAs, longStringOfAs); + }, + (err) => { + assertInstance.strictEqual(err.code, 'ERR_ASSERTION'); + assertInstance.strictEqual(err.operator, 'notStrictEqual'); + assertInstance.strictEqual(err.diff, 'full'); + assertInstance.strictEqual(err.actual, longStringOfAs); + assertInstance.strictEqual(err.expected, longStringOfAs); + + assertInstance.strictEqual( + err.message, + `Expected "actual" to be strictly unequal to:\n\n` + + `'${longStringOfAs}'` + ); + assertInstance.ok( + inspect(err).includes(`actual: '${longLinesOfAsWithEllipsis}'`) + ); + assertInstance.ok( + inspect(err).includes(`expected: '${longLinesOfAsWithEllipsis}'`) + ); + return true; + } + ); + + assertInstance.throws( + () => { + assertInstance.deepEqual(longStringOfAs, longStringOfBs); + }, + (err) => { + assertInstance.strictEqual(err.code, 'ERR_ASSERTION'); + assertInstance.strictEqual(err.operator, 'deepEqual'); + assertInstance.strictEqual(err.diff, 'full'); + assertInstance.strictEqual(err.actual, longStringOfAs); + assertInstance.strictEqual(err.expected, longStringOfBs); + + assertInstance.strictEqual( + err.message, + `Expected values to be loosely deep-equal:\n\n` + + `'${longStringOfAs}'\n\nshould loosely deep-equal\n\n'${longStringOfBs}'` + ); + assertInstance.ok( + inspect(err).includes(`actual: '${longLinesOfAsWithEllipsis}'`) + ); + assertInstance.ok( + inspect(err).includes(`expected: '${longLinesOFBsWithEllipsis}'`) + ); + return true; + } + ); + } + + // long lines + { + assertInstance.throws( + () => { + assertInstance.strictEqual(longLinesOfAs, longLinesOFBs); + }, + (err) => { + assertInstance.strictEqual(err.code, 'ERR_ASSERTION'); + assertInstance.strictEqual(err.operator, 'strictEqual'); + assertInstance.strictEqual(err.diff, 'full'); + assertInstance.strictEqual(err.actual, longLinesOfAs); + assertInstance.strictEqual(err.expected, longLinesOFBs); + + assertInstance.strictEqual(err.message.split('\n').length, 204); + assertInstance.strictEqual(err.actual.split('\n').length, 101); + assertInstance.ok( + err.message.includes('Expected values to be strictly 
equal') + ); + assertInstance.ok(inspect(err).includes(`actual: '${truncatedAs}`)); + assertInstance.ok(inspect(err).includes(`expected: '${truncatedBs}`)); + return true; + } + ); + + assertInstance.throws( + () => { + assertInstance.notStrictEqual(longLinesOfAs, longLinesOfAs); + }, + (err) => { + assertInstance.strictEqual(err.code, 'ERR_ASSERTION'); + assertInstance.strictEqual(err.operator, 'notStrictEqual'); + assertInstance.strictEqual(err.diff, 'full'); + assertInstance.strictEqual(err.actual, longLinesOfAs); + assertInstance.strictEqual(err.expected, longLinesOfAs); + + assertInstance.strictEqual(err.message.split('\n').length, 103); + assertInstance.strictEqual(err.actual.split('\n').length, 101); + assertInstance.ok( + err.message.includes(`Expected "actual" to be strictly unequal to:`) + ); + assertInstance.ok(inspect(err).includes(`actual: '${truncatedAs}`)); + assertInstance.ok(inspect(err).includes(`expected: '${truncatedAs}`)); + return true; + } + ); + + assertInstance.throws( + () => { + assertInstance.deepEqual(longLinesOfAs, longLinesOFBs); + }, + (err) => { + assertInstance.strictEqual(err.code, 'ERR_ASSERTION'); + assertInstance.strictEqual(err.operator, 'deepEqual'); + assertInstance.strictEqual(err.diff, 'full'); + assertInstance.strictEqual(err.actual, longLinesOfAs); + assertInstance.strictEqual(err.expected, longLinesOFBs); + + assertInstance.strictEqual(err.message.split('\n').length, 205); + assertInstance.strictEqual(err.actual.split('\n').length, 101); + assertInstance.ok( + err.message.includes(`Expected values to be loosely deep-equal:`) + ); + assertInstance.ok(inspect(err).includes(`actual: '${truncatedAs}`)); + assertInstance.ok(inspect(err).includes(`expected: '${truncatedBs}`)); + return true; + } + ); + } +}); + +test('Assert class non strict with simple diff', () => { + const assertInstance = new Assert({ diff: 'simple', strict: false }); + + // long strings + { + assertInstance.throws( + () => { + assertInstance.strictEqual(longStringOfAs, longStringOfBs); + }, + (err) => { + assertInstance.strictEqual(err.code, 'ERR_ASSERTION'); + assertInstance.strictEqual(err.operator, 'strictEqual'); + assertInstance.strictEqual(err.diff, 'simple'); + assertInstance.strictEqual(err.actual, longStringOfAs); + assertInstance.strictEqual(err.expected, longStringOfBs); + + assertInstance.strictEqual( + err.message, + `Expected values to be strictly equal:\n+ actual - expected\n\n` + + `+ '${longStringOfAs}'\n- '${longStringOfBs}'\n` + ); + assertInstance.ok( + inspect(err).includes(`actual: '${longLinesOfAsWithEllipsis}'`) + ); + assertInstance.ok( + inspect(err).includes(`expected: '${longLinesOFBsWithEllipsis}'`) + ); + return true; + } + ); + + assertInstance.throws( + () => { + assertInstance.notStrictEqual(longStringOfAs, longStringOfAs); + }, + (err) => { + assertInstance.strictEqual(err.code, 'ERR_ASSERTION'); + assertInstance.strictEqual(err.operator, 'notStrictEqual'); + assertInstance.strictEqual(err.diff, 'simple'); + assertInstance.strictEqual(err.actual, longStringOfAs); + assertInstance.strictEqual(err.expected, longStringOfAs); + + assertInstance.strictEqual( + err.message, + `Expected "actual" to be strictly unequal to:\n\n` + + `'${longStringOfAs}'` + ); + assertInstance.ok( + inspect(err).includes(`actual: '${longLinesOfAsWithEllipsis}'`) + ); + assertInstance.ok( + inspect(err).includes(`expected: '${longLinesOfAsWithEllipsis}'`) + ); + return true; + } + ); + + assertInstance.throws( + () => { + assertInstance.deepEqual(longStringOfAs, 
longStringOfBs); + }, + (err) => { + assertInstance.strictEqual(err.code, 'ERR_ASSERTION'); + assertInstance.strictEqual(err.operator, 'deepEqual'); + assertInstance.strictEqual(err.diff, 'simple'); + assertInstance.strictEqual(err.actual, longStringOfAs); + assertInstance.strictEqual(err.expected, longStringOfBs); + + assertInstance.strictEqual( + err.message, + `Expected values to be loosely deep-equal:\n\n` + + `'${ + longStringOfAs.substring(0, 508) + '...' + }\n\nshould loosely deep-equal\n\n'${ + longStringOfBs.substring(0, 508) + '...' + }` + ); + assertInstance.ok( + inspect(err).includes(`actual: '${longLinesOfAsWithEllipsis}'`) + ); + assertInstance.ok( + inspect(err).includes(`expected: '${longLinesOFBsWithEllipsis}'`) + ); + return true; + } + ); + } + + // long lines + { + assertInstance.throws( + () => { + assertInstance.strictEqual(longLinesOfAs, longLinesOFBs); + }, + (err) => { + assertInstance.strictEqual(err.code, 'ERR_ASSERTION'); + assertInstance.strictEqual(err.operator, 'strictEqual'); + assertInstance.strictEqual(err.diff, 'simple'); + assertInstance.strictEqual(err.actual, longLinesOfAs); + assertInstance.strictEqual(err.expected, longLinesOFBs); + assertInstance.strictEqual(err.message.split('\n').length, 204); + assertInstance.strictEqual(err.actual.split('\n').length, 101); + + assertInstance.ok( + err.message.includes('Expected values to be strictly equal') + ); + assertInstance.ok(inspect(err).includes(`actual: '${truncatedAs}`)); + assertInstance.ok(inspect(err).includes(`expected: '${truncatedBs}`)); + return true; + } + ); + + assertInstance.throws( + () => { + assertInstance.notStrictEqual(longLinesOfAs, longLinesOfAs); + }, + (err) => { + assertInstance.strictEqual(err.code, 'ERR_ASSERTION'); + assertInstance.strictEqual(err.operator, 'notStrictEqual'); + assertInstance.strictEqual(err.diff, 'simple'); + assertInstance.strictEqual(err.actual, longLinesOfAs); + assertInstance.strictEqual(err.expected, longLinesOfAs); + + assertInstance.strictEqual(err.message.split('\n').length, 50); + assertInstance.strictEqual(err.actual.split('\n').length, 101); + assertInstance.ok( + err.message.includes(`Expected "actual" to be strictly unequal to:`) + ); + assertInstance.ok(inspect(err).includes(`actual: '${truncatedAs}`)); + assertInstance.ok(inspect(err).includes(`expected: '${truncatedAs}`)); + return true; + } + ); + + assertInstance.throws( + () => { + assertInstance.deepEqual(longLinesOfAs, longLinesOFBs); + }, + (err) => { + assertInstance.strictEqual(err.code, 'ERR_ASSERTION'); + assertInstance.strictEqual(err.operator, 'deepEqual'); + assertInstance.strictEqual(err.diff, 'simple'); + assertInstance.strictEqual(err.actual, longLinesOfAs); + assertInstance.strictEqual(err.expected, longLinesOFBs); + + assertInstance.strictEqual(err.message.split('\n').length, 109); + assertInstance.strictEqual(err.actual.split('\n').length, 101); + assertInstance.ok( + err.message.includes(`Expected values to be loosely deep-equal:`) + ); + assertInstance.ok(inspect(err).includes(`actual: '${truncatedAs}`)); + assertInstance.ok(inspect(err).includes(`expected: '${truncatedBs}`)); + return true; + } + ); + } +}); diff --git a/test/parallel/test-blocklist.js b/test/parallel/test-blocklist.js index 4b1bc78cd5ac92..6895efcc1c00a2 100644 --- a/test/parallel/test-blocklist.js +++ b/test/parallel/test-blocklist.js @@ -287,3 +287,75 @@ const util = require('util'); assert(BlockList.isBlockList(new BlockList())); assert(!BlockList.isBlockList({})); } + +// Test exporting and importing 
the rule list to/from JSON +{ + const ruleList = [ + 'Address: IPv4 10.0.0.5', + 'Address: IPv6 ::', + 'Subnet: IPv4 192.168.1.0/24', + 'Subnet: IPv6 8592:757c:efae:4e45::/64', + ]; + + const test2 = new BlockList(); + const test3 = new BlockList(); + const test4 = new BlockList(); + const test5 = new BlockList(); + + const bl = new BlockList(); + bl.addAddress('10.0.0.5'); + bl.addAddress('::', 'ipv6'); + bl.addSubnet('192.168.1.0', 24); + bl.addSubnet('8592:757c:efae:4e45::', 64, 'ipv6'); + + // Test invalid inputs (input to fromJSON must be an array of + // string rules or a serialized json string of an array of + // string rules. + [ + 1, null, Symbol(), [1, 2, 3], '123', [Symbol()], new Map(), + ].forEach((i) => { + assert.throws(() => test2.fromJSON(i), { + code: 'ERR_INVALID_ARG_TYPE', + }); + }); + + // Invalid rules are ignored. + test2.fromJSON(['1', '2', '3']); + assert.deepStrictEqual(test2.rules, []); + + // Direct output from toJSON method works + test2.fromJSON(bl.toJSON()); + assert.deepStrictEqual(test2.rules.sort(), ruleList); + + // JSON stringified output works + test3.fromJSON(JSON.stringify(bl)); + assert.deepStrictEqual(test3.rules.sort(), ruleList); + + // A raw array works + test4.fromJSON(ruleList); + assert.deepStrictEqual(test4.rules.sort(), ruleList); + + // Individual rules work + ruleList.forEach((item) => { + test5.fromJSON([item]); + }); + assert.deepStrictEqual(test5.rules.sort(), ruleList); + + // Each of the created blocklists should handle the checks identically. + [ + ['10.0.0.5', 'ipv4', true], + ['10.0.0.6', 'ipv4', false], + ['::', 'ipv6', true], + ['::1', 'ipv6', false], + ['192.168.1.0', 'ipv4', true], + ['193.168.1.0', 'ipv4', false], + ['8592:757c:efae:4e45::', 'ipv6', true], + ['1111:1111:1111:1111::', 'ipv6', false], + ].forEach((i) => { + assert.strictEqual(bl.check(i[0], i[1]), i[2]); + assert.strictEqual(test2.check(i[0], i[1]), i[2]); + assert.strictEqual(test3.check(i[0], i[1]), i[2]); + assert.strictEqual(test4.check(i[0], i[1]), i[2]); + assert.strictEqual(test5.check(i[0], i[1]), i[2]); + }); +} diff --git a/test/parallel/test-buffer-write-fast.js b/test/parallel/test-buffer-write-fast.js index 4594934f75838c..4f385992ed6472 100644 --- a/test/parallel/test-buffer-write-fast.js +++ b/test/parallel/test-buffer-write-fast.js @@ -41,5 +41,5 @@ testFastUtf8Write(); if (common.isDebug) { const { getV8FastApiCallCount } = internalBinding('debug'); - assert(getV8FastApiCallCount('buffer.writeString'), 4); + assert.strictEqual(getV8FastApiCallCount('buffer.writeString'), 4); } diff --git a/test/parallel/test-crypto-aes-wrap.js b/test/parallel/test-crypto-aes-wrap.js index 21d48d8a3fbae7..951e93d728e32b 100644 --- a/test/parallel/test-crypto-aes-wrap.js +++ b/test/parallel/test-crypto-aes-wrap.js @@ -53,6 +53,11 @@ const key3 = Buffer.from('29c9eab5ed5ad44134a1437fe2e673b4d88a5b7c72e68454fea087 text: '12345678123456781234567812345678123' }, ].forEach(({ algorithm, key, iv, text }) => { + if (!crypto.getCiphers().includes(algorithm)) { + common.printSkipMessage(`Skipping unsupported ${algorithm} test case`); + return; + } + const cipher = crypto.createCipheriv(algorithm, key, iv); const decipher = crypto.createDecipheriv(algorithm, key, iv); const msg = decipher.update(cipher.update(text, 'utf8'), 'buffer', 'utf8'); diff --git a/test/parallel/test-crypto-async-sign-verify.js b/test/parallel/test-crypto-async-sign-verify.js index b35dd08e6c4979..7a5e72b1e8e498 100644 --- a/test/parallel/test-crypto-async-sign-verify.js +++ 
b/test/parallel/test-crypto-async-sign-verify.js @@ -138,9 +138,9 @@ test('dsa_public.pem', 'dsa_private.pem', 'sha256', false, verify('sha256', data, publicKey, signature), verify('sha256', data, publicKey, signature), verify('sha256', data, publicKey, signature), - ]).then(common.mustCall()); + ]); }) - .catch(common.mustNotCall()); + .then(common.mustCall()); } { diff --git a/test/parallel/test-crypto-authenticated-stream.js b/test/parallel/test-crypto-authenticated-stream.js index fcd53aa4696abc..51b928ec36be1f 100644 --- a/test/parallel/test-crypto-authenticated-stream.js +++ b/test/parallel/test-crypto-authenticated-stream.js @@ -115,6 +115,11 @@ function fstream(config) { fstream.count = 0; function test(config) { + if (!crypto.getCiphers().includes(config.cipher)) { + common.printSkipMessage(`unsupported cipher: ${config.cipher}`); + return; + } + direct(config); mstream(config); fstream(config); diff --git a/test/parallel/test-crypto-authenticated.js b/test/parallel/test-crypto-authenticated.js index 5f74f7c2611138..e8fedf2d5d5072 100644 --- a/test/parallel/test-crypto-authenticated.js +++ b/test/parallel/test-crypto-authenticated.js @@ -248,49 +248,24 @@ for (const test of TEST_CASES) { // Test that create(De|C)ipheriv throws if the mode is CCM and an invalid // authentication tag length has been specified. { - for (const authTagLength of [-1, true, false, NaN, 5.5]) { - assert.throws(() => { - crypto.createCipheriv('aes-256-ccm', - 'FxLKsqdmv0E9xrQhp0b1ZgI0K7JFZJM8', - 'qkuZpJWCewa6S', - { - authTagLength - }); - }, { - name: 'TypeError', - code: 'ERR_INVALID_ARG_VALUE', - message: "The property 'options.authTagLength' is invalid. " + - `Received ${inspect(authTagLength)}` - }); - - assert.throws(() => { - crypto.createDecipheriv('aes-256-ccm', + if (!ciphers.includes('aes-256-ccm')) { + common.printSkipMessage(`unsupported aes-256-ccm test`); + } else { + for (const authTagLength of [-1, true, false, NaN, 5.5]) { + assert.throws(() => { + crypto.createCipheriv('aes-256-ccm', 'FxLKsqdmv0E9xrQhp0b1ZgI0K7JFZJM8', 'qkuZpJWCewa6S', { authTagLength }); - }, { - name: 'TypeError', - code: 'ERR_INVALID_ARG_VALUE', - message: "The property 'options.authTagLength' is invalid. " + - `Received ${inspect(authTagLength)}` - }); - } - - // The following values will not be caught by the JS layer and thus will not - // use the default error codes. - for (const authTagLength of [0, 1, 2, 3, 5, 7, 9, 11, 13, 15, 17, 18]) { - assert.throws(() => { - crypto.createCipheriv('aes-256-ccm', - 'FxLKsqdmv0E9xrQhp0b1ZgI0K7JFZJM8', - 'qkuZpJWCewa6S', - { - authTagLength - }); - }, errMessages.authTagLength); + }, { + name: 'TypeError', + code: 'ERR_INVALID_ARG_VALUE', + message: "The property 'options.authTagLength' is invalid. " + + `Received ${inspect(authTagLength)}` + }); - if (!isFipsEnabled) { assert.throws(() => { crypto.createDecipheriv('aes-256-ccm', 'FxLKsqdmv0E9xrQhp0b1ZgI0K7JFZJM8', @@ -298,7 +273,36 @@ for (const test of TEST_CASES) { { authTagLength }); + }, { + name: 'TypeError', + code: 'ERR_INVALID_ARG_VALUE', + message: "The property 'options.authTagLength' is invalid. " + + `Received ${inspect(authTagLength)}` + }); + } + + // The following values will not be caught by the JS layer and thus will not + // use the default error codes. 
+ for (const authTagLength of [0, 1, 2, 3, 5, 7, 9, 11, 13, 15, 17, 18]) { + assert.throws(() => { + crypto.createCipheriv('aes-256-ccm', + 'FxLKsqdmv0E9xrQhp0b1ZgI0K7JFZJM8', + 'qkuZpJWCewa6S', + { + authTagLength + }); }, errMessages.authTagLength); + + if (!isFipsEnabled) { + assert.throws(() => { + crypto.createDecipheriv('aes-256-ccm', + 'FxLKsqdmv0E9xrQhp0b1ZgI0K7JFZJM8', + 'qkuZpJWCewa6S', + { + authTagLength + }); + }, errMessages.authTagLength); + } } } } @@ -307,6 +311,11 @@ for (const test of TEST_CASES) { // authentication tag length has been specified. { for (const mode of ['ccm', 'ocb']) { + if (!ciphers.includes(`aes-256-${mode}`)) { + common.printSkipMessage(`unsupported aes-256-${mode} test`); + continue; + } + assert.throws(() => { crypto.createCipheriv(`aes-256-${mode}`, 'FxLKsqdmv0E9xrQhp0b1ZgI0K7JFZJM8', @@ -330,84 +339,96 @@ for (const test of TEST_CASES) { // Test that setAAD throws if an invalid plaintext length has been specified. { - const cipher = crypto.createCipheriv('aes-256-ccm', - 'FxLKsqdmv0E9xrQhp0b1ZgI0K7JFZJM8', - 'qkuZpJWCewa6S', - { - authTagLength: 10 - }); - - for (const plaintextLength of [-1, true, false, NaN, 5.5]) { - assert.throws(() => { - cipher.setAAD(Buffer.from('0123456789', 'hex'), { plaintextLength }); - }, { - name: 'TypeError', - code: 'ERR_INVALID_ARG_VALUE', - message: "The property 'options.plaintextLength' is invalid. " + - `Received ${inspect(plaintextLength)}` - }); + if (!ciphers.includes('aes-256-ccm')) { + common.printSkipMessage(`unsupported aes-256-ccm test`); + } else { + const cipher = crypto.createCipheriv('aes-256-ccm', + 'FxLKsqdmv0E9xrQhp0b1ZgI0K7JFZJM8', + 'qkuZpJWCewa6S', + { + authTagLength: 10 + }); + + for (const plaintextLength of [-1, true, false, NaN, 5.5]) { + assert.throws(() => { + cipher.setAAD(Buffer.from('0123456789', 'hex'), { plaintextLength }); + }, { + name: 'TypeError', + code: 'ERR_INVALID_ARG_VALUE', + message: "The property 'options.plaintextLength' is invalid. " + + `Received ${inspect(plaintextLength)}` + }); + } } } // Test that setAAD and update throw if the plaintext is too long. 
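For contrast with the failure cases that follow, a hedged sketch of the valid AES-256-CCM usage these guards protect, using only documented `node:crypto` calls (the key, nonce, AAD, and tag length are illustrative, and the usual `crypto`/`assert` requires from the surrounding file are assumed):

if (crypto.getCiphers().includes('aes-256-ccm')) {
  // Illustrative values; CCM requires the authTagLength option up front.
  const key = crypto.randomBytes(32);
  const nonce = crypto.randomBytes(12);
  const aad = Buffer.from('header');
  const plaintext = Buffer.from('secret');

  const cipher = crypto.createCipheriv('aes-256-ccm', key, nonce,
                                       { authTagLength: 16 });
  // With AAD, CCM also needs the plaintext length before update().
  cipher.setAAD(aad, { plaintextLength: plaintext.length });
  const ciphertext = Buffer.concat([cipher.update(plaintext), cipher.final()]);
  const tag = cipher.getAuthTag();

  const decipher = crypto.createDecipheriv('aes-256-ccm', key, nonce,
                                           { authTagLength: 16 });
  decipher.setAuthTag(tag);
  decipher.setAAD(aad, { plaintextLength: ciphertext.length });
  const decrypted = Buffer.concat([decipher.update(ciphertext),
                                   decipher.final()]);
  assert.deepStrictEqual(decrypted, plaintext);
}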
{ - for (const ivLength of [13, 12]) { - const maxMessageSize = (1 << (8 * (15 - ivLength))) - 1; - const key = 'FxLKsqdmv0E9xrQhp0b1ZgI0K7JFZJM8'; - const cipher = () => crypto.createCipheriv('aes-256-ccm', key, - '0'.repeat(ivLength), - { - authTagLength: 10 - }); + if (!ciphers.includes('aes-256-ccm')) { + common.printSkipMessage(`unsupported aes-256-ccm test`); + } else { + for (const ivLength of [13, 12]) { + const maxMessageSize = (1 << (8 * (15 - ivLength))) - 1; + const key = 'FxLKsqdmv0E9xrQhp0b1ZgI0K7JFZJM8'; + const cipher = () => crypto.createCipheriv('aes-256-ccm', key, + '0'.repeat(ivLength), + { + authTagLength: 10 + }); - assert.throws(() => { - cipher().setAAD(Buffer.alloc(0), { - plaintextLength: maxMessageSize + 1 - }); - }, /Invalid message length$/); + assert.throws(() => { + cipher().setAAD(Buffer.alloc(0), { + plaintextLength: maxMessageSize + 1 + }); + }, /Invalid message length$/); - const msg = Buffer.alloc(maxMessageSize + 1); - assert.throws(() => { - cipher().update(msg); - }, /Invalid message length/); + const msg = Buffer.alloc(maxMessageSize + 1); + assert.throws(() => { + cipher().update(msg); + }, /Invalid message length/); - const c = cipher(); - c.setAAD(Buffer.alloc(0), { - plaintextLength: maxMessageSize - }); - c.update(msg.slice(1)); + const c = cipher(); + c.setAAD(Buffer.alloc(0), { + plaintextLength: maxMessageSize + }); + c.update(msg.slice(1)); + } } } // Test that setAAD throws if the mode is CCM and the plaintext length has not // been specified. { - assert.throws(() => { - const cipher = crypto.createCipheriv('aes-256-ccm', - 'FxLKsqdmv0E9xrQhp0b1ZgI0K7JFZJM8', - 'qkuZpJWCewa6S', - { - authTagLength: 10 - }); - cipher.setAAD(Buffer.from('0123456789', 'hex')); - }, /options\.plaintextLength required for CCM mode with AAD/); - - if (!isFipsEnabled) { + if (!ciphers.includes('aes-256-ccm')) { + common.printSkipMessage(`unsupported aes-256-ccm test`); + } else { assert.throws(() => { - const cipher = crypto.createDecipheriv('aes-256-ccm', - 'FxLKsqdmv0E9xrQhp0b1ZgI0K7JFZJM8', - 'qkuZpJWCewa6S', - { - authTagLength: 10 - }); + const cipher = crypto.createCipheriv('aes-256-ccm', + 'FxLKsqdmv0E9xrQhp0b1ZgI0K7JFZJM8', + 'qkuZpJWCewa6S', + { + authTagLength: 10 + }); cipher.setAAD(Buffer.from('0123456789', 'hex')); }, /options\.plaintextLength required for CCM mode with AAD/); + + if (!isFipsEnabled) { + assert.throws(() => { + const cipher = crypto.createDecipheriv('aes-256-ccm', + 'FxLKsqdmv0E9xrQhp0b1ZgI0K7JFZJM8', + 'qkuZpJWCewa6S', + { + authTagLength: 10 + }); + cipher.setAAD(Buffer.from('0123456789', 'hex')); + }, /options\.plaintextLength required for CCM mode with AAD/); + } } } // Test that final() throws in CCM mode when no authentication tag is provided. { - if (!isFipsEnabled) { + if (!isFipsEnabled && ciphers.includes('aes-128-ccm')) { const key = Buffer.from('1ed2233fa2223ef5d7df08546049406c', 'hex'); const iv = Buffer.from('7305220bca40d4c90e1791e9', 'hex'); const ct = Buffer.from('8beba09d4d4d861f957d51c0794f4abf8030848e', 'hex'); @@ -439,12 +460,16 @@ for (const test of TEST_CASES) { // Test that an IV length of 11 does not overflow max_message_size_. { - const key = 'x'.repeat(16); - const iv = Buffer.from('112233445566778899aabb', 'hex'); - const options = { authTagLength: 8 }; - const encrypt = crypto.createCipheriv('aes-128-ccm', key, iv, options); - encrypt.update('boom'); // Should not throw 'Message exceeds maximum size'. 
- encrypt.final(); + if (!ciphers.includes('aes-128-ccm')) { + common.printSkipMessage(`unsupported aes-128-ccm test`); + } else { + const key = 'x'.repeat(16); + const iv = Buffer.from('112233445566778899aabb', 'hex'); + const options = { authTagLength: 8 }; + const encrypt = crypto.createCipheriv('aes-128-ccm', key, iv, options); + encrypt.update('boom'); // Should not throw 'Message exceeds maximum size'. + encrypt.final(); + } } // Test that the authentication tag can be set at any point before calling @@ -499,6 +524,11 @@ for (const test of TEST_CASES) { } for (const alg of ['aes-256-gcm', 'aes-256-ocb', 'chacha20-poly1305']) { + if (!ciphers.includes(alg)) { + common.printSkipMessage(`unsupported ${alg} test`); + continue; + } + for (const authTagLength of alg === 'aes-256-gcm' ? [undefined, 8] : [8]) { for (const [useAAD, useMessage] of [ [false, false], // No AAD, no update. @@ -520,6 +550,11 @@ for (const test of TEST_CASES) { const opts = { authTagLength: 8 }; for (const mode of ['gcm', 'ccm', 'ocb']) { + if (!ciphers.includes(`aes-128-${mode}`)) { + common.printSkipMessage(`unsupported aes-128-${mode} test`); + continue; + } + const cipher = crypto.createCipheriv(`aes-128-${mode}`, key, iv, opts); const ciphertext = Buffer.concat([cipher.update(plain), cipher.final()]); const tag = cipher.getAuthTag(); @@ -563,25 +598,29 @@ for (const test of TEST_CASES) { tampered: false, }; - // Invalid IV lengths should be detected: - // - 12 and below are valid. - // - 13-16 are not detected as invalid by some OpenSSL versions. - check(13); - check(14); - check(15); - check(16); - // - 17 and above were always detected as invalid by OpenSSL. - check(17); - - function check(ivLength) { - const prefix = ivLength - valid.iv.length / 2; - assert.throws(() => crypto.createCipheriv( - valid.algo, - Buffer.from(valid.key, 'hex'), - Buffer.from(H(prefix) + valid.iv, 'hex') - ), errMessages.length, `iv length ${ivLength} was not rejected`); - - function H(length) { return '00'.repeat(length); } + if (!ciphers.includes(valid.algo)) { + common.printSkipMessage(`unsupported ${valid.algo} test`); + } else { + // Invalid IV lengths should be detected: + // - 12 and below are valid. + // - 13-16 are not detected as invalid by some OpenSSL versions. + check(13); + check(14); + check(15); + check(16); + // - 17 and above were always detected as invalid by OpenSSL. 
+ check(17); + + function check(ivLength) { + const prefix = ivLength - valid.iv.length / 2; + assert.throws(() => crypto.createCipheriv( + valid.algo, + Buffer.from(valid.key, 'hex'), + Buffer.from(H(prefix) + valid.iv, 'hex') + ), errMessages.length, `iv length ${ivLength} was not rejected`); + + function H(length) { return '00'.repeat(length); } + } } } diff --git a/test/parallel/test-crypto-default-shake-lengths.js b/test/parallel/test-crypto-default-shake-lengths.js index 1e558814ca0e4a..36c01d078a04a9 100644 --- a/test/parallel/test-crypto-default-shake-lengths.js +++ b/test/parallel/test-crypto-default-shake-lengths.js @@ -5,6 +5,11 @@ const common = require('../common'); if (!common.hasCrypto) common.skip('missing crypto'); +const crypto = require('crypto'); +if (!crypto.getHashes().includes('shake128')) { + common.skip('unsupported shake128 test'); +} + const { createHash } = require('crypto'); common.expectWarning({ diff --git a/test/parallel/test-crypto-des3-wrap.js b/test/parallel/test-crypto-des3-wrap.js index 75c8cd574fd662..54d8a4c4014abc 100644 --- a/test/parallel/test-crypto-des3-wrap.js +++ b/test/parallel/test-crypto-des3-wrap.js @@ -6,6 +6,10 @@ if (!common.hasCrypto) const assert = require('assert'); const crypto = require('crypto'); +const ciphers = crypto.getCiphers(); +if (!ciphers.includes('des3-wrap')) + common.skip('des3-wrap cipher is not available'); + // Test case for des-ede3 wrap/unwrap. des3-wrap needs extra 2x blocksize // then plaintext to store ciphertext. const test = { diff --git a/test/parallel/test-crypto-ecb.js b/test/parallel/test-crypto-ecb.js index 6439c9354a059e..06c88272438a05 100644 --- a/test/parallel/test-crypto-ecb.js +++ b/test/parallel/test-crypto-ecb.js @@ -37,6 +37,10 @@ if (hasOpenSSL3) { 'OpenSSl 3.x'); } +if (!crypto.getCiphers().includes('BF-ECB')) { + common.skip('BF-ECB cipher is not available'); +} + const assert = require('assert'); // Testing whether EVP_CipherInit_ex is functioning correctly. diff --git a/test/parallel/test-crypto-padding-aes256.js b/test/parallel/test-crypto-padding-aes256.js index 14d853bdfd0a5d..164d06413a7d14 100644 --- a/test/parallel/test-crypto-padding-aes256.js +++ b/test/parallel/test-crypto-padding-aes256.js @@ -27,6 +27,9 @@ if (!common.hasCrypto) const assert = require('assert'); const crypto = require('crypto'); +if (!crypto.getCiphers().includes('aes256')) + common.skip('aes256 cipher is not available'); + const iv = Buffer.from('00000000000000000000000000000000', 'hex'); const key = Buffer.from('0123456789abcdef0123456789abcdef' + '0123456789abcdef0123456789abcdef', 'hex'); diff --git a/test/parallel/test-crypto-rsa-dsa.js b/test/parallel/test-crypto-rsa-dsa.js index dcd5045daaf58c..119bc3c2d20ea7 100644 --- a/test/parallel/test-crypto-rsa-dsa.js +++ b/test/parallel/test-crypto-rsa-dsa.js @@ -9,7 +9,7 @@ const crypto = require('crypto'); const constants = crypto.constants; const fixtures = require('../common/fixtures'); -const { hasOpenSSL3 } = require('../common/crypto'); +const { hasOpenSSL, hasOpenSSL3 } = require('../common/crypto'); // Test certificates const certPem = fixtures.readKey('rsa_cert.crt'); @@ -225,20 +225,38 @@ function test_rsa(padding, encryptOaepHash, decryptOaepHash) { if (padding === constants.RSA_PKCS1_PADDING) { if (!process.config.variables.node_shared_openssl) { - assert.throws(() => { - crypto.privateDecrypt({ + // TODO(richardlau) remove check and else branch after deps/openssl + // is upgraded. 
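The branch just below keys off `hasOpenSSL(3, 2)`: newer OpenSSL builds implement implicit rejection for PKCS#1 v1.5 decryption, so Node can permit `privateDecrypt()` with `RSA_PKCS1_PADDING`, while older builds keep rejecting it with `ERR_INVALID_ARG_VALUE`. For orientation, a hedged sketch of the always-supported OAEP round trip that this test also exercises (key size and message are illustrative; `assert` is assumed to be required as in the file above):

// Illustrative only: publicEncrypt() defaults to RSA_PKCS1_OAEP_PADDING.
const { generateKeyPairSync, publicEncrypt, privateDecrypt } = require('node:crypto');
const { publicKey, privateKey } = generateKeyPairSync('rsa', { modulusLength: 2048 });
const message = Buffer.from('hello');
const encrypted = publicEncrypt(publicKey, message);
const decrypted = privateDecrypt(privateKey, encrypted);
assert.deepStrictEqual(decrypted, message);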
+ if (hasOpenSSL(3, 2)) { + let decryptedBuffer = crypto.privateDecrypt({ key: rsaKeyPem, padding: padding, oaepHash: decryptOaepHash }, encryptedBuffer); - }, { code: 'ERR_INVALID_ARG_VALUE' }); - assert.throws(() => { - crypto.privateDecrypt({ + assert.deepStrictEqual(decryptedBuffer, input); + + decryptedBuffer = crypto.privateDecrypt({ key: rsaPkcs8KeyPem, padding: padding, oaepHash: decryptOaepHash }, encryptedBuffer); - }, { code: 'ERR_INVALID_ARG_VALUE' }); + assert.deepStrictEqual(decryptedBuffer, input); + } else { + assert.throws(() => { + crypto.privateDecrypt({ + key: rsaKeyPem, + padding: padding, + oaepHash: decryptOaepHash + }, encryptedBuffer); + }, { code: 'ERR_INVALID_ARG_VALUE' }); + assert.throws(() => { + crypto.privateDecrypt({ + key: rsaPkcs8KeyPem, + padding: padding, + oaepHash: decryptOaepHash + }, encryptedBuffer); + }, { code: 'ERR_INVALID_ARG_VALUE' }); + } } else { // The version of a linked against OpenSSL. May // or may not support implicit rejection. Figuring diff --git a/test/parallel/test-debugger-address.mjs b/test/parallel/test-debugger-address.mjs index eab99c9b0e2fb3..c159ff2094e988 100644 --- a/test/parallel/test-debugger-address.mjs +++ b/test/parallel/test-debugger-address.mjs @@ -55,7 +55,7 @@ function launchTarget(...args) { try { const { childProc, host, port } = await launchTarget('--inspect=0', script); target = childProc; - cli = startCLI([`${host || '127.0.0.1'}:${port}`]); + cli = startCLI([`${host || '127.0.0.1'}:${port}`], [], {}, { randomPort: false }); await cli.waitForPrompt(); await cli.command('sb("alive.js", 3)'); await cli.waitFor(/break/); diff --git a/test/parallel/test-debugger-auto-resume.mjs b/test/parallel/test-debugger-auto-resume.mjs index 077258907d136b..797f090c67acb1 100644 --- a/test/parallel/test-debugger-auto-resume.mjs +++ b/test/parallel/test-debugger-auto-resume.mjs @@ -21,9 +21,7 @@ addLibraryPath(process.env); }; env.NODE_INSPECT_RESUME_ON_START = '1'; - const cli = startCLI(['--port=0', script], [], { - env, - }); + const cli = startCLI([script], [], { env }); await cli.waitForInitialBreak(); deepStrictEqual(cli.breakInfo, { diff --git a/test/parallel/test-debugger-backtrace.js b/test/parallel/test-debugger-backtrace.js index f66cc11d70a918..c189cb3f5b22e6 100644 --- a/test/parallel/test-debugger-backtrace.js +++ b/test/parallel/test-debugger-backtrace.js @@ -13,7 +13,7 @@ const path = require('path'); { const scriptFullPath = fixtures.path('debugger', 'backtrace.js'); const script = path.relative(process.cwd(), scriptFullPath); - const cli = startCLI(['--port=0', script]); + const cli = startCLI([script]); async function runTest() { try { diff --git a/test/parallel/test-debugger-break.js b/test/parallel/test-debugger-break.js index 8e3a290321a2e7..8f37b71571225e 100644 --- a/test/parallel/test-debugger-break.js +++ b/test/parallel/test-debugger-break.js @@ -11,7 +11,7 @@ const path = require('path'); const scriptFullPath = fixtures.path('debugger', 'break.js'); const script = path.relative(process.cwd(), scriptFullPath); -const cli = startCLI(['--port=0', script]); +const cli = startCLI([script]); (async () => { await cli.waitForInitialBreak(); diff --git a/test/parallel/test-debugger-breakpoint-exists.js b/test/parallel/test-debugger-breakpoint-exists.js index 872fad2d82400c..e2efa8182e4ade 100644 --- a/test/parallel/test-debugger-breakpoint-exists.js +++ b/test/parallel/test-debugger-breakpoint-exists.js @@ -9,7 +9,7 @@ const startCLI = require('../common/debugger'); // Test for "Breakpoint at specified 
location already exists" error. const script = fixtures.path('debugger', 'three-lines.js'); -const cli = startCLI(['--port=0', script]); +const cli = startCLI([script]); (async () => { try { diff --git a/test/parallel/test-debugger-clear-breakpoints.js b/test/parallel/test-debugger-clear-breakpoints.js index 74623ec4371331..91349e105a1160 100644 --- a/test/parallel/test-debugger-clear-breakpoints.js +++ b/test/parallel/test-debugger-clear-breakpoints.js @@ -13,7 +13,7 @@ const path = require('path'); { const scriptFullPath = fixtures.path('debugger', 'break.js'); const script = path.relative(process.cwd(), scriptFullPath); - const cli = startCLI(['--port=0', script]); + const cli = startCLI([script]); function onFatal(error) { cli.quit(); diff --git a/test/parallel/test-debugger-exceptions.js b/test/parallel/test-debugger-exceptions.js index 7f3e192251e924..3f75161a6b6e3d 100644 --- a/test/parallel/test-debugger-exceptions.js +++ b/test/parallel/test-debugger-exceptions.js @@ -13,7 +13,7 @@ const path = require('path'); { const scriptFullPath = fixtures.path('debugger', 'exceptions.js'); const script = path.relative(process.cwd(), scriptFullPath); - const cli = startCLI(['--port=0', script]); + const cli = startCLI([script]); (async () => { try { diff --git a/test/parallel/test-debugger-exec-scope.mjs b/test/parallel/test-debugger-exec-scope.mjs index 3e4241cd018fc4..08b37e279556f2 100644 --- a/test/parallel/test-debugger-exec-scope.mjs +++ b/test/parallel/test-debugger-exec-scope.mjs @@ -7,7 +7,7 @@ import startCLI from '../common/debugger.js'; import assert from 'assert'; -const cli = startCLI(['--port=0', path('debugger/backtrace.js')]); +const cli = startCLI([path('debugger/backtrace.js')]); try { await cli.waitForInitialBreak(); diff --git a/test/parallel/test-debugger-exec.js b/test/parallel/test-debugger-exec.js index 536e0128ea2a84..2b6c50ab61ff2b 100644 --- a/test/parallel/test-debugger-exec.js +++ b/test/parallel/test-debugger-exec.js @@ -8,7 +8,7 @@ const startCLI = require('../common/debugger'); const assert = require('assert'); -const cli = startCLI(['--port=0', fixtures.path('debugger/alive.js')]); +const cli = startCLI([fixtures.path('debugger/alive.js')]); async function waitInitialBreak() { try { diff --git a/test/parallel/test-debugger-heap-profiler.js b/test/parallel/test-debugger-heap-profiler.js index 56f0d8b5184f18..96c3ed0e1a556e 100644 --- a/test/parallel/test-debugger-heap-profiler.js +++ b/test/parallel/test-debugger-heap-profiler.js @@ -16,7 +16,7 @@ const filename = tmpdir.resolve('node.heapsnapshot'); // Heap profiler take snapshot. 
{ const opts = { cwd: tmpdir.path }; - const cli = startCLI(['--port=0', fixtures.path('debugger/empty.js')], [], opts); + const cli = startCLI([fixtures.path('debugger/empty.js')], [], opts); async function waitInitialBreak() { try { diff --git a/test/parallel/test-debugger-help.mjs b/test/parallel/test-debugger-help.mjs index a4e659113bf79c..64f569831fba5e 100644 --- a/test/parallel/test-debugger-help.mjs +++ b/test/parallel/test-debugger-help.mjs @@ -7,7 +7,7 @@ import startCLI from '../common/debugger.js'; import assert from 'assert'; -const cli = startCLI(['--port=0', path('debugger', 'empty.js')]); +const cli = startCLI([path('debugger', 'empty.js')]); try { await cli.waitForInitialBreak(); diff --git a/test/parallel/test-debugger-invalid-json.mjs b/test/parallel/test-debugger-invalid-json.mjs index e4754a465fcf5f..1c2b4b475b7e3a 100644 --- a/test/parallel/test-debugger-invalid-json.mjs +++ b/test/parallel/test-debugger-invalid-json.mjs @@ -17,7 +17,7 @@ const host = '127.0.0.1'; server.listen(0, mustCall(async () => { const port = server.address().port; - const cli = startCLI([`${host}:${port}`]); + const cli = startCLI([`${host}:${port}`], [], {}, { randomPort: false }); try { const code = await cli.quit(); assert.strictEqual(code, 1); @@ -35,7 +35,7 @@ const host = '127.0.0.1'; server.listen(0, host, mustCall(async () => { const port = server.address().port; - const cli = startCLI([`${host}:${port}`]); + const cli = startCLI([`${host}:${port}`], [], {}, { randomPort: false }); try { const code = await cli.quit(); assert.strictEqual(code, 1); diff --git a/test/parallel/test-debugger-list.js b/test/parallel/test-debugger-list.js index 6f2e36e763a651..594874e140b306 100644 --- a/test/parallel/test-debugger-list.js +++ b/test/parallel/test-debugger-list.js @@ -8,7 +8,7 @@ const startCLI = require('../common/debugger'); const assert = require('assert'); -const cli = startCLI(['--port=0', fixtures.path('debugger/three-lines.js')]); +const cli = startCLI([fixtures.path('debugger/three-lines.js')]); (async () => { await cli.waitForInitialBreak(); diff --git a/test/parallel/test-debugger-low-level.js b/test/parallel/test-debugger-low-level.js index 31f67849f54748..93c8e1b625591d 100644 --- a/test/parallel/test-debugger-low-level.js +++ b/test/parallel/test-debugger-low-level.js @@ -9,7 +9,7 @@ const assert = require('assert'); // Debugger agent direct access. 
{ - const cli = startCLI(['--port=0', fixtures.path('debugger/three-lines.js')]); + const cli = startCLI([fixtures.path('debugger/three-lines.js')]); const scriptPattern = /^\* (\d+): \S+debugger(?:\/|\\)three-lines\.js/m; async function testDebuggerLowLevel() { diff --git a/test/parallel/test-debugger-object-type-remote-object.js b/test/parallel/test-debugger-object-type-remote-object.js index a055e8ce0fb9e4..7404eae3963447 100644 --- a/test/parallel/test-debugger-object-type-remote-object.js +++ b/test/parallel/test-debugger-object-type-remote-object.js @@ -8,7 +8,7 @@ const startCLI = require('../common/debugger'); const assert = require('assert'); -const cli = startCLI(['--port=0', fixtures.path('debugger/empty.js')]); +const cli = startCLI([fixtures.path('debugger/empty.js')]); (async () => { await cli.waitForInitialBreak(); diff --git a/test/parallel/test-debugger-preserve-breaks.js b/test/parallel/test-debugger-preserve-breaks.js index 00168c570d6b7c..bb0eba961432ec 100644 --- a/test/parallel/test-debugger-preserve-breaks.js +++ b/test/parallel/test-debugger-preserve-breaks.js @@ -14,7 +14,7 @@ const script = path.relative(process.cwd(), scriptFullPath); // Run after quit. const runTest = async () => { - const cli = startCLI(['--port=0', script]); + const cli = startCLI([script]); try { await cli.waitForInitialBreak(); await cli.waitForPrompt(); diff --git a/test/parallel/test-debugger-profile-command.js b/test/parallel/test-debugger-profile-command.js index da81dfc6e10569..06818c2132d9c5 100644 --- a/test/parallel/test-debugger-profile-command.js +++ b/test/parallel/test-debugger-profile-command.js @@ -10,7 +10,7 @@ const assert = require('assert'); const fs = require('fs'); const path = require('path'); -const cli = startCLI(['--port=0', fixtures.path('debugger/empty.js')]); +const cli = startCLI([fixtures.path('debugger/empty.js')]); const rootDir = path.resolve(__dirname, '..', '..'); diff --git a/test/parallel/test-debugger-profile.js b/test/parallel/test-debugger-profile.js index a59512cc1c6963..31f57ee06da65d 100644 --- a/test/parallel/test-debugger-profile.js +++ b/test/parallel/test-debugger-profile.js @@ -14,7 +14,7 @@ function delay(ms) { // Profiles. { - const cli = startCLI(['--port=0', fixtures.path('debugger/empty.js')], [], { + const cli = startCLI([fixtures.path('debugger/empty.js')], [], { env: { ...process.env, // When this test is run with NODE_V8_COVERAGE, it clobbers the inspector diff --git a/test/parallel/test-debugger-random-port-with-inspect-port.js b/test/parallel/test-debugger-random-port-with-inspect-port.js index 3acc6bdd733eb0..a7e74c0dbae6d6 100644 --- a/test/parallel/test-debugger-random-port-with-inspect-port.js +++ b/test/parallel/test-debugger-random-port-with-inspect-port.js @@ -10,7 +10,7 @@ const assert = require('assert'); // Random port with --inspect-port=0. 
const script = fixtures.path('debugger', 'three-lines.js'); -const cli = startCLI(['--inspect-port=0', script]); +const cli = startCLI(['--inspect-port=0', script], [], {}, { randomPort: false }); (async () => { await cli.waitForInitialBreak(); diff --git a/test/parallel/test-debugger-random-port.js b/test/parallel/test-debugger-random-port.js index da8656cf1c7115..b6a0c98797423f 100644 --- a/test/parallel/test-debugger-random-port.js +++ b/test/parallel/test-debugger-random-port.js @@ -12,7 +12,7 @@ const assert = require('assert'); { const script = fixtures.path('debugger', 'three-lines.js'); - const cli = startCLI(['--port=0', script]); + const cli = startCLI([script]); cli.waitForInitialBreak() .then(() => cli.waitForPrompt()) diff --git a/test/parallel/test-debugger-restart-message.js b/test/parallel/test-debugger-restart-message.js index e4001b47ee2df4..190d0c18ccc081 100644 --- a/test/parallel/test-debugger-restart-message.js +++ b/test/parallel/test-debugger-restart-message.js @@ -14,7 +14,7 @@ const startCLI = require('../common/debugger'); // Using `restart` should result in only one "Connect/For help" message. { const script = fixtures.path('debugger', 'three-lines.js'); - const cli = startCLI(['--port=0', script]); + const cli = startCLI([script]); const listeningRegExp = /Debugger listening on/g; diff --git a/test/parallel/test-debugger-run-after-quit-restart.js b/test/parallel/test-debugger-run-after-quit-restart.js index 0e1048699206dc..2da4cea6359c85 100644 --- a/test/parallel/test-debugger-run-after-quit-restart.js +++ b/test/parallel/test-debugger-run-after-quit-restart.js @@ -13,7 +13,7 @@ const path = require('path'); { const scriptFullPath = fixtures.path('debugger', 'three-lines.js'); const script = path.relative(process.cwd(), scriptFullPath); - const cli = startCLI(['--port=0', script]); + const cli = startCLI([script]); function onFatal(error) { cli.quit(); diff --git a/test/parallel/test-debugger-sb-before-load.js b/test/parallel/test-debugger-sb-before-load.js index 416147b4bb64c5..e2267156b7420b 100644 --- a/test/parallel/test-debugger-sb-before-load.js +++ b/test/parallel/test-debugger-sb-before-load.js @@ -17,7 +17,7 @@ const script = path.relative(process.cwd(), scriptFullPath); const otherScriptFullPath = fixtures.path('debugger', 'cjs', 'other.js'); const otherScript = path.relative(process.cwd(), otherScriptFullPath); -const cli = startCLI(['--port=0', script]); +const cli = startCLI([script]); (async () => { await cli.waitForInitialBreak(); diff --git a/test/parallel/test-debugger-scripts.js b/test/parallel/test-debugger-scripts.js index 83f578cf1cabbb..b0f611bd1c6491 100644 --- a/test/parallel/test-debugger-scripts.js +++ b/test/parallel/test-debugger-scripts.js @@ -11,7 +11,7 @@ const assert = require('assert'); // List scripts. 
{ const script = fixtures.path('debugger', 'three-lines.js'); - const cli = startCLI(['--port=0', script]); + const cli = startCLI([script]); (async () => { try { diff --git a/test/parallel/test-debugger-set-context-line-number.mjs b/test/parallel/test-debugger-set-context-line-number.mjs index 5c6e281c1d3b4a..adb6d9ab9e52b0 100644 --- a/test/parallel/test-debugger-set-context-line-number.mjs +++ b/test/parallel/test-debugger-set-context-line-number.mjs @@ -7,7 +7,7 @@ import startCLI from '../common/debugger.js'; import assert from 'assert'; const script = path('debugger', 'twenty-lines.js'); -const cli = startCLI(['--port=0', script]); +const cli = startCLI([script]); function onFatal(error) { cli.quit(); diff --git a/test/parallel/test-debugger-unavailable-port.js b/test/parallel/test-debugger-unavailable-port.js index e2920312ffc21c..4a43d643988900 100644 --- a/test/parallel/test-debugger-unavailable-port.js +++ b/test/parallel/test-debugger-unavailable-port.js @@ -19,7 +19,7 @@ const { createServer } = require('net'); try { const script = fixtures.path('debugger', 'three-lines.js'); - const cli = startCLI([`--port=${port}`, script]); + const cli = startCLI([`--port=${port}`, script], [], {}, { randomPort: false }); const code = await cli.quit(); assert.doesNotMatch( diff --git a/test/parallel/test-debugger-use-strict.js b/test/parallel/test-debugger-use-strict.js index dce928697659ea..ae82a9fc82352b 100644 --- a/test/parallel/test-debugger-use-strict.js +++ b/test/parallel/test-debugger-use-strict.js @@ -11,7 +11,7 @@ const assert = require('assert'); // Test for files that start with strict directive. { const script = fixtures.path('debugger', 'use-strict.js'); - const cli = startCLI(['--port=0', script]); + const cli = startCLI([script]); function onFatal(error) { cli.quit(); diff --git a/test/parallel/test-debugger-watch-validation.js b/test/parallel/test-debugger-watch-validation.js index 2ccd889646729d..46307c18d55526 100644 --- a/test/parallel/test-debugger-watch-validation.js +++ b/test/parallel/test-debugger-watch-validation.js @@ -8,7 +8,7 @@ const startCLI = require('../common/debugger'); const assert = require('assert'); -const cli = startCLI(['--port=0', fixtures.path('debugger/break.js')]); +const cli = startCLI([fixtures.path('debugger/break.js')]); (async () => { await cli.waitForInitialBreak(); diff --git a/test/parallel/test-debugger-watchers.mjs b/test/parallel/test-debugger-watchers.mjs index d2492cde67c84e..4ff7ea00a22258 100644 --- a/test/parallel/test-debugger-watchers.mjs +++ b/test/parallel/test-debugger-watchers.mjs @@ -7,7 +7,7 @@ import startCLI from '../common/debugger.js'; import assert from 'assert'; const script = path('debugger', 'break.js'); -const cli = startCLI(['--port=0', script]); +const cli = startCLI([script]); function onFatal(error) { cli.quit(); diff --git a/test/parallel/test-dns-resolver-max-timeout.js b/test/parallel/test-dns-resolver-max-timeout.js new file mode 100644 index 00000000000000..fff1a705ca4116 --- /dev/null +++ b/test/parallel/test-dns-resolver-max-timeout.js @@ -0,0 +1,77 @@ +'use strict'; +const common = require('../common'); +const dnstools = require('../common/dns'); +const dns = require('dns'); +const assert = require('assert'); +const dgram = require('dgram'); + +[ + -1, + 1.1, + NaN, + undefined, + {}, + [], + null, + function() {}, + Symbol(), + true, + Infinity, +].forEach((maxTimeout) => { + try { + new dns.Resolver({ maxTimeout }); + } catch (e) { + assert.ok(/ERR_OUT_OF_RANGE|ERR_INVALID_ARG_TYPE/i.test(e.code)); + 
} +}); + +const server = dgram.createSocket('udp4'); +const nxdomain = 'nxdomain.org'; +const domain = 'example.org'; +const answers = [{ type: 'A', address: '1.2.3.4', ttl: 123, domain }]; + +server.on('message', common.mustCallAtLeast((msg, { address, port }) => { + const parsed = dnstools.parseDNSPacket(msg); + if (parsed.questions[0].domain === nxdomain) { + return; + } + assert.strictEqual(parsed.questions[0].domain, domain); + server.send(dnstools.writeDNSPacket({ + id: parsed.id, + questions: parsed.questions, + answers: answers, + }), port, address); +}), 1); + +server.bind(0, common.mustCall(async () => { + const address = server.address(); + // Test if the Resolver works as before. + const resolver = new dns.promises.Resolver({ timeout: 1000, tries: 1, maxTimeout: 1000 }); + resolver.setServers([`127.0.0.1:${address.port}`]); + const res = await resolver.resolveAny('example.org'); + assert.strictEqual(res.length, 1); + assert.strictEqual(res.length, answers.length); + assert.strictEqual(res[0].address, answers[0].address); + + // Test that maxTimeout is effective. + // Without maxTimeout, the timeout will keep increasing when retrying. + const timeout1 = await timeout(address, { timeout: 500, tries: 3 }); + // With maxTimeout, the timeout will always be 500 when retrying. + const timeout2 = await timeout(address, { timeout: 500, tries: 3, maxTimeout: 500 }); + console.log(`timeout1: ${timeout1}, timeout2: ${timeout2}`); + assert.strictEqual(timeout1 !== undefined && timeout2 !== undefined, true); + assert.strictEqual(timeout1 > timeout2, true); + server.close(); +})); + +async function timeout(address, options) { + const start = Date.now(); + const resolver = new dns.promises.Resolver(options); + resolver.setServers([`127.0.0.1:${address.port}`]); + try { + await resolver.resolveAny(nxdomain); + } catch (e) { + assert.strictEqual(e.code, 'ETIMEOUT'); + return Date.now() - start; + } +} diff --git a/test/parallel/test-fs-glob.mjs b/test/parallel/test-fs-glob.mjs index b3708f466379fa..490ea2478232ae 100644 --- a/test/parallel/test-fs-glob.mjs +++ b/test/parallel/test-fs-glob.mjs @@ -2,7 +2,7 @@ import * as common from '../common/index.mjs'; import tmpdir from '../common/tmpdir.js'; import { resolve, dirname, sep, relative, join, isAbsolute } from 'node:path'; import { mkdir, writeFile, symlink, glob as asyncGlob } from 'node:fs/promises'; -import { glob, globSync, Dirent } from 'node:fs'; +import { glob, globSync, Dirent, chmodSync } from 'node:fs'; import { test, describe } from 'node:test'; import { pathToFileURL } from 'node:url'; import { promisify } from 'node:util'; @@ -518,3 +518,24 @@ describe('fsPromises glob - exclude', function() { }); } }); + +describe('glob - with restricted directory', function() { + test('*', async () => { + const restrictedDir = tmpdir.resolve('restricted'); + await mkdir(restrictedDir, { recursive: true }); + chmodSync(restrictedDir, 0o000); + try { + const results = []; + for await (const match of asyncGlob('*', { cwd: restrictedDir })) { + results.push(match); + } + assert.ok(true, 'glob completed without throwing on readdir error'); + } finally { + try { + chmodSync(restrictedDir, 0o755); + } catch { + // ignore + } + } + }); +}); diff --git a/test/parallel/test-fs-open.js b/test/parallel/test-fs-open.js index 56157b0183de63..2d9243955e662d 100644 --- a/test/parallel/test-fs-open.js +++ b/test/parallel/test-fs-open.js @@ -53,7 +53,7 @@ async function promise() { await (await fs.promises.open(__filename, 'r')).close(); } 
-promise().then(common.mustCall()).catch(common.mustNotCall()); +promise().then(common.mustCall()); assert.throws( () => fs.open(__filename, 'r', 'boom', common.mustNotCall()), diff --git a/test/parallel/test-fs-read-zero-length.js b/test/parallel/test-fs-read-zero-length.js index ac2efc73f5107b..1bac7ed210319c 100644 --- a/test/parallel/test-fs-read-zero-length.js +++ b/test/parallel/test-fs-read-zero-length.js @@ -8,7 +8,7 @@ const fd = fs.openSync(filepath, 'r'); const bufferAsync = Buffer.alloc(0); const bufferSync = Buffer.alloc(0); -fs.read(fd, bufferAsync, 0, 0, 0, common.mustCall((err, bytesRead) => { +fs.read(fd, bufferAsync, 0, 0, 0, common.mustSucceed((bytesRead) => { assert.strictEqual(bytesRead, 0); assert.deepStrictEqual(bufferAsync, Buffer.alloc(0)); })); diff --git a/test/parallel/test-http-keep-alive-timeout-buffer.js b/test/parallel/test-http-keep-alive-timeout-buffer.js new file mode 100644 index 00000000000000..fee8a26f66a637 --- /dev/null +++ b/test/parallel/test-http-keep-alive-timeout-buffer.js @@ -0,0 +1,39 @@ +'use strict'; + +const common = require('../common'); +const http = require('http'); +const assert = require('assert'); + +const server = http.createServer(common.mustCall((req, res) => { + const body = 'buffer test\n'; + + res.writeHead(200, { 'Content-Length': body.length }); + res.write(body); + res.end(); +})); + +server.keepAliveTimeout = 100; + +if (server.keepAliveTimeoutBuffer === undefined) { + server.keepAliveTimeoutBuffer = 1000; +} +assert.strictEqual(server.keepAliveTimeoutBuffer, 1000); + +server.listen(0, () => { + http.get({ + port: server.address().port, + path: '/', + }, (res) => { + res.resume(); + server.close(); + }); +}); + +{ + const customBuffer = 3000; + const server = http.createServer(() => {}); + server.keepAliveTimeout = 200; + server.keepAliveTimeoutBuffer = customBuffer; + assert.strictEqual(server.keepAliveTimeoutBuffer, customBuffer); + server.close(); +} diff --git a/test/parallel/test-http-outgoing-buffer.js b/test/parallel/test-http-outgoing-buffer.js index d7db15f0ffae9c..ba102b382542b1 100644 --- a/test/parallel/test-http-outgoing-buffer.js +++ b/test/parallel/test-http-outgoing-buffer.js @@ -1,8 +1,7 @@ -// Flags: --expose-internals 'use strict'; require('../common'); const assert = require('assert'); -const { getDefaultHighWaterMark } = require('internal/streams/state'); +const { getDefaultHighWaterMark } = require('stream'); const http = require('http'); const OutgoingMessage = http.OutgoingMessage; diff --git a/test/parallel/test-http2-raw-headers.js b/test/parallel/test-http2-raw-headers.js index cbcc73692b0a3d..8a84542a130fae 100644 --- a/test/parallel/test-http2-raw-headers.js +++ b/test/parallel/test-http2-raw-headers.js @@ -39,6 +39,16 @@ const http2 = require('http2'); 'a', 'c', ]).end(); + assert.deepStrictEqual(req.sentHeaders, { + '__proto__': null, + ':path': '/foobar', + ':scheme': 'http', + ':authority': `localhost:${server.address().port}`, + ':method': 'GET', + 'a': [ 'b', 'c' ], + 'x-FOO': 'bar', + }); + req.on('response', common.mustCall((headers) => { assert.strictEqual(headers[':status'], 200); client.close(); diff --git a/test/parallel/test-http2-sensitive-headers.js b/test/parallel/test-http2-sensitive-headers.js index bbd00ae0cdd117..006070d6337c49 100644 --- a/test/parallel/test-http2-sensitive-headers.js +++ b/test/parallel/test-http2-sensitive-headers.js @@ -72,6 +72,16 @@ const { duplexPair } = require('stream'); const req = client.request(rawHeaders); + assert.deepStrictEqual(req.sentHeaders, 
{ + '__proto__': null, + ':method': 'GET', + ':authority': 'localhost:80', + ':scheme': 'http', + ':path': '/', + 'secret': 'secret-value', + [http2.sensitiveHeaders]: [ 'secret' ], + }); + req.on('response', common.mustCall((headers) => { assert.strictEqual(headers[':status'], 200); })); diff --git a/test/parallel/test-net-blocklist.js b/test/parallel/test-net-blocklist.js index 901b9a4dfb7b02..7048405aa97fec 100644 --- a/test/parallel/test-net-blocklist.js +++ b/test/parallel/test-net-blocklist.js @@ -3,7 +3,6 @@ const common = require('../common'); const net = require('net'); const assert = require('assert'); - const blockList = new net.BlockList(); blockList.addAddress('127.0.0.1'); blockList.addAddress('127.0.0.2'); diff --git a/test/parallel/test-net-listen-exclusive-random-ports.js b/test/parallel/test-net-listen-exclusive-random-ports.js index 66dfb598204e7b..b9e92a750dbb64 100644 --- a/test/parallel/test-net-listen-exclusive-random-ports.js +++ b/test/parallel/test-net-listen-exclusive-random-ports.js @@ -16,7 +16,7 @@ if (cluster.isPrimary) { worker2.on('message', function(port2) { assert.strictEqual(port2, port2 | 0, `second worker could not listen on port ${port2}`); - assert.notStrictEqual(port1, port2, 'ports should not be equal'); + assert.notStrictEqual(port1, port2); worker1.kill(); worker2.kill(); }); diff --git a/test/parallel/test-node-output-sourcemaps.mjs b/test/parallel/test-node-output-sourcemaps.mjs index 81c36934ba0f3e..c11c2c36735dae 100644 --- a/test/parallel/test-node-output-sourcemaps.mjs +++ b/test/parallel/test-node-output-sourcemaps.mjs @@ -27,6 +27,7 @@ describe('sourcemaps output', { concurrency: !process.env.TEST_PARALLEL }, () => { name: 'source-map/output/source_map_sourcemapping_url_string.js' }, { name: 'source-map/output/source_map_throw_async_stack_trace.mjs' }, { name: 'source-map/output/source_map_throw_catch.js' }, + { name: 'source-map/output/source_map_throw_class_method.js' }, { name: 'source-map/output/source_map_throw_construct.mjs' }, { name: 'source-map/output/source_map_throw_first_tick.js' }, { name: 'source-map/output/source_map_throw_icu.js' }, diff --git a/test/parallel/test-path-win32-normalize-device-names.js b/test/parallel/test-path-win32-normalize-device-names.js index 927bc5cec8a2e5..2c6dcf142a2674 100644 --- a/test/parallel/test-path-win32-normalize-device-names.js +++ b/test/parallel/test-path-win32-normalize-device-names.js @@ -45,10 +45,18 @@ const normalizeDeviceNameTests = [ { input: 'COM1:', expected: '.\\COM1:.' }, { input: 'COM9:', expected: '.\\COM9:.' }, + { input: 'COM¹:', expected: '.\\COM¹:.' }, + { input: 'COM²:', expected: '.\\COM²:.' }, + { input: 'COM³:', expected: '.\\COM³:.' }, { input: 'COM1:.\\..\\..\\foo', expected: '.\\COM1:..\\..\\foo' }, + { input: 'COM¹:.\\..\\..\\foo', expected: '.\\COM¹:..\\..\\foo' }, { input: 'LPT1:', expected: '.\\LPT1:.' }, + { input: 'LPT¹:', expected: '.\\LPT¹:.' }, + { input: 'LPT²:', expected: '.\\LPT²:.' }, + { input: 'LPT³:', expected: '.\\LPT³:.' }, { input: 'LPT9:', expected: '.\\LPT9:.' 
}, { input: 'LPT1:.\\..\\..\\foo', expected: '.\\LPT1:..\\..\\foo' }, + { input: 'LPT¹:.\\..\\..\\foo', expected: '.\\LPT¹:..\\..\\foo' }, { input: 'LpT5:/another/path', expected: '.\\LpT5:another\\path' }, { input: 'C:\\foo', expected: 'C:\\foo' }, diff --git a/test/parallel/test-perf-hooks-histogram.js b/test/parallel/test-perf-hooks-histogram.js index 37fcdfb3fca06c..e625f4eba09b50 100644 --- a/test/parallel/test-perf-hooks-histogram.js +++ b/test/parallel/test-perf-hooks-histogram.js @@ -3,6 +3,7 @@ const common = require('../common'); const { + deepStrictEqual, ok, strictEqual, throws, @@ -58,6 +59,10 @@ const { inspect } = require('util'); strictEqual(h.percentileBigInt(1), 1n); strictEqual(h.percentileBigInt(100), 1n); + deepStrictEqual(h.percentiles, new Map([[0, 1], [100, 1]])); + + deepStrictEqual(h.percentilesBigInt, new Map([[0, 1n], [100, 1n]])); + const mc = new MessageChannel(); mc.port1.onmessage = common.mustCall(({ data }) => { strictEqual(h.min, 1); diff --git a/test/parallel/test-process-env-allowed-flags-are-documented.js b/test/parallel/test-process-env-allowed-flags-are-documented.js index 6a79aaebbf959b..afd43cfffe638f 100644 --- a/test/parallel/test-process-env-allowed-flags-are-documented.js +++ b/test/parallel/test-process-env-allowed-flags-are-documented.js @@ -119,6 +119,7 @@ assert(undocumented.delete('--no-debug-arraybuffer-allocations')); assert(undocumented.delete('--es-module-specifier-resolution')); assert(undocumented.delete('--experimental-report')); assert(undocumented.delete('--experimental-worker')); +assert(undocumented.delete('--experimental-wasm-modules')); assert(undocumented.delete('--node-snapshot')); assert(undocumented.delete('--no-node-snapshot')); assert(undocumented.delete('--loader')); diff --git a/test/parallel/test-process-execve-no-args.js b/test/parallel/test-process-execve-no-args.js new file mode 100644 index 00000000000000..908512622cf00f --- /dev/null +++ b/test/parallel/test-process-execve-no-args.js @@ -0,0 +1,25 @@ +'use strict'; + +const { skip, isWindows, isIBMi } = require('../common'); +const { fail } = require('assert'); +const { isMainThread } = require('worker_threads'); +const { dirname, join } = require('path'); +const { existsSync } = require('fs'); + +if (!isMainThread) { + skip('process.execve is not available in Workers'); +} else if (isWindows || isIBMi) { + skip('process.execve is not available in Windows or IBM i'); +} + +// Get full path to the executable used for the test +const executable = join(dirname(process.execPath), 'nop'); + +// Sanity check that the binary exists +if (!existsSync(executable)) { + skip(executable + ' binary is not available'); +} + +process.execve(executable); +// If process.execve succeeds, this should never be executed. 
+fail('process.execve failed'); diff --git a/test/parallel/test-process-threadCpuUsage-main-thread.js b/test/parallel/test-process-threadCpuUsage-main-thread.js new file mode 100644 index 00000000000000..82c98783987c2a --- /dev/null +++ b/test/parallel/test-process-threadCpuUsage-main-thread.js @@ -0,0 +1,87 @@ +'use strict'; + +const { isSunOS } = require('../common'); + +const { ok, throws, notStrictEqual } = require('assert'); + +function validateResult(result) { + notStrictEqual(result, null); + + ok(Number.isFinite(result.user)); + ok(Number.isFinite(result.system)); + + ok(result.user >= 0); + ok(result.system >= 0); +} + +// Test that process.threadCpuUsage() works on the main thread +// The if check and the else branch should be removed once SmartOS support is fixed in +// https://github.com/libuv/libuv/issues/4706 +if (!isSunOS) { + const result = process.threadCpuUsage(); + + // Validate the result of calling with no previous value argument. + validateResult(process.threadCpuUsage()); + + // Validate the result of calling with a previous value argument. + validateResult(process.threadCpuUsage(result)); + + // Ensure the results are >= the previous. + let thisUsage; + let lastUsage = process.threadCpuUsage(); + for (let i = 0; i < 10; i++) { + thisUsage = process.threadCpuUsage(); + validateResult(thisUsage); + ok(thisUsage.user >= lastUsage.user); + ok(thisUsage.system >= lastUsage.system); + lastUsage = thisUsage; + } +} else { + throws( + () => process.threadCpuUsage(), + { + code: 'ERR_OPERATION_FAILED', + name: 'Error', + message: 'Operation failed: threadCpuUsage is not available on SunOS' + } + ); +} + +// Test argument validation +{ + throws( + () => process.threadCpuUsage(123), + { + code: 'ERR_INVALID_ARG_TYPE', + name: 'TypeError', + message: 'The "prevValue" argument must be of type object. Received type number (123)' + } + ); + + throws( + () => process.threadCpuUsage([]), + { + code: 'ERR_INVALID_ARG_TYPE', + name: 'TypeError', + message: 'The "prevValue" argument must be of type object. Received an instance of Array' + } + ); + + throws( + () => process.threadCpuUsage({ user: -123 }), + { + code: 'ERR_INVALID_ARG_VALUE', + name: 'RangeError', + message: "The property 'prevValue.user' is invalid. Received -123" + } + ); + + throws( + () => process.threadCpuUsage({ user: 0, system: 'bar' }), + { + code: 'ERR_INVALID_ARG_TYPE', + name: 'TypeError', + message: "The \"prevValue.system\" property must be of type number. 
Received type string ('bar')" + } + ); +} diff --git a/test/parallel/test-process-threadCpuUsage-worker-threads.js b/test/parallel/test-process-threadCpuUsage-worker-threads.js new file mode 100644 index 00000000000000..e306e08e8266e2 --- /dev/null +++ b/test/parallel/test-process-threadCpuUsage-worker-threads.js @@ -0,0 +1,91 @@ +'use strict'; + +const { mustCall, platformTimeout, hasCrypto, skip, isSunOS } = require('../common'); + +if (!hasCrypto) { + skip('missing crypto'); +}; + +// This block can be removed once SmartOS support is fixed in +// https://github.com/libuv/libuv/issues/4706 +// The behavior on SunOS is tested in +// test/parallel/test-process-threadCpuUsage-main-thread.js +if (isSunOS) { + skip('Operation not supported yet on SmartOS'); +} + +const { ok } = require('assert'); +const { randomBytes, createHash } = require('crypto'); +const { once } = require('events'); +const { Worker, parentPort, workerData } = require('worker_threads'); + +const FREQUENCIES = [100, 500, 1000]; + +function performLoad() { + const buffer = randomBytes(1e8); + + // Do some work + return setInterval(() => { + createHash('sha256').update(buffer).end(buffer); + }, platformTimeout(workerData?.frequency ?? 100)); +} + +function getUsages() { + return { process: process.cpuUsage(), thread: process.threadCpuUsage() }; +} + +function validateResults(results) { + // This test should have checked that the CPU usage of each thread is greater + // than the previous one, while the process one was not. + // Unfortunately, the real values are not really predictable on the CI so we + // just check that all the values are positive numbers. + for (let i = 0; i < 3; i++) { + ok(typeof results[i].process.user === 'number'); + ok(results[i].process.user >= 0); + + ok(typeof results[i].process.system === 'number'); + ok(results[i].process.system >= 0); + + ok(typeof results[i].thread.user === 'number'); + ok(results[i].thread.user >= 0); + + ok(typeof results[i].thread.system === 'number'); + ok(results[i].thread.system >= 0); + } +} + +// The main thread will spawn three more threads, then after a while it will ask all of them to +// report the thread CPU usage and exit. 
+if (!workerData?.frequency) { // Do not use isMainThread here otherwise test will not run in --worker mode + const workers = []; + for (const frequency of FREQUENCIES) { + workers.push(new Worker(__filename, { workerData: { frequency } })); + } + + setTimeout(mustCall(async () => { + clearInterval(interval); + + const results = [getUsages()]; + + for (const worker of workers) { + const statusPromise = once(worker, 'message'); + + worker.postMessage('done'); + const [status] = await statusPromise; + results.push(status); + worker.terminate(); + } + + validateResults(results); + }), platformTimeout(5000)); + +} else { + parentPort.on('message', () => { + clearInterval(interval); + parentPort.postMessage(getUsages()); + process.exit(0); + }); +} + +// Perform load on each thread +const interval = performLoad(); diff --git a/test/parallel/test-runner-assert.js b/test/parallel/test-runner-assert.js index 74384947278e4e..c74f9d03a28ee4 100644 --- a/test/parallel/test-runner-assert.js +++ b/test/parallel/test-runner-assert.js @@ -8,6 +8,8 @@ test('expected methods are on t.assert', (t) => { 'AssertionError', 'CallTracker', 'strict', + 'Assert', + 'options', ]; const assertKeys = Object.keys(assert).filter((key) => !uncopiedKeys.includes(key)); const expectedKeys = ['snapshot', 'fileSnapshot'].concat(assertKeys).sort(); diff --git a/test/parallel/test-single-executable-blob-config-errors.js b/test/parallel/test-single-executable-blob-config-errors.js index 364a533c0c90fb..a30850010e2e4d 100644 --- a/test/parallel/test-single-executable-blob-config-errors.js +++ b/test/parallel/test-single-executable-blob-config-errors.js @@ -5,113 +5,88 @@ require('../common'); const tmpdir = require('../common/tmpdir'); const { writeFileSync, mkdirSync } = require('fs'); -const { spawnSync } = require('child_process'); -const assert = require('assert'); +const { spawnSyncAndAssert } = require('../common/child_process'); { tmpdir.refresh(); const config = 'non-existent-relative.json'; - const child = spawnSync( + spawnSyncAndAssert( process.execPath, ['--experimental-sea-config', config], { cwd: tmpdir.path, + }, { + status: 1, + stderr: /Cannot read single executable configuration from non-existent-relative\.json/ }); - const stderr = child.stderr.toString(); - assert.strictEqual(child.status, 1); - assert.match( - stderr, - /Cannot read single executable configuration from non-existent-relative\.json/ - ); } { tmpdir.refresh(); const config = tmpdir.resolve('non-existent-absolute.json'); - const child = spawnSync( + spawnSyncAndAssert( process.execPath, ['--experimental-sea-config', config], { cwd: tmpdir.path, + }, { + status: 1, + stderr: /Cannot read single executable configuration from .*non-existent-absolute\.json/ }); - const stderr = child.stderr.toString(); - assert.strictEqual(child.status, 1); - assert( - stderr.includes( - `Cannot read single executable configuration from ${config}` - ) - ); } { tmpdir.refresh(); const config = tmpdir.resolve('invalid.json'); writeFileSync(config, '\n{\n"main"', 'utf8'); - const child = spawnSync( + spawnSyncAndAssert( process.execPath, ['--experimental-sea-config', config], { cwd: tmpdir.path, + }, { + status: 1, + stderr: /INCOMPLETE_ARRAY_OR_OBJECT/ }); - const stderr = child.stderr.toString(); - assert.strictEqual(child.status, 1); - assert.match(stderr, /SyntaxError: Expected ':' after property name/); - assert( - stderr.includes( - `Cannot parse JSON from ${config}` - ) - ); } { tmpdir.refresh(); const config = tmpdir.resolve('empty.json'); 
writeFileSync(config, '{}', 'utf8'); - const child = spawnSync( + spawnSyncAndAssert( process.execPath, ['--experimental-sea-config', config], { cwd: tmpdir.path, + }, { + status: 1, + stderr: /"main" field of .*empty\.json is not a non-empty string/ }); - const stderr = child.stderr.toString(); - assert.strictEqual(child.status, 1); - assert( - stderr.includes( - `"main" field of ${config} is not a non-empty string` - ) - ); } { tmpdir.refresh(); const config = tmpdir.resolve('no-main.json'); writeFileSync(config, '{"output": "test.blob"}', 'utf8'); - const child = spawnSync( + spawnSyncAndAssert( process.execPath, ['--experimental-sea-config', config], { cwd: tmpdir.path, + }, { + status: 1, + stderr: /"main" field of .*no-main\.json is not a non-empty string/ }); - const stderr = child.stderr.toString(); - assert.strictEqual(child.status, 1); - assert( - stderr.includes( - `"main" field of ${config} is not a non-empty string` - ) - ); } { tmpdir.refresh(); const config = tmpdir.resolve('no-output.json'); writeFileSync(config, '{"main": "bundle.js"}', 'utf8'); - const child = spawnSync( + spawnSyncAndAssert( process.execPath, ['--experimental-sea-config', config], { cwd: tmpdir.path, + }, { + status: 1, + stderr: /"output" field of .*no-output\.json is not a non-empty string/ }); - const stderr = child.stderr.toString(); - assert.strictEqual(child.status, 1); - assert( - stderr.includes( - `"output" field of ${config} is not a non-empty string` - ) - ); } { @@ -124,32 +99,28 @@ const assert = require('assert'); "disableExperimentalSEAWarning": "💥" } `, 'utf8'); - const child = spawnSync( + spawnSyncAndAssert( process.execPath, ['--experimental-sea-config', config], { cwd: tmpdir.path, + }, { + status: 1, + stderr: /"disableExperimentalSEAWarning" field of .*invalid-disableExperimentalSEAWarning\.json is not a Boolean/ }); - const stderr = child.stderr.toString(); - assert.strictEqual(child.status, 1); - assert( - stderr.includes( - `"disableExperimentalSEAWarning" field of ${config} is not a Boolean` - ) - ); } { tmpdir.refresh(); const config = tmpdir.resolve('nonexistent-main-relative.json'); writeFileSync(config, '{"main": "bundle.js", "output": "sea.blob"}', 'utf8'); - const child = spawnSync( + spawnSyncAndAssert( process.execPath, ['--experimental-sea-config', config], { cwd: tmpdir.path, + }, { + status: 1, + stderr: /Cannot read main script .*bundle\.js/ }); - const stderr = child.stderr.toString(); - assert.strictEqual(child.status, 1); - assert.match(stderr, /Cannot read main script bundle\.js/); } { @@ -161,19 +132,14 @@ const assert = require('assert'); output: 'sea.blob' }); writeFileSync(config, configJson, 'utf8'); - const child = spawnSync( + spawnSyncAndAssert( process.execPath, ['--experimental-sea-config', config], { cwd: tmpdir.path, + }, { + status: 1, + stderr: /Cannot read main script .*bundle\.js/ }); - - const stderr = child.stderr.toString(); - assert.strictEqual(child.status, 1); - assert( - stderr.includes( - `Cannot read main script ${main}` - ) - ); } { @@ -188,19 +154,14 @@ const assert = require('assert'); output, }); writeFileSync(config, configJson, 'utf8'); - const child = spawnSync( + spawnSyncAndAssert( process.execPath, ['--experimental-sea-config', config], { cwd: tmpdir.path, + }, { + status: 1, + stderr: /Cannot write output to .*output-dir/ }); - - const stderr = child.stderr.toString(); - assert.strictEqual(child.status, 1); - assert( - stderr.includes( - `Cannot write output to ${output}` - ) - ); } { @@ -215,13 +176,12 @@ const assert = 
require('assert'); output: 'output-dir' }); writeFileSync(config, configJson, 'utf8'); - const child = spawnSync( + spawnSyncAndAssert( process.execPath, ['--experimental-sea-config', config], { cwd: tmpdir.path, + }, { + status: 1, + stderr: /Cannot write output to output-dir/ }); - - const stderr = child.stderr.toString(); - assert.strictEqual(child.status, 1); - assert.match(stderr, /Cannot write output to output-dir/); } diff --git a/test/parallel/test-stream-readable-async-iterators.js b/test/parallel/test-stream-readable-async-iterators.js index 28975de405133f..beece038772a34 100644 --- a/test/parallel/test-stream-readable-async-iterators.js +++ b/test/parallel/test-stream-readable-async-iterators.js @@ -690,7 +690,7 @@ async function tests() { const it = r[Symbol.asyncIterator](); const p = it.return(); r.emit('close'); - p.then(common.mustCall()).catch(common.mustNotCall()); + p.then(common.mustCall()); } { diff --git a/test/parallel/test-stream-readable-to-web.js b/test/parallel/test-stream-readable-to-web.js deleted file mode 100644 index 753672b509c173..00000000000000 --- a/test/parallel/test-stream-readable-to-web.js +++ /dev/null @@ -1,62 +0,0 @@ -'use strict'; -const common = require('../common'); -if (!common.hasCrypto) { common.skip('missing crypto'); } - -const { Readable } = require('stream'); -const process = require('process'); -const { randomBytes } = require('crypto'); -const assert = require('assert'); - -// Based on: https://github.com/nodejs/node/issues/46347#issuecomment-1413886707 -// edit: make it cross-platform as /dev/urandom is not available on Windows -{ - let currentMemoryUsage = process.memoryUsage().arrayBuffers; - - // We initialize a stream, but not start consuming it - const randomNodeStream = new Readable({ - read(size) { - randomBytes(size, (err, buffer) => { - if (err) { - // If an error occurs, emit an 'error' event - this.emit('error', err); - return; - } - - // Push the random bytes to the stream - this.push(buffer); - }); - } - }); - // after 2 seconds, it'll get converted to web stream - let randomWebStream; - - // We check memory usage every second - // since it's a stream, it shouldn't be higher than the chunk size - const reportMemoryUsage = () => { - const { arrayBuffers } = process.memoryUsage(); - currentMemoryUsage = arrayBuffers; - - assert(currentMemoryUsage <= 256 * 1024 * 1024); - }; - setInterval(reportMemoryUsage, 1000); - - // after 1 second we use Readable.toWeb - // memory usage should stay pretty much the same since it's still a stream - setTimeout(() => { - randomWebStream = Readable.toWeb(randomNodeStream); - }, 1000); - - // after 2 seconds we start consuming the stream - // memory usage will grow, but the old chunks should be garbage-collected pretty quickly - setTimeout(async () => { - // eslint-disable-next-line no-unused-vars - for await (const _ of randomWebStream) { - // Do nothing, just let the stream flow - } - }, 2000); - - setTimeout(() => { - // Test considered passed if we don't crash - process.exit(0); - }, 5000); -} diff --git a/test/parallel/test-stream-readable-to-web.mjs b/test/parallel/test-stream-readable-to-web.mjs new file mode 100644 index 00000000000000..8ff9f2fb16ed1c --- /dev/null +++ b/test/parallel/test-stream-readable-to-web.mjs @@ -0,0 +1,64 @@ +import { mustCall } from '../common/index.mjs'; +import { Readable } from 'node:stream'; +import { memoryUsage } from 'node:process'; +import assert from 'node:assert'; +import { setImmediate } from 'node:timers/promises'; + +// Based on: 
https://github.com/nodejs/node/issues/46347#issuecomment-1413886707 +// edit: make it cross-platform as /dev/urandom is not available on Windows + +const MAX_MEM = 256 * 1024 * 1024; // 256 MiB + +function checkMemoryUsage() { + assert(memoryUsage().arrayBuffers < MAX_MEM); +} + +const MAX_BUFFERS = 1000; +let buffersCreated = 0; + +const randomNodeStream = new Readable({ + read(size) { + if (buffersCreated >= MAX_BUFFERS) { + this.push(null); + return; + } + + this.push(Buffer.alloc(size)); + buffersCreated++; + } +}); + +randomNodeStream.on('error', (err) => { + assert.fail(err); +}); + +// Before doing anything, make sure memory usage is okay +checkMemoryUsage(); + +// Create stream and check memory usage remains okay + +const randomWebStream = Readable.toWeb(randomNodeStream); + +checkMemoryUsage(); + +let timeout; +try { + // Wait two seconds before consuming the stream to see if memory usage increases + timeout = setTimeout(mustCall(async () => { + // Did the stream leak memory? + checkMemoryUsage(); + // eslint-disable-next-line no-unused-vars + for await (const _ of randomWebStream) { + // Yield event loop to allow garbage collection + await setImmediate(); + // consume the stream + // check memory usage remains okay + checkMemoryUsage(); + } + }), 2000); +} catch (err) { + if (timeout) { + clearTimeout(timeout); + } + assert.fail(err); +} diff --git a/test/parallel/test-stream-toArray.js b/test/parallel/test-stream-toArray.js index 5c86410ed74c09..438e21724e4060 100644 --- a/test/parallel/test-stream-toArray.js +++ b/test/parallel/test-stream-toArray.js @@ -59,7 +59,7 @@ const assert = require('assert'); const ac = new AbortController(); let stream; assert.rejects(async () => { - stream = Readable.from([1, 2, 3]).map(async (x) => { + stream = Readable.from([1, 2, 3, 4]).map(async (x) => { if (x === 3) { await new Promise(() => {}); // Explicitly do not pass signal here } @@ -69,8 +69,8 @@ const assert = require('assert'); }, { name: 'AbortError', }).then(common.mustCall(() => { - // Only stops toArray, does not destroy the stream - assert(stream.destroyed, false); + // Stops toArray *and* destroys the stream + assert.strictEqual(stream.destroyed, true); })); ac.abort(); } diff --git a/test/parallel/test-tls-get-ca-certificates-node-use-system-ca.js b/test/parallel/test-tls-get-ca-certificates-node-use-system-ca.js new file mode 100644 index 00000000000000..81a5cba4da77e2 --- /dev/null +++ b/test/parallel/test-tls-get-ca-certificates-node-use-system-ca.js @@ -0,0 +1,29 @@ +'use strict'; +// This tests that NODE_USE_SYSTEM_CA environment variable works the same +// as --use-system-ca flag by comparing certificate counts. 
+ +const common = require('../common'); +if (!common.hasCrypto) common.skip('missing crypto'); + +const tls = require('tls'); +const { spawnSyncAndExitWithoutError } = require('../common/child_process'); + +const systemCerts = tls.getCACertificates('system'); +if (systemCerts.length === 0) { + common.skip('no system certificates available'); +} + +const { child: { stdout: expectedLength } } = spawnSyncAndExitWithoutError(process.execPath, [ + '--use-system-ca', + '-p', + `tls.getCACertificates('default').length`, +], { + env: { ...process.env, NODE_USE_SYSTEM_CA: '0' }, +}); + +spawnSyncAndExitWithoutError(process.execPath, [ + '-p', + `assert.strictEqual(tls.getCACertificates('default').length, ${expectedLength.toString()})`, +], { + env: { ...process.env, NODE_USE_SYSTEM_CA: '1' }, +}); diff --git a/test/parallel/test-tls-session-timeout-errors.js b/test/parallel/test-tls-session-timeout-errors.js new file mode 100644 index 00000000000000..6e5646127c80b4 --- /dev/null +++ b/test/parallel/test-tls-session-timeout-errors.js @@ -0,0 +1,36 @@ +'use strict'; +// This tests validation of sessionTimeout option in TLS server. +const common = require('../common'); + +if (!common.hasCrypto) { + common.skip('missing crypto'); +} + +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); + +const assert = require('assert'); +const tls = require('tls'); +const fixtures = require('../common/fixtures'); + +const key = fixtures.readKey('rsa_private.pem'); +const cert = fixtures.readKey('rsa_cert.crt'); + +// Node.js should not allow setting negative timeouts since new versions of +// OpenSSL do not handle those as users might expect + +for (const sessionTimeout of [-1, -100, -(2 ** 31)]) { + assert.throws(() => { + tls.createServer({ + key: key, + cert: cert, + ca: [cert], + sessionTimeout, + maxVersion: 'TLSv1.2', + }); + }, { + code: 'ERR_OUT_OF_RANGE', + message: 'The value of "options.sessionTimeout" is out of range. It ' + + `must be >= 0 && <= ${2 ** 31 - 1}. Received ${sessionTimeout}`, + }); +} diff --git a/test/parallel/test-tls-set-default-ca-certificates-append-fetch.mjs b/test/parallel/test-tls-set-default-ca-certificates-append-fetch.mjs new file mode 100644 index 00000000000000..ca8436b22b0a70 --- /dev/null +++ b/test/parallel/test-tls-set-default-ca-certificates-append-fetch.mjs @@ -0,0 +1,54 @@ +// Flags: --no-use-system-ca + + +// This tests appending certificates to existing defaults should work correctly +// with fetch. 
+ +import * as common from '../common/index.mjs'; +import { once } from 'node:events'; +import * as fixtures from '../common/fixtures.mjs'; +import assert from 'node:assert'; + +if (!common.hasCrypto) common.skip('missing crypto'); + +const { includesCert } = await import('../common/tls.js'); +const { default: https } = await import('node:https'); +const { default: tls } = await import('node:tls'); + +const bundledCerts = tls.getCACertificates('bundled'); +const fixtureCert = fixtures.readKey('fake-startcom-root-cert.pem'); +if (includesCert(bundledCerts, fixtureCert)) { + common.skip('fake-startcom-root-cert is already in bundled certificates, skipping test'); +} + +// Test HTTPS connection fails with bundled CA, succeeds after adding custom CA +const server = https.createServer({ + cert: fixtures.readKey('agent8-cert.pem'), + key: fixtures.readKey('agent8-key.pem'), +}, common.mustCall((req, res) => { + res.writeHead(200); + res.end('hello world'); +}, 1)); +server.listen(0); +await once(server, 'listening'); +const url = `https://localhost:${server.address().port}/hello-world`; + +// First attempt should fail without custom CA. +await assert.rejects( + fetch(url), + (err) => { + assert.strictEqual(err.cause.code, 'UNABLE_TO_VERIFY_LEAF_SIGNATURE'); + return true; + }, +); + +// Now enable custom CA certificate. +tls.setDefaultCACertificates([fixtureCert]); + +// Second attempt should succeed. +const response = await fetch(url); +assert.strictEqual(response.status, 200); +const text = await response.text(); +assert.strictEqual(text, 'hello world'); + +server.close(); diff --git a/test/parallel/test-tls-set-default-ca-certificates-append-https-request.js b/test/parallel/test-tls-set-default-ca-certificates-append-https-request.js new file mode 100644 index 00000000000000..d7a3baded420de --- /dev/null +++ b/test/parallel/test-tls-set-default-ca-certificates-append-https-request.js @@ -0,0 +1,71 @@ +'use strict'; + +// This tests appending certificates to existing defaults should work correctly +// with https.request(). 
+ +const common = require('../common'); +if (!common.hasCrypto) common.skip('missing crypto'); + +const assert = require('assert'); +const https = require('https'); +const tls = require('tls'); +const fixtures = require('../common/fixtures'); +const { includesCert } = require('../common/tls'); + +const bundledCerts = tls.getCACertificates('bundled'); +const fixtureCert = fixtures.readKey('fake-startcom-root-cert.pem'); +if (includesCert(bundledCerts, fixtureCert)) { + common.skip('fake-startcom-root-cert is already in bundled certificates, skipping test'); +} + +// Test HTTPS connection fails with bundled CA, succeeds after adding custom CA +const server = https.createServer({ + cert: fixtures.readKey('agent8-cert.pem'), + key: fixtures.readKey('agent8-key.pem'), +}, (req, res) => { + res.writeHead(200); + res.end('success'); +}); + +server.listen(0, common.mustCall(() => { + const port = server.address().port; + + // Set to bundled CA certificates - connection should fail + tls.setDefaultCACertificates(bundledCerts); + + const req1 = https.request({ + hostname: 'localhost', + port: port, + path: '/', + method: 'GET' + }, common.mustNotCall('Should not succeed with bundled CA only')); + + req1.on('error', common.mustCall((err) => { + console.log(err); + // Should fail with certificate verification error + assert.strictEqual(err.code, 'UNABLE_TO_VERIFY_LEAF_SIGNATURE'); + + // Now add the fake-startcom-root-cert to bundled certs - connection should succeed + tls.setDefaultCACertificates([...bundledCerts, fixtureCert]); + + const req2 = https.request({ + hostname: 'localhost', + port: port, + path: '/', + method: 'GET' + }, common.mustCall((res) => { + assert.strictEqual(res.statusCode, 200); + let data = ''; + res.on('data', (chunk) => data += chunk); + res.on('end', common.mustCall(() => { + assert.strictEqual(data, 'success'); + server.close(); + })); + })); + + req2.on('error', common.mustNotCall('Should not error with correct CA added')); + req2.end(); + })); + + req1.end(); +})); diff --git a/test/parallel/test-tls-set-default-ca-certificates-array-buffer.js b/test/parallel/test-tls-set-default-ca-certificates-array-buffer.js new file mode 100644 index 00000000000000..0ea30721e57c32 --- /dev/null +++ b/test/parallel/test-tls-set-default-ca-certificates-array-buffer.js @@ -0,0 +1,39 @@ +// Flags: --no-use-system-ca +'use strict'; + +// This tests tls.setDefaultCACertificates() support ArrayBufferView. + +const common = require('../common'); +if (!common.hasCrypto) common.skip('missing crypto'); + +const tls = require('tls'); +const fixtures = require('../common/fixtures'); +const { assertEqualCerts } = require('../common/tls'); + +const fixtureCert = fixtures.readKey('fake-startcom-root-cert.pem'); + +// Should accept Buffer. +tls.setDefaultCACertificates([Buffer.from(fixtureCert)]); +const result = tls.getCACertificates('default'); +assertEqualCerts(result, [fixtureCert]); + +// Reset it to empty. +tls.setDefaultCACertificates([]); +assertEqualCerts(tls.getCACertificates('default'), []); + +// Should accept Uint8Array. +const encoder = new TextEncoder(); +const uint8Cert = encoder.encode(fixtureCert); +tls.setDefaultCACertificates([uint8Cert]); +const uint8Result = tls.getCACertificates('default'); +assertEqualCerts(uint8Result, [fixtureCert]); + +// Reset it to empty. +tls.setDefaultCACertificates([]); +assertEqualCerts(tls.getCACertificates('default'), []); + +// Should accept DataView. 
+const dataViewCert = new DataView(uint8Cert.buffer, uint8Cert.byteOffset, uint8Cert.byteLength); +tls.setDefaultCACertificates([dataViewCert]); +const dataViewResult = tls.getCACertificates('default'); +assertEqualCerts(dataViewResult, [fixtureCert]); diff --git a/test/parallel/test-tls-set-default-ca-certificates-basic.js b/test/parallel/test-tls-set-default-ca-certificates-basic.js new file mode 100644 index 00000000000000..f6772110e54e36 --- /dev/null +++ b/test/parallel/test-tls-set-default-ca-certificates-basic.js @@ -0,0 +1,58 @@ +'use strict'; + +// This tests the basic functionality of tls.setDefaultCACertificates(). + +const common = require('../common'); +if (!common.hasCrypto) common.skip('missing crypto'); + +const tls = require('tls'); +const fixtures = require('../common/fixtures'); +const { assertEqualCerts } = require('../common/tls'); + +const originalBundled = tls.getCACertificates('bundled'); +const originalSystem = tls.getCACertificates('system'); +const fixtureCert = fixtures.readKey('fake-startcom-root-cert.pem'); + +function testSetCertificates(certs) { + // Test setting it can be verified with tls.getCACertificates(). + tls.setDefaultCACertificates(certs); + const result = tls.getCACertificates('default'); + assertEqualCerts(result, certs); + + // Verify that other certificate types are unchanged + const newBundled = tls.getCACertificates('bundled'); + const newSystem = tls.getCACertificates('system'); + assertEqualCerts(newBundled, originalBundled); + assertEqualCerts(newSystem, originalSystem); + + // Test implicit defaults. + const implicitDefaults = tls.getCACertificates(); + assertEqualCerts(implicitDefaults, certs); + + // Test cached results. + const cachedResult = tls.getCACertificates('default'); + assertEqualCerts(cachedResult, certs); + const cachedImplicitDefaults = tls.getCACertificates(); + assertEqualCerts(cachedImplicitDefaults, certs); +} + +// Test setting with fixture certificate. +testSetCertificates([fixtureCert]); + +// Test setting with empty array. +testSetCertificates([]); + +// Test setting with bundled certificates +testSetCertificates(originalBundled); + +// Test combining bundled and extra certificates. +testSetCertificates([...originalBundled, fixtureCert]); + +// Test setting with a subset of bundled certificates +if (originalBundled.length >= 3) { + testSetCertificates(originalBundled.slice(0, 3)); +} + +// Test duplicate certificates +tls.setDefaultCACertificates([fixtureCert, fixtureCert, fixtureCert]); +assertEqualCerts(tls.getCACertificates('default'), [fixtureCert]); diff --git a/test/parallel/test-tls-set-default-ca-certificates-error.js b/test/parallel/test-tls-set-default-ca-certificates-error.js new file mode 100644 index 00000000000000..1d529a97265a14 --- /dev/null +++ b/test/parallel/test-tls-set-default-ca-certificates-error.js @@ -0,0 +1,41 @@ +'use strict'; + +// This tests input validation of tls.setDefaultCACertificates(). 
+ +const common = require('../common'); +if (!common.hasCrypto) common.skip('missing crypto'); + +const fixtures = require('../common/fixtures'); +const assert = require('assert'); +const tls = require('tls'); +const { assertEqualCerts } = require('../common/tls'); + +const defaultCerts = tls.getCACertificates('default'); +const fixtureCert = fixtures.readKey('fake-startcom-root-cert.pem'); + +for (const invalid of [null, undefined, 'string', 42, {}, true]) { + // Test input validation - should throw when not passed an array + assert.throws(() => tls.setDefaultCACertificates(invalid), { + code: 'ERR_INVALID_ARG_TYPE', + message: /The "certs" argument must be an instance of Array/ + }); + // Verify that default certificates remain unchanged after error. + assertEqualCerts(tls.getCACertificates('default'), defaultCerts); +} + +for (const invalid of [null, undefined, 42, {}, true]) { + // Test input validation - should throw when passed an array with invalid elements + assert.throws(() => tls.setDefaultCACertificates([invalid]), { + code: 'ERR_INVALID_ARG_TYPE', + message: /The "certs\[0\]" argument must be of type string or an instance of ArrayBufferView/ + }); + // Verify that default certificates remain unchanged after error. + assertEqualCerts(tls.getCACertificates('default'), defaultCerts); + + assert.throws(() => tls.setDefaultCACertificates([fixtureCert, invalid]), { + code: 'ERR_INVALID_ARG_TYPE', + message: /The "certs\[1\]" argument must be of type string or an instance of ArrayBufferView/ + }); + // Verify that default certificates remain unchanged after error. + assertEqualCerts(tls.getCACertificates('default'), defaultCerts); +} diff --git a/test/parallel/test-tls-set-default-ca-certificates-extra-override.js b/test/parallel/test-tls-set-default-ca-certificates-extra-override.js new file mode 100644 index 00000000000000..cf7790e5d0835f --- /dev/null +++ b/test/parallel/test-tls-set-default-ca-certificates-extra-override.js @@ -0,0 +1,19 @@ +'use strict'; + +// This tests that tls.setDefaultCACertificates() properly overrides certificates +// added through NODE_EXTRA_CA_CERTS environment variable. + +const common = require('../common'); +if (!common.hasCrypto) common.skip('missing crypto'); + +const fixtures = require('../common/fixtures'); +const { spawnSyncAndExitWithoutError } = require('../common/child_process'); + +spawnSyncAndExitWithoutError(process.execPath, [ + fixtures.path('tls-extra-ca-override.js'), +], { + env: { + ...process.env, + NODE_EXTRA_CA_CERTS: fixtures.path('keys', 'fake-startcom-root-cert.pem') + } +}); diff --git a/test/parallel/test-tls-set-default-ca-certificates-mixed-types.js b/test/parallel/test-tls-set-default-ca-certificates-mixed-types.js new file mode 100644 index 00000000000000..2f22ed8ec34341 --- /dev/null +++ b/test/parallel/test-tls-set-default-ca-certificates-mixed-types.js @@ -0,0 +1,46 @@ +'use strict'; + +// This tests mixed input types for tls.setDefaultCACertificates(). + +const common = require('../common'); +if (!common.hasCrypto) common.skip('missing crypto'); + +const tls = require('tls'); +const { assertEqualCerts } = require('../common/tls'); + +const bundledCerts = tls.getCACertificates('bundled'); +if (bundledCerts.length < 4) { + common.skip('Not enough bundled CA certificates available'); +} + +const encoder = new TextEncoder(); + +// Test mixed array with string and Buffer. 
+{ + tls.setDefaultCACertificates([bundledCerts[0], Buffer.from(bundledCerts[1], 'utf8')]); + const result = tls.getCACertificates('default'); + assertEqualCerts(result, [bundledCerts[0], bundledCerts[1]]); +} + +// Test mixed array with string and Uint8Array. +{ + tls.setDefaultCACertificates([bundledCerts[1], encoder.encode(bundledCerts[2])]); + const result = tls.getCACertificates('default'); + assertEqualCerts(result, [bundledCerts[1], bundledCerts[2]]); +} + +// Test mixed array with string and DataView. +{ + const uint8Cert = encoder.encode(bundledCerts[3]); + const dataViewCert = new DataView(uint8Cert.buffer, uint8Cert.byteOffset, uint8Cert.byteLength); + tls.setDefaultCACertificates([bundledCerts[1], dataViewCert]); + const result = tls.getCACertificates('default'); + assertEqualCerts(result, [bundledCerts[1], bundledCerts[3]]); +} + +// Test mixed array with Buffer and Uint8Array. +{ + tls.setDefaultCACertificates([Buffer.from(bundledCerts[0], 'utf8'), encoder.encode(bundledCerts[2])]); + const result = tls.getCACertificates('default'); + assertEqualCerts(result, [bundledCerts[0], bundledCerts[2]]); +} diff --git a/test/parallel/test-tls-set-default-ca-certificates-precedence-bundled.js b/test/parallel/test-tls-set-default-ca-certificates-precedence-bundled.js new file mode 100644 index 00000000000000..a9658adbb01e9c --- /dev/null +++ b/test/parallel/test-tls-set-default-ca-certificates-precedence-bundled.js @@ -0,0 +1,53 @@ +'use strict'; + +// This tests that per-connection ca option overrides bundled default CA certificates. + +const common = require('../common'); +if (!common.hasCrypto) common.skip('missing crypto'); + +const assert = require('assert'); +const https = require('https'); +const tls = require('tls'); +const fixtures = require('../common/fixtures'); +const { includesCert } = require('../common/tls'); + +const server = https.createServer({ + cert: fixtures.readKey('agent8-cert.pem'), + key: fixtures.readKey('agent8-key.pem'), +}, common.mustCall((req, res) => { + res.writeHead(200); + res.end('override works'); +}, 1)); + +server.listen(0, common.mustCall(() => { + const port = server.address().port; + const bundledCerts = tls.getCACertificates('bundled'); + const fakeStartcomCert = fixtures.readKey('fake-startcom-root-cert.pem'); + + // Set default CA to bundled certs (which don't include fake-startcom-root-cert) + tls.setDefaultCACertificates(bundledCerts); + + // Verify that fake-startcom-root-cert is not in default + const defaultCerts = tls.getCACertificates('default'); + assert(!includesCert(defaultCerts, fakeStartcomCert)); + + // Connection with per-connection ca should succeed despite wrong default + const req = https.request({ + hostname: 'localhost', + port: port, + path: '/', + method: 'GET', + ca: [fakeStartcomCert] // This should override the bundled defaults + }, common.mustCall((res) => { + assert.strictEqual(res.statusCode, 200); + let data = ''; + res.on('data', (chunk) => data += chunk); + res.on('end', common.mustCall(() => { + assert.strictEqual(data, 'override works'); + server.close(); + })); + })); + + req.on('error', common.mustNotCall('Should not error with per-connection ca option')); + req.end(); +})); diff --git a/test/parallel/test-tls-set-default-ca-certificates-precedence-empty.js b/test/parallel/test-tls-set-default-ca-certificates-precedence-empty.js new file mode 100644 index 00000000000000..1eacbc3109d2d0 --- /dev/null +++ b/test/parallel/test-tls-set-default-ca-certificates-precedence-empty.js @@ -0,0 +1,51 @@ +'use strict'; + 
+// This tests that per-connection ca option overrides empty default CA certificates + +const common = require('../common'); +if (!common.hasCrypto) common.skip('missing crypto'); + +const assert = require('assert'); +const https = require('https'); +const tls = require('tls'); +const fixtures = require('../common/fixtures'); + +const server = https.createServer({ + cert: fixtures.readKey('agent8-cert.pem'), + key: fixtures.readKey('agent8-key.pem'), +}, common.mustCall((req, res) => { + res.writeHead(200); + res.end('per-connection ca works'); +}, 1)); + +server.listen(0, common.mustCall(() => { + const port = server.address().port; + const fakeStartcomCert = fixtures.readKey('fake-startcom-root-cert.pem'); + + // Set default CA to empty array - connections should normally fail + tls.setDefaultCACertificates([]); + + // Verify that default CA is empty + const defaultCerts = tls.getCACertificates('default'); + assert.deepStrictEqual(defaultCerts, []); + + // Connection with per-connection ca option should succeed despite empty default + const req = https.request({ + hostname: 'localhost', + port: port, + path: '/', + method: 'GET', + ca: [fakeStartcomCert] // This should override the empty default + }, common.mustCall((res) => { + assert.strictEqual(res.statusCode, 200); + let data = ''; + res.on('data', (chunk) => data += chunk); + res.on('end', common.mustCall(() => { + assert.strictEqual(data, 'per-connection ca works'); + server.close(); + })); + })); + + req.on('error', common.mustNotCall('Should not error with per-connection ca option')); + req.end(); +})); diff --git a/test/parallel/test-tls-set-default-ca-certificates-recovery.js b/test/parallel/test-tls-set-default-ca-certificates-recovery.js new file mode 100644 index 00000000000000..e3eb0e84149ae8 --- /dev/null +++ b/test/parallel/test-tls-set-default-ca-certificates-recovery.js @@ -0,0 +1,43 @@ +'use strict'; + +// This tests error recovery and fallback behavior for tls.setDefaultCACertificates() + +const common = require('../common'); +if (!common.hasCrypto) common.skip('missing crypto'); + +const assert = require('assert'); +const tls = require('tls'); +const fixtures = require('../common/fixtures'); +const { assertEqualCerts } = require('../common/tls'); + +const fixtureCert = fixtures.readKey('fake-startcom-root-cert.pem'); + +// Test recovery from errors when setting default CA certificates. +function testRecovery(expectedCerts) { + { + const invalidCert = 'not a valid certificate'; + assert.throws(() => tls.setDefaultCACertificates([invalidCert]), { + code: 'ERR_CRYPTO_OPERATION_FAILED', + message: /No valid certificates found in the provided array/ + }); + assertEqualCerts(tls.getCACertificates('default'), expectedCerts); + } + + // Test with mixed valid and invalid certificate formats. + { + const invalidCert = '-----BEGIN CERTIFICATE-----\nvalid cert content\n-----END CERTIFICATE-----'; + assert.throws(() => tls.setDefaultCACertificates([fixtureCert, invalidCert]), { + code: 'ERR_OSSL_PEM_ASN1_LIB', + }); + assertEqualCerts(tls.getCACertificates('default'), expectedCerts); + } +} + +const originalDefaultCerts = tls.getCACertificates('default'); +testRecovery(originalDefaultCerts); + +// Check that recovery still works after replacing the default certificates. 
+const subset = tls.getCACertificates('bundled').slice(0, 3); +tls.setDefaultCACertificates(subset); +assertEqualCerts(tls.getCACertificates('default'), subset); +testRecovery(subset); diff --git a/test/parallel/test-tls-set-default-ca-certificates-reset-fetch.mjs b/test/parallel/test-tls-set-default-ca-certificates-reset-fetch.mjs new file mode 100644 index 00000000000000..479b415d4c518c --- /dev/null +++ b/test/parallel/test-tls-set-default-ca-certificates-reset-fetch.mjs @@ -0,0 +1,47 @@ +// Flags: --no-use-system-ca + + +// This tests appending certificates to existing defaults should work correctly +// with fetch. + +import * as common from '../common/index.mjs'; +import { once } from 'node:events'; +import * as fixtures from '../common/fixtures.mjs'; +import assert from 'node:assert'; + +if (!common.hasCrypto) common.skip('missing crypto'); + +const { default: https } = await import('node:https'); +const { default: tls } = await import('node:tls'); + +// Test HTTPS connection fails with bundled CA, succeeds after adding custom CA. +const server = https.createServer({ + cert: fixtures.readKey('agent8-cert.pem'), + key: fixtures.readKey('agent8-key.pem'), +}, common.mustCall((req, res) => { + res.writeHead(200); + res.end('hello world'); +}, 1)); +server.listen(0); +await once(server, 'listening'); + +const fixturesCert = fixtures.readKey('fake-startcom-root-cert.pem'); +tls.setDefaultCACertificates([fixturesCert]); +// First, verify connection works with custom CA. +const response1 = await fetch(`https://localhost:${server.address().port}/custom-ca-test`); +assert.strictEqual(response1.status, 200); +const text1 = await response1.text(); +assert.strictEqual(text1, 'hello world'); + +// Now set empty CA store - connection should fail. +tls.setDefaultCACertificates([]); +// Use IP address to skip session cache. +await assert.rejects( + fetch(`https://127.0.0.1:${server.address().port}/empty-ca-test`), + (err) => { + assert.strictEqual(err.cause.code, 'UNABLE_TO_VERIFY_LEAF_SIGNATURE'); + return true; + }, +); + +server.close(); diff --git a/test/parallel/test-tls-set-default-ca-certificates-reset-https-request.js b/test/parallel/test-tls-set-default-ca-certificates-reset-https-request.js new file mode 100644 index 00000000000000..7389cacf6d6682 --- /dev/null +++ b/test/parallel/test-tls-set-default-ca-certificates-reset-https-request.js @@ -0,0 +1,62 @@ +'use strict'; + +// This tests that tls.setDefaultCACertificates() affects actual HTTPS connections + +const common = require('../common'); +if (!common.hasCrypto) common.skip('missing crypto'); + +const assert = require('assert'); +const https = require('https'); +const tls = require('tls'); +const fixtures = require('../common/fixtures'); + +// Test HTTPS connection succeeds with proper CA, fails after removing it +const server = https.createServer({ + cert: fixtures.readKey('agent8-cert.pem'), + key: fixtures.readKey('agent8-key.pem'), +}, common.mustCall((req, res) => { + res.writeHead(200); + res.end('hello world'); +}, 1)); + +server.listen(0, common.mustCall(() => { + const port = server.address().port; + + // First, set the correct CA certificate - connection should succeed. 
+ tls.setDefaultCACertificates([fixtures.readKey('fake-startcom-root-cert.pem')]); + + const req1 = https.request({ + hostname: 'localhost', + port: port, + path: '/', + method: 'GET' + }, common.mustCall((res) => { + assert.strictEqual(res.statusCode, 200); + let data = ''; + res.on('data', (chunk) => data += chunk); + res.on('end', common.mustCall(() => { + assert.strictEqual(data, 'hello world'); + + // Now set empty CA store - connection should fail. + tls.setDefaultCACertificates([]); + + const req2 = https.request({ + hostname: '127.0.0.1', // Use a different hostname to skip session cache. + port: port, + path: '/', + method: 'GET' + }, common.mustNotCall('Should not succeed with empty CA')); + + req2.on('error', common.mustCall((err) => { + // Should fail with certificate verification error. + assert.strictEqual(err.code, 'UNABLE_TO_VERIFY_LEAF_SIGNATURE'); + server.close(); + })); + + req2.end(); + })); + })); + + req1.on('error', common.mustNotCall('Should not error with correct CA')); + req1.end(); +})); diff --git a/test/parallel/test-trace-sigint-in-worker.js b/test/parallel/test-trace-sigint-in-worker.js new file mode 100644 index 00000000000000..ed7ca3908e7ca5 --- /dev/null +++ b/test/parallel/test-trace-sigint-in-worker.js @@ -0,0 +1,20 @@ +'use strict'; + +const common = require('../common'); + +const assert = require('assert'); +const util = require('util'); +const { Worker, workerData } = require('worker_threads'); + +if (workerData?.isWorker) { + assert.throws(() => { + util.setTraceSigInt(true); + }, { + code: 'ERR_WORKER_UNSUPPORTED_OPERATION', + }); +} else { + const w = new Worker(__filename, { workerData: { isWorker: true } }); + w.on('exit', common.mustCall((code) => { + assert.strictEqual(code, 0); + })); +} diff --git a/test/parallel/test-util-styletext.js b/test/parallel/test-util-styletext.js index df2334651cc869..b87c5d7e82c74c 100644 --- a/test/parallel/test-util-styletext.js +++ b/test/parallel/test-util-styletext.js @@ -46,6 +46,83 @@ assert.strictEqual( '\u001b[1m\u001b[31mtest\u001b[39m\u001b[22m', ); +assert.strictEqual( + util.styleText('red', + 'A' + util.styleText('blue', 'B', { validateStream: false }) + 'C', + { validateStream: false }), + '\u001b[31mA\u001b[34mB\u001b[31mC\u001b[39m' +); + +assert.strictEqual( + util.styleText('red', + 'red' + + util.styleText('blue', 'blue', { validateStream: false }) + + 'red' + + util.styleText('blue', 'blue', { validateStream: false }) + + 'red', + { validateStream: false } + ), + '\x1B[31mred\x1B[34mblue\x1B[31mred\x1B[34mblue\x1B[31mred\x1B[39m' +); + +assert.strictEqual( + util.styleText('red', + 'red' + + util.styleText('blue', 'blue', { validateStream: false }) + + 'red' + + util.styleText('red', 'red', { validateStream: false }) + + 'red' + + util.styleText('blue', 'blue', { validateStream: false }), + { validateStream: false } + ), + '\x1b[31mred\x1b[34mblue\x1b[31mred\x1b[31mred\x1b[31mred\x1b[34mblue\x1b[39m\x1b[39m' +); + +assert.strictEqual( + util.styleText('red', + 'A' + util.styleText(['bgRed', 'blue'], 'B', { validateStream: false }) + + 'C', { validateStream: false }), + '\x1B[31mA\x1B[41m\x1B[34mB\x1B[31m\x1B[49mC\x1B[39m' +); + +assert.strictEqual( + util.styleText('dim', + 'dim' + + util.styleText('bold', 'bold', { validateStream: false }) + + 'dim', { validateStream: false }), + '\x1B[2mdim\x1B[1mbold\x1B[22m\x1B[2mdim\x1B[22m' +); + +assert.strictEqual( + util.styleText('blue', + 'blue' + + util.styleText('red', + 'red' + + util.styleText('green', 'green', { validateStream: false }) + + 
'red', { validateStream: false }) + + 'blue', { validateStream: false }), + '\x1B[34mblue\x1B[31mred\x1B[32mgreen\x1B[31mred\x1B[34mblue\x1B[39m' +); + +assert.strictEqual( + util.styleText( + 'red', + 'red' + + util.styleText( + 'blue', + 'blue' + util.styleText('red', 'red', { + validateStream: false, + }) + 'blue', + { + validateStream: false, + } + ) + 'red', { + validateStream: false, + } + ), + '\x1b[31mred\x1b[34mblue\x1b[31mred\x1b[34mblue\x1b[31mred\x1b[39m' +); + assert.strictEqual( util.styleText(['bold', 'red'], 'test', { validateStream: false }), util.styleText( diff --git a/test/parallel/test-vm-context-dont-contextify.js b/test/parallel/test-vm-context-dont-contextify.js index d75fc1438d364a..ae9fb676067999 100644 --- a/test/parallel/test-vm-context-dont-contextify.js +++ b/test/parallel/test-vm-context-dont-contextify.js @@ -182,4 +182,4 @@ function checkFrozen(context) { const promise = script.runInContext(context); assert.strictEqual(await promise, namespace); } -})().catch(common.mustNotCall()); +})().then(common.mustCall()); diff --git a/test/parallel/test-worker-cpu-usage.js b/test/parallel/test-worker-cpu-usage.js new file mode 100644 index 00000000000000..b043f4fbd182f9 --- /dev/null +++ b/test/parallel/test-worker-cpu-usage.js @@ -0,0 +1,81 @@ +'use strict'; +const common = require('../common'); +const { isSunOS } = require('../common'); +const assert = require('assert'); +const { + Worker, +} = require('worker_threads'); + +function validate(result) { + assert.ok(typeof result == 'object' && result !== null); + assert.ok(result.user >= 0); + assert.ok(result.system >= 0); + assert.ok(Number.isFinite(result.user)); + assert.ok(Number.isFinite(result.system)); +} + +function check(worker) { + [ + -1, + 1.1, + NaN, + undefined, + {}, + [], + null, + function() {}, + Symbol(), + true, + Infinity, + { user: -1, system: 1 }, + { user: 1, system: -1 }, + ].forEach((value) => { + try { + worker.cpuUsage(value); + } catch (e) { + assert.ok(/ERR_OUT_OF_RANGE|ERR_INVALID_ARG_TYPE/i.test(e.code)); + } + }); +} + +const worker = new Worker(` + const { parentPort } = require('worker_threads'); + parentPort.on('message', () => {}); + `, { eval: true }); + +// See test-process-threadCpuUsage-main-thread.js +if (isSunOS) { + assert.throws( + () => worker.cpuUsage(), + { + code: 'ERR_OPERATION_FAILED', + name: 'Error', + message: 'Operation failed: worker.cpuUsage() is not available on SunOS' + } + ); + worker.terminate(); +} else { + worker.on('online', common.mustCall(async () => { + check(worker); + + const prev = await worker.cpuUsage(); + validate(prev); + + const curr = await worker.cpuUsage(); + validate(curr); + + assert.ok(curr.user >= prev.user); + assert.ok(curr.system >= prev.system); + + const delta = await worker.cpuUsage(curr); + validate(delta); + + worker.terminate(); + })); + + worker.once('exit', common.mustCall(async () => { + await assert.rejects(worker.cpuUsage(), { + code: 'ERR_WORKER_NOT_RUNNING' + }); + })); +} diff --git a/test/parallel/test-worker-message-port-transfer-filehandle.js b/test/parallel/test-worker-message-port-transfer-filehandle.js index 3e6afe22a8c636..41f9ebaff181ed 100644 --- a/test/parallel/test-worker-message-port-transfer-filehandle.js +++ b/test/parallel/test-worker-message-port-transfer-filehandle.js @@ -69,6 +69,11 @@ const { once } = require('events'); assert.strictEqual(fh.fd, -1); port1.postMessage('second message'); + await assert.rejects(() => fh.read(), { + code: 'EBADF', + message: 'The FileHandle has been transferred', + 
syscall: 'read' + }); })().then(common.mustCall()); (async function() { @@ -86,6 +91,7 @@ const { once } = require('events'); }); assert.deepStrictEqual(await readPromise, await fs.readFile(__filename)); + await fh.close(); })().then(common.mustCall()); (async function() { diff --git a/test/parallel/test-zlib-zstd-dictionary.js b/test/parallel/test-zlib-zstd-dictionary.js new file mode 100644 index 00000000000000..28dde28cb055b7 --- /dev/null +++ b/test/parallel/test-zlib-zstd-dictionary.js @@ -0,0 +1,26 @@ +'use strict'; + +const common = require('../common'); +const assert = require('assert'); +const zlib = require('zlib'); + +const dictionary = Buffer.from( + `Lorem ipsum dolor sit amet, consectetur adipiscing elit. + Sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. + Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat.` +); + +const input = Buffer.from( + `Lorem ipsum dolor sit amet, consectetur adipiscing elit. + Lorem ipsum dolor sit amet, consectetur adipiscing elit. + Sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. + Sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. + Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur.` +); + +zlib.zstdCompress(input, { dictionary }, common.mustSucceed((compressed) => { + assert(compressed.length < input.length); + zlib.zstdDecompress(compressed, { dictionary }, common.mustSucceed((decompressed) => { + assert.strictEqual(decompressed.toString(), input.toString()); + })); +})); diff --git a/test/pseudo-tty/test-start-trace-sigint.js b/test/pseudo-tty/test-start-trace-sigint.js new file mode 100644 index 00000000000000..bbadae61093061 --- /dev/null +++ b/test/pseudo-tty/test-start-trace-sigint.js @@ -0,0 +1,32 @@ +'use strict'; + +const { mustCall } = require('../common'); +const childProcess = require('child_process'); +const assert = require('assert'); +const util = require('util'); + +if (process.env.CHILD === 'true') { + main(); +} else { + // Use inherited stdio child process to prevent test tools from determining + // the case as crashed from SIGINT + const cp = childProcess.spawn( + process.execPath, + [__filename], + { + env: { ...process.env, CHILD: 'true' }, + stdio: 'inherit', + }); + cp.on('exit', mustCall((code, signal) => { + assert.strictEqual(signal, 'SIGINT'); + assert.strictEqual(code, null); + })); +} + +function main() { + util.setTraceSigInt(true); + // Deactivate colors even if the tty does support colors. 
+ process.env.NODE_DISABLE_COLORS = '1'; + process.kill(process.pid, 'SIGINT'); + while (true); +} diff --git a/test/pseudo-tty/test-start-trace-sigint.out b/test/pseudo-tty/test-start-trace-sigint.out new file mode 100644 index 00000000000000..e5e4911f844080 --- /dev/null +++ b/test/pseudo-tty/test-start-trace-sigint.out @@ -0,0 +1,11 @@ +KEYBOARD_INTERRUPT: Script execution was interrupted by `SIGINT` + at main (*/test-start-trace-sigint.js:*) + at */test-start-trace-sigint.js:* + at * + at * + at * + at * + at * + at * + at * + at * diff --git a/test/pseudo-tty/test-stop-trace-sigint.js b/test/pseudo-tty/test-stop-trace-sigint.js new file mode 100644 index 00000000000000..363a4008371ba6 --- /dev/null +++ b/test/pseudo-tty/test-stop-trace-sigint.js @@ -0,0 +1,32 @@ +'use strict'; + +const { mustCall } = require('../common'); +const childProcess = require('child_process'); +const assert = require('assert'); +const util = require('util'); + +if (process.env.CHILD === 'true') { + main(); +} else { + // Use inherited stdio child process to prevent test tools from determining + // the case as crashed from SIGINT + const cp = childProcess.spawn( + process.execPath, + ['--trace-sigint', __filename], + { + env: { ...process.env, CHILD: 'true' }, + stdio: 'inherit', + }); + cp.on('exit', mustCall((code, signal) => { + assert.strictEqual(signal, 'SIGINT'); + assert.strictEqual(code, null); + })); +} + +function main() { + util.setTraceSigInt(false); + // Deactivate colors even if the tty does support colors. + process.env.NODE_DISABLE_COLORS = '1'; + process.kill(process.pid, 'SIGINT'); + while (true); +} diff --git a/test/pseudo-tty/test-stop-trace-sigint.out b/test/pseudo-tty/test-stop-trace-sigint.out new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/test/report/report.status b/test/report/report.status index 3832cb760db735..2aaf4228c7caba 100644 --- a/test/report/report.status +++ b/test/report/report.status @@ -12,4 +12,4 @@ test-report-fatal-error: PASS, FLAKY [$system==ibmi] # https://github.com/nodejs/node/issues/58582 -test-report-exclude-network: PASS,FLAKY +test-report-exclude-network: SKIP diff --git a/test/report/test-report-fatalerror-oomerror-compact.js b/test/report/test-report-fatalerror-oomerror-compact.js index 66bc1fa88e624d..efe91f0bb4b3ad 100644 --- a/test/report/test-report-fatalerror-oomerror-compact.js +++ b/test/report/test-report-fatalerror-oomerror-compact.js @@ -25,7 +25,7 @@ const REPORT_FIELDS = [ tmpdir.refresh(); const args = ['--report-on-fatalerror', '--report-compact', ...ARGS]; const child = spawnSync(process.execPath, args, { cwd: tmpdir.path }); - assert.notStrictEqual(child.status, 0, 'Process exited unexpectedly'); + assert.notStrictEqual(child.status, 0); const reports = helper.findReports(child.pid, tmpdir.path); assert.strictEqual(reports.length, 1); diff --git a/test/report/test-report-fatalerror-oomerror-directory.js b/test/report/test-report-fatalerror-oomerror-directory.js index 39ba7a1ca4b516..a0ba3ee9f8125c 100644 --- a/test/report/test-report-fatalerror-oomerror-directory.js +++ b/test/report/test-report-fatalerror-oomerror-directory.js @@ -27,7 +27,7 @@ const REPORT_FIELDS = [ const dir = '--report-directory=' + tmpdir.path; const args = ['--report-on-fatalerror', dir, ...ARGS]; const child = spawnSync(process.execPath, args, { }); - assert.notStrictEqual(child.status, 0, 'Process exited unexpectedly'); + assert.notStrictEqual(child.status, 0); const reports = helper.findReports(child.pid, tmpdir.path); 
assert.strictEqual(reports.length, 1); diff --git a/test/report/test-report-fatalerror-oomerror-filename.js b/test/report/test-report-fatalerror-oomerror-filename.js index 9c3bb7e4d1a3ce..fd0a6a5f378389 100644 --- a/test/report/test-report-fatalerror-oomerror-filename.js +++ b/test/report/test-report-fatalerror-oomerror-filename.js @@ -30,7 +30,7 @@ const REPORT_FIELDS = [ ...ARGS, ]; const child = spawnSync(process.execPath, args, { encoding: 'utf8' }); - assert.notStrictEqual(child.status, 0, 'Process exited unexpectedly'); + assert.notStrictEqual(child.status, 0); const reports = helper.findReports(child.pid, tmpdir.path); assert.strictEqual(reports.length, 0); diff --git a/test/report/test-report-fatalerror-oomerror-not-set.js b/test/report/test-report-fatalerror-oomerror-not-set.js index a54003ac7192ce..79fb81771b26b5 100644 --- a/test/report/test-report-fatalerror-oomerror-not-set.js +++ b/test/report/test-report-fatalerror-oomerror-not-set.js @@ -20,7 +20,7 @@ const ARGS = [ // Verify that --report-on-fatalerror is respected when not set. const args = ARGS; const child = spawnSync(process.execPath, args, { cwd: tmpdir.path }); - assert.notStrictEqual(child.status, 0, 'Process exited unexpectedly'); + assert.notStrictEqual(child.status, 0); const reports = helper.findReports(child.pid, tmpdir.path); assert.strictEqual(reports.length, 0); } diff --git a/test/report/test-report-fatalerror-oomerror-set.js b/test/report/test-report-fatalerror-oomerror-set.js index 1a05f83d4e3b50..3363ada155f3c0 100644 --- a/test/report/test-report-fatalerror-oomerror-set.js +++ b/test/report/test-report-fatalerror-oomerror-set.js @@ -24,7 +24,7 @@ const REPORT_FIELDS = [ tmpdir.refresh(); const args = ['--report-on-fatalerror', ...ARGS]; const child = spawnSync(process.execPath, args, { cwd: tmpdir.path }); - assert.notStrictEqual(child.status, 0, 'Process exited unexpectedly'); + assert.notStrictEqual(child.status, 0); const reports = helper.findReports(child.pid, tmpdir.path); assert.strictEqual(reports.length, 1); diff --git a/test/sequential/test-async-wrap-getasyncid.js b/test/sequential/test-async-wrap-getasyncid.js index a75207b66e6633..a49ef8b1768e96 100644 --- a/test/sequential/test-async-wrap-getasyncid.js +++ b/test/sequential/test-async-wrap-getasyncid.js @@ -62,6 +62,7 @@ const { getSystemErrorName } = require('util'); delete providers.SIGINTWATCHDOG; delete providers.WORKERHEAPSNAPSHOT; delete providers.WORKERHEAPSTATISTICS; + delete providers.WORKERCPUUSAGE; delete providers.BLOBREADER; delete providers.RANDOMPRIMEREQUEST; delete providers.CHECKPRIMEREQUEST; diff --git a/test/sequential/test-cpu-prof-name.js b/test/sequential/test-cpu-prof-name.js index 3f1c6945c5436f..f40e639e92f624 100644 --- a/test/sequential/test-cpu-prof-name.js +++ b/test/sequential/test-cpu-prof-name.js @@ -8,6 +8,8 @@ const fixtures = require('../common/fixtures'); common.skipIfInspectorDisabled(); const assert = require('assert'); +const fs = require('fs'); +const path = require('path'); const { spawnSync } = require('child_process'); const tmpdir = require('../common/tmpdir'); @@ -41,3 +43,36 @@ const { assert.deepStrictEqual(profiles, [file]); verifyFrames(output, file, 'fibonacci.js'); } + +// --cpu-prof-name with ${pid} placeholder +{ + tmpdir.refresh(); + // eslint-disable-next-line no-template-curly-in-string + const profName = 'CPU.${pid}.cpuprofile'; + const dir = tmpdir.path; + + const output = spawnSync(process.execPath, [ + '--cpu-prof', + '--cpu-prof-interval', + kCpuProfInterval, + '--cpu-prof-name', 
+ profName, + fixtures.path('workload', 'fibonacci.js'), + ], { + cwd: dir, + env + }); + + if (output.status !== 0) { + console.error(output.stderr.toString()); + } + + assert.strictEqual(output.status, 0); + + const expectedFile = path.join(dir, `CPU.${output.pid}.cpuprofile`); + assert.ok(fs.existsSync(expectedFile), `Expected file ${expectedFile} not found.`); + + verifyFrames(output, expectedFile, 'fibonacci.js'); + + fs.unlinkSync(expectedFile); +} diff --git a/test/sequential/test-debugger-custom-port.js b/test/sequential/test-debugger-custom-port.js index 7c8abdc0c55174..5ecb91c07fef98 100644 --- a/test/sequential/test-debugger-custom-port.js +++ b/test/sequential/test-debugger-custom-port.js @@ -11,7 +11,7 @@ const assert = require('assert'); // Custom port. const script = fixtures.path('debugger', 'three-lines.js'); -const cli = startCLI([`--port=${common.PORT}`, script]); +const cli = startCLI([`--port=${common.PORT}`, script], [], {}, { randomPort: false }); (async function() { try { await cli.waitForInitialBreak(); diff --git a/test/sequential/test-debugger-invalid-args.js b/test/sequential/test-debugger-invalid-args.js index a0c42cac4d466a..83a7e253a3f956 100644 --- a/test/sequential/test-debugger-invalid-args.js +++ b/test/sequential/test-debugger-invalid-args.js @@ -9,7 +9,7 @@ const assert = require('assert'); // Launch CLI w/o args. (async () => { - const cli = startCLI([]); + const cli = startCLI([], [], {}, { randomPort: false }); const code = await cli.quit(); assert.strictEqual(code, 9); assert.match(cli.output, /^Usage:/, 'Prints usage info'); @@ -17,7 +17,7 @@ const assert = require('assert'); // Launch w/ invalid host:port. (async () => { - const cli = startCLI([`localhost:${common.PORT}`]); + const cli = startCLI([`localhost:${common.PORT}`], [], {}, { randomPort: false }); const code = await cli.quit(); assert.match( cli.output, diff --git a/test/sequential/test-debugger-launch.mjs b/test/sequential/test-debugger-launch.mjs index d501d0f9eb21eb..b68fc9b983359e 100644 --- a/test/sequential/test-debugger-launch.mjs +++ b/test/sequential/test-debugger-launch.mjs @@ -8,7 +8,7 @@ import startCLI from '../common/debugger.js'; import assert from 'assert'; const script = path('debugger', 'three-lines.js'); -const cli = startCLI([script]); +const cli = startCLI([script], [], {}, { randomPort: false }); try { await cli.waitForInitialBreak(); await cli.waitForPrompt(); diff --git a/test/sequential/test-debugger-pid.js b/test/sequential/test-debugger-pid.js index 99062149dfe337..972c8448bc7440 100644 --- a/test/sequential/test-debugger-pid.js +++ b/test/sequential/test-debugger-pid.js @@ -13,7 +13,7 @@ const script = fixtures.path('debugger', 'alive.js'); const runTest = async () => { const target = spawn(process.execPath, [script]); - const cli = startCLI(['-p', `${target.pid}`]); + const cli = startCLI(['-p', `${target.pid}`], [], {}, { randomPort: false }); try { await cli.waitForPrompt(); diff --git a/test/sequential/test-inspector-network-resource.js b/test/sequential/test-inspector-network-resource.js new file mode 100644 index 00000000000000..73620866931c24 --- /dev/null +++ b/test/sequential/test-inspector-network-resource.js @@ -0,0 +1,181 @@ +// Flags: --inspect=0 --experimental-network-inspection +'use strict'; +const common = require('../common'); + +common.skipIfInspectorDisabled(); + +const { NodeInstance } = require('../common/inspector-helper'); +const test = require('node:test'); +const assert = require('node:assert'); +const path = require('path'); +const 
fs = require('fs'); + +const resourceUrl = 'http://localhost:3000/app.js'; +const resourcePath = path.join(__dirname, '../fixtures/inspector-network-resource/app.js.map'); + +const resourceText = fs.readFileSync(resourcePath, 'utf8'); +const embedPath = resourcePath.replace(/\\/g, '\\\\').replace(/'/g, "\\'"); +const script = ` + const { NetworkResources } = require('node:inspector'); + const fs = require('fs'); + NetworkResources.put('${resourceUrl}', fs.readFileSync('${embedPath}', 'utf8')); + console.log('Network resource loaded:', '${resourceUrl}'); + debugger; +`; + +async function setupSessionAndPauseAtEvalLastLine(script) { + const instance = new NodeInstance([ + '--inspect-wait=0', + '--experimental-inspector-network-resource', + ], script); + const session = await instance.connectInspectorSession(); + await session.send({ method: 'NodeRuntime.enable' }); + await session.waitForNotification('NodeRuntime.waitingForDebugger'); + await session.send({ method: 'Runtime.enable' }); + await session.send({ method: 'Debugger.enable' }); + await session.send({ method: 'Runtime.runIfWaitingForDebugger' }); + await session.waitForNotification('Debugger.paused'); + return { instance, session }; +} + +test('should load and stream a static network resource using loadNetworkResource and IO.read', async () => { + const { session } = await setupSessionAndPauseAtEvalLastLine(script); + const { resource } = await session.send({ + method: 'Network.loadNetworkResource', + params: { url: resourceUrl }, + }); + assert(resource.success, 'Resource should be loaded successfully'); + assert(resource.stream, 'Resource should have a stream handle'); + let result = await session.send({ method: 'IO.read', params: { handle: resource.stream } }); + let data = result.data; + let eof = result.eof; + let content = ''; + while (!eof) { + content += data; + result = await session.send({ method: 'IO.read', params: { handle: resource.stream } }); + data = result.data; + eof = result.eof; + } + content += data; + assert.strictEqual(content, resourceText); + await session.send({ method: 'IO.close', params: { handle: resource.stream } }); + await session.send({ method: 'Debugger.resume' }); + await session.waitForDisconnect(); +}); + +test('should return success: false for missing resource', async () => { + const { session } = await setupSessionAndPauseAtEvalLastLine(script); + const { resource } = await session.send({ + method: 'Network.loadNetworkResource', + params: { url: 'http://localhost:3000/does-not-exist.js' }, + }); + assert.strictEqual(resource.success, false); + assert(!resource.stream, 'No stream should be returned for missing resource'); + await session.send({ method: 'Debugger.resume' }); + await session.waitForDisconnect(); +}); + +test('should error or return empty for wrong stream id', async () => { + const { session } = await setupSessionAndPauseAtEvalLastLine(script); + const { resource } = await session.send({ + method: 'Network.loadNetworkResource', + params: { url: resourceUrl }, + }); + assert(resource.success); + const bogus = '999999'; + const result = await session.send({ method: 'IO.read', params: { handle: bogus } }); + assert(result.eof, 'Should be eof for bogus stream id'); + assert.strictEqual(result.data, ''); + await session.send({ method: 'IO.close', params: { handle: resource.stream } }); + await session.send({ method: 'Debugger.resume' }); + await session.waitForDisconnect(); +}); + +test('should support IO.read with size and offset', async () => { + const { session } = await 
setupSessionAndPauseAtEvalLastLine(script); + const { resource } = await session.send({ + method: 'Network.loadNetworkResource', + params: { url: resourceUrl }, + }); + assert(resource.success); + assert(resource.stream); + let result = await session.send({ method: 'IO.read', params: { handle: resource.stream, size: 5 } }); + assert.strictEqual(result.data, resourceText.slice(0, 5)); + result = await session.send({ method: 'IO.read', params: { handle: resource.stream, offset: 5, size: 5 } }); + assert.strictEqual(result.data, resourceText.slice(5, 10)); + result = await session.send({ method: 'IO.read', params: { handle: resource.stream, offset: 10 } }); + assert.strictEqual(result.data, resourceText.slice(10)); + await session.send({ method: 'IO.close', params: { handle: resource.stream } }); + await session.send({ method: 'Debugger.resume' }); + await session.waitForDisconnect(); +}); + +test('should load resource put from another thread', async () => { + const workerScript = ` + console.log('this is worker thread'); + debugger; + `; + const script = ` + const { NetworkResources } = require('node:inspector'); + const fs = require('fs'); + NetworkResources.put('${resourceUrl}', fs.readFileSync('${embedPath}', 'utf8')); + const { Worker } = require('worker_threads'); + const worker = new Worker(\`${workerScript}\`, {eval: true}); + `; + const instance = new NodeInstance([ + '--experimental-inspector-network-resource', + '--experimental-worker-inspection', + '--inspect-brk=0', + ], script); + const session = await instance.connectInspectorSession(); + await setupInspector(session); + await session.waitForNotification('Debugger.paused'); + await session.send({ method: 'Debugger.resume' }); + await session.waitForNotification('Target.targetCreated'); + await session.send({ method: 'Target.setAutoAttach', params: { autoAttach: true, waitForDebuggerOnStart: true } }); + let sessionId; + await session.waitForNotification((notification) => { + if (notification.method === 'Target.attachedToTarget') { + sessionId = notification.params.sessionId; + return true; + } + return false; + + }); + await setupInspector(session, sessionId); + + await session.waitForNotification('Debugger.paused'); + + const { resource } = await session.send({ + method: 'Network.loadNetworkResource', + params: { url: resourceUrl, sessionId }, + }); + + assert(resource.success, 'Resource should be loaded successfully'); + assert(resource.stream, 'Resource should have a stream handle'); + let result = await session.send({ method: 'IO.read', params: { handle: resource.stream, sessionId } }); + let data = result.data; + let eof = result.eof; + let content = ''; + while (!eof) { + content += data; + result = await session.send({ method: 'IO.read', params: { handle: resource.stream, sessionId } }); + data = result.data; + eof = result.eof; + } + content += data; + assert.strictEqual(content, resourceText); + await session.send({ method: 'IO.close', params: { handle: resource.stream, sessionId } }); + + await session.send({ method: 'Debugger.resume', sessionId }); + + await session.waitForDisconnect(); + + async function setupInspector(session, sessionId) { + await session.send({ method: 'NodeRuntime.enable', sessionId }); + await session.waitForNotification('NodeRuntime.waitingForDebugger'); + await session.send({ method: 'Runtime.enable', sessionId }); + await session.send({ method: 'Debugger.enable', sessionId }); + await session.send({ method: 'Runtime.runIfWaitingForDebugger', sessionId }); + } +}); diff --git 
a/test/sequential/test-tls-session-timeout.js b/test/sequential/test-tls-session-timeout.js index a93cdc793a2337..14baabd7a64b7d 100644 --- a/test/sequential/test-tls-session-timeout.js +++ b/test/sequential/test-tls-session-timeout.js @@ -35,34 +35,13 @@ const assert = require('assert'); const tls = require('tls'); const fixtures = require('../common/fixtures'); -const key = fixtures.readKey('rsa_private.pem'); -const cert = fixtures.readKey('rsa_cert.crt'); - -{ - // Node.js should not allow setting negative timeouts since new versions of - // OpenSSL do not handle those as users might expect - - for (const sessionTimeout of [-1, -100, -(2 ** 31)]) { - assert.throws(() => { - tls.createServer({ - key: key, - cert: cert, - ca: [cert], - sessionTimeout, - maxVersion: 'TLSv1.2', - }); - }, { - code: 'ERR_OUT_OF_RANGE', - message: 'The value of "options.sessionTimeout" is out of range. It ' + - `must be >= 0 && <= ${2 ** 31 - 1}. Received ${sessionTimeout}`, - }); - } -} - if (!opensslCli) { common.skip('node compiled without OpenSSL CLI.'); } +const key = fixtures.readKey('rsa_private.pem'); +const cert = fixtures.readKey('rsa_cert.crt'); + doTest(); // This test consists of three TLS requests -- @@ -77,7 +56,7 @@ function doTest() { const fs = require('fs'); const spawn = require('child_process').spawn; - const SESSION_TIMEOUT = 1; + const SESSION_TIMEOUT = 5; const options = { key: key, @@ -85,32 +64,26 @@ function doTest() { ca: [cert], sessionTimeout: SESSION_TIMEOUT, maxVersion: 'TLSv1.2', + sessionIdContext: 'test-session-timeout', }; - // We need to store a sample session ticket in the fixtures directory because - // `s_client` behaves incorrectly if we do not pass in both the `-sess_in` - // and the `-sess_out` flags, and the `-sess_in` argument must point to a - // file containing a proper serialization of a session ticket. - // To avoid a source control diff, we copy the ticket to a temporary file. - - const sessionFileName = (function() { - const ticketFileName = 'tls-session-ticket.txt'; - const tmpPath = tmpdir.resolve(ticketFileName); - fs.writeFileSync(tmpPath, fixtures.readSync(ticketFileName)); - return tmpPath; - }()); - - // Expects a callback -- cb(connectionType : enum ['New'|'Reused']) - - function Client(cb) { + const sessionFileName = tmpdir.resolve('tls-session-ticket.txt'); + // Expects a callback -- cb() + function Client(port, sessIn, sessOut, expectedType, cb) { const flags = [ 's_client', - '-connect', `localhost:${common.PORT}`, - '-sess_in', sessionFileName, - '-sess_out', sessionFileName, + '-connect', `localhost:${port}`, + '-CAfile', fixtures.path('keys', 'rsa_cert.crt'), + '-servername', 'localhost', ]; + if (sessIn) { + flags.push('-sess_in', sessIn); + } + if (sessOut) { + flags.push('-sess_out', sessOut); + } const client = spawn(opensslCli, flags, { - stdio: ['ignore', 'pipe', 'ignore'] + stdio: ['ignore', 'pipe', 'inherit'] }); let clientOutput = ''; @@ -119,6 +92,20 @@ function doTest() { }); client.on('exit', (code) => { let connectionType; + // Log the output for debugging purposes. Don't remove them or otherwise + // the CI output is useless when this test flakes. 
+ console.log(' ----- [COMMAND] ---'); + console.log(`${opensslCli}, ${flags.join(' ')}`); + console.log(' ----- [STDOUT] ---'); + console.log(clientOutput); + console.log(' ----- [SESSION FILE] ---'); + try { + const stat = fs.statSync(sessionFileName); + console.log(`Session file size: ${stat.size} bytes`); + } catch (err) { + console.log('Error reading session file:', err); + } + const grepConnectionType = (line) => { const matches = line.match(/(New|Reused), /); if (matches) { @@ -131,6 +118,7 @@ function doTest() { throw new Error('unexpected output from openssl client'); } assert.strictEqual(code, 0); + assert.strictEqual(connectionType, expectedType); cb(connectionType); }); } @@ -143,18 +131,18 @@ function doTest() { cleartext.end(); }); - server.listen(common.PORT, () => { - Client((connectionType) => { - assert.strictEqual(connectionType, 'New'); - Client((connectionType) => { - assert.strictEqual(connectionType, 'Reused'); - setTimeout(() => { - Client((connectionType) => { - assert.strictEqual(connectionType, 'New'); - server.close(); - }); - }, (SESSION_TIMEOUT + 1) * 1000); - }); + server.listen(0, () => { + const port = server.address().port; + Client(port, undefined, sessionFileName, 'New', () => { + setTimeout(() => { + Client(port, sessionFileName, sessionFileName, 'Reused', () => { + setTimeout(() => { + Client(port, sessionFileName, sessionFileName, 'New', () => { + server.close(); + }); + }, (SESSION_TIMEOUT + 1) * 1000); + }); + }, 100); // Wait a bit to ensure the session ticket is saved. }); }); } diff --git a/test/system-ca/test-native-root-certs-env.mjs b/test/system-ca/test-native-root-certs-env.mjs new file mode 100644 index 00000000000000..bde7dfcd9610bc --- /dev/null +++ b/test/system-ca/test-native-root-certs-env.mjs @@ -0,0 +1,56 @@ +// Env: NODE_USE_SYSTEM_CA=1 +// Same as test-native-root-certs.mjs, just testing the environment variable instead of the flag. + +import * as common from '../common/index.mjs'; +import assert from 'node:assert/strict'; +import https from 'node:https'; +import fixtures from '../common/fixtures.js'; +import { it, beforeEach, afterEach, describe } from 'node:test'; +import { once } from 'events'; + +if (!common.hasCrypto) { + common.skip('requires crypto'); +} + +// To run this test, the system needs to be configured to trust +// the CA certificate first (which needs an interactive GUI approval, e.g. TouchID): +// see the README.md in this folder for instructions on how to do this. 
+const handleRequest = (req, res) => { + const path = req.url; + switch (path) { + case '/hello-world': + res.writeHead(200); + res.end('hello world\n'); + break; + default: + assert(false, `Unexpected path: ${path}`); + } +}; + +describe('use-system-ca', function() { + + async function setupServer(key, cert) { + const theServer = https.createServer({ + key: fixtures.readKey(key), + cert: fixtures.readKey(cert), + }, handleRequest); + theServer.listen(0); + await once(theServer, 'listening'); + + return theServer; + } + + let server; + + beforeEach(async function() { + server = await setupServer('agent8-key.pem', 'agent8-cert.pem'); + }); + + it('trusts a valid root certificate', async function() { + await fetch(`https://localhost:${server.address().port}/hello-world`); + }); + + afterEach(async function() { + server?.close(); + }); +}); diff --git a/test/system-ca/test-set-default-ca-certificates-append-system-ca.mjs b/test/system-ca/test-set-default-ca-certificates-append-system-ca.mjs new file mode 100644 index 00000000000000..491f62ce3dd8a8 --- /dev/null +++ b/test/system-ca/test-set-default-ca-certificates-append-system-ca.mjs @@ -0,0 +1,49 @@ +// Flags: --no-use-system-ca + +// This tests that tls.setDefaultCACertificates() can be used to remove +// system CA certificates from the default CA store. +// To run this test, install the certificates as described in README.md + +import * as common from '../common/index.mjs'; +import assert from 'node:assert/strict'; +import fixtures from '../common/fixtures.js'; +import { once } from 'events'; + +if (!common.hasCrypto) { + common.skip('requires crypto'); +} + +const { default: https } = await import('node:https'); +const { default: tls } = await import('node:tls'); + +const server = https.createServer({ + cert: fixtures.readKey('agent8-cert.pem'), + key: fixtures.readKey('agent8-key.pem'), +}, common.mustCall((req, res) => { + res.writeHead(200); + res.end('hello world'); +}, 1)); +server.listen(0); +await once(server, 'listening'); + +const url = `https://localhost:${server.address().port}/hello-world`; + +// First attempt should fail without system certificates. +await assert.rejects( + fetch(url), + (err) => { + assert.strictEqual(err.cause.code, 'UNABLE_TO_VERIFY_LEAF_SIGNATURE'); + return true; + }, +); + +// Now enable system CA certificates +tls.setDefaultCACertificates(tls.getCACertificates('system')); + +// Second attempt should succeed. +const response = await fetch(url); +assert.strictEqual(response.status, 200); +const text = await response.text(); +assert.strictEqual(text, 'hello world'); + +server.close(); diff --git a/test/system-ca/test-set-default-ca-certificates-override-system.mjs b/test/system-ca/test-set-default-ca-certificates-override-system.mjs new file mode 100644 index 00000000000000..9a791bd34490c5 --- /dev/null +++ b/test/system-ca/test-set-default-ca-certificates-override-system.mjs @@ -0,0 +1,87 @@ +// Flags: --use-system-ca + + +// This tests that tls.setDefaultCACertificates() can be used to dynamically +// enable system CA certificates for HTTPS connections. 
+// To run this test, install the certificates as described in README.md + +import * as common from '../common/index.mjs'; +import assert from 'node:assert/strict'; +import fixtures from '../common/fixtures.js'; +import { once } from 'events'; +import { includesCert, assertEqualCerts } from '../common/tls.js'; + +if (!common.hasCrypto) { + common.skip('requires crypto'); +} + +const { default: https } = await import('node:https'); +const { default: tls } = await import('node:tls'); + +// Verify that system CA includes the fake-startcom-root-cert. +const systemCerts = tls.getCACertificates('system'); +const fixturesCert = fixtures.readKey('fake-startcom-root-cert.pem'); +if (!includesCert(systemCerts, fixturesCert)) { + common.skip('fake-startcom-root-cert.pem not found in system CA store. ' + + 'Please follow setup instructions in test/system-ca/README.md'); +} +const bundledCerts = tls.getCACertificates('bundled'); +if (includesCert(bundledCerts, fixturesCert)) { + common.skip('fake-startcom-root-cert.pem should not be in bundled CA store'); +} + +const server = https.createServer({ + cert: fixtures.readKey('agent8-cert.pem'), + key: fixtures.readKey('agent8-key.pem'), +}, common.mustCall((req, res) => { + const path = req.url; + switch (path) { + case '/system-ca-test': + res.writeHead(200); + res.end('system ca works'); + break; + case '/bundled-ca-test': + res.writeHead(200); + res.end('bundled ca works'); + break; + default: + assert(false, `Unexpected path: ${path}`); + } +}, 1)); + + +server.listen(0); +await once(server, 'listening'); + +const url = `https://localhost:${server.address().port}`; + +// First, verify connection works with system CA (including fake-startcom-root-cert) +const response1 = await fetch(`${url}/system-ca-test`); +assert.strictEqual(response1.status, 200); +const text1 = await response1.text(); +assert.strictEqual(text1, 'system ca works'); + +// Now override with bundled certs (which do not include fake-startcom-root-cert) +tls.setDefaultCACertificates(bundledCerts); + +// Connection should now fail because fake-startcom-root-cert is no longer in the CA store. +// Use IP address to skip session cache. +await assert.rejects( + fetch(`https://127.0.0.1:${server.address().port}/bundled-ca-test`), + (err) => { + assert.strictEqual(err.cause.code, 'SELF_SIGNED_CERT_IN_CHAIN'); + return true; + }, +); + +// Verify that system CA type still returns original system certs +const stillSystemCerts = tls.getCACertificates('system'); +assertEqualCerts(stillSystemCerts, systemCerts); +assert(includesCert(stillSystemCerts, fixturesCert)); + +// Verify that default CA now returns bundled certs +const currentDefaults = tls.getCACertificates('default'); +assertEqualCerts(currentDefaults, bundledCerts); +assert(!includesCert(currentDefaults, fixturesCert)); + +server.close(); diff --git a/test/system-ca/test-set-default-ca-certificates-system-combinations.js b/test/system-ca/test-set-default-ca-certificates-system-combinations.js new file mode 100644 index 00000000000000..b6873053e8b9d7 --- /dev/null +++ b/test/system-ca/test-set-default-ca-certificates-system-combinations.js @@ -0,0 +1,58 @@ +// Flags: --use-system-ca + +// This tests various combinations of CA certificates with +// tls.setDefaultCACertificates(). 
+ +'use strict'; + +const common = require('../common'); +if (!common.hasCrypto) common.skip('missing crypto'); + +const tls = require('tls'); +const { assertEqualCerts } = require('../common/tls'); +const fixtures = require('../common/fixtures'); + +const fixtureCert = fixtures.readKey('fake-startcom-root-cert.pem'); +const originalBundled = tls.getCACertificates('bundled'); +const originalSystem = tls.getCACertificates('system'); + +function testSetCertificates(certs) { + // Test setting it can be verified with tls.getCACertificates(). + tls.setDefaultCACertificates(certs); + const result = tls.getCACertificates('default'); + assertEqualCerts(result, certs); + + // Verify that other certificate types are unchanged + const newBundled = tls.getCACertificates('bundled'); + const newSystem = tls.getCACertificates('system'); + assertEqualCerts(newBundled, originalBundled); + assertEqualCerts(newSystem, originalSystem); + + // Test implicit defaults. + const implicitDefaults = tls.getCACertificates(); + assertEqualCerts(implicitDefaults, certs); + + // Test cached results. + const cachedResult = tls.getCACertificates('default'); + assertEqualCerts(cachedResult, certs); + const cachedImplicitDefaults = tls.getCACertificates(); + assertEqualCerts(cachedImplicitDefaults, certs); + + // Test system CA certificates are not affected. + const systemCerts = tls.getCACertificates('system'); + assertEqualCerts(systemCerts, originalSystem); +} + +// Test setting with fixture certificate. +testSetCertificates([fixtureCert]); + +// Test setting with empty array. +testSetCertificates([]); + +// Test setting with bundled certificates +testSetCertificates(originalBundled); + +// Test setting with a subset of bundled certificates +if (originalBundled.length >= 3) { + testSetCertificates(originalBundled.slice(0, 3)); +} diff --git a/test/wpt/status/WebCryptoAPI.cjs b/test/wpt/status/WebCryptoAPI.cjs index 709d34b8f47c40..7e4639ca497ba0 100644 --- a/test/wpt/status/WebCryptoAPI.cjs +++ b/test/wpt/status/WebCryptoAPI.cjs @@ -26,4 +26,20 @@ module.exports = { ], }, }, + 'getRandomValues.any.js': { + 'fail': { + 'note': 'https://github.com/nodejs/node/issues/58987', + 'expected': [ + 'Large length: Int8Array', + 'Large length: Int16Array', + 'Large length: Int32Array', + 'Large length: BigInt64Array', + 'Large length: Uint8Array', + 'Large length: Uint8ClampedArray', + 'Large length: Uint16Array', + 'Large length: Uint32Array', + 'Large length: BigUint64Array', + ], + }, + }, }; diff --git a/test/wpt/status/webstorage.json b/test/wpt/status/webstorage.json index 10171601480aad..1dad4e2dd48403 100644 --- a/test/wpt/status/webstorage.json +++ b/test/wpt/status/webstorage.json @@ -22,5 +22,30 @@ }, "storage_session_window_reopen.window.js": { "skip": "window.open() is not supported in Node.js." 
+ }, + "storage_session_setitem_quotaexceedederr.window.js": { + "fail": { + "note": "https://github.com/nodejs/node/issues/58987", + "expected": [ + "Throws QuotaExceededError when the quota has been exceeded" + ] + } + }, + "storage_local_setitem_quotaexceedederr.window.js": { + "fail": { + "note": "https://github.com/nodejs/node/issues/58987", + "expected": [ + "Throws QuotaExceededError when the quota has been exceeded" + ] + } + }, + "symbol-props.window.js": { + "fail": { + "note": "https://github.com/nodejs/node/issues/59310", + "expected": [ + "localStorage: defineProperty not configurable", + "sessionStorage: defineProperty not configurable" + ] + } } } diff --git a/tools/actions/lint-release-proposal-commit-list.mjs b/tools/actions/lint-release-proposal-commit-list.mjs index b6ec7213bafc0a..b5104a6503e489 100755 --- a/tools/actions/lint-release-proposal-commit-list.mjs +++ b/tools/actions/lint-release-proposal-commit-list.mjs @@ -34,6 +34,7 @@ if (commitListingStart === -1) { // Normalize for consistent comparison commitList = commitList .replaceAll('**(SEMVER-MINOR)** ', '') + .replaceAll(/(?<= - )\*\*\(CVE-\d{4}-\d+\)\*\* (?=\*\*)/g, '') .replaceAll('\\', ''); let expectedNumberOfCommitsLeft = commitList.match(/\n\* \[/g)?.length ?? 0; diff --git a/tools/actions/merge.sh b/tools/actions/merge.sh index 35d1e033501ba4..45947f64c2a4db 100755 --- a/tools/actions/merge.sh +++ b/tools/actions/merge.sh @@ -6,13 +6,12 @@ # To land a PR with this tool: # 1. Run `git node land --fixupAll` # 2. Copy the hash of the commit at the top of the PR branch. -# 3. Run `tools/actions/merge.sh `. +# 3. Run `tools/actions/merge.sh ` or `tools/actions/merge.sh `. set -xe pr=$1 commit_head=$2 -shift 2 || { echo "Expected two arguments"; exit 1; } OWNER=nodejs REPOSITORY=node @@ -20,11 +19,33 @@ REPOSITORY=node if expr "X$pr" : 'Xhttps://github.com/[^/]\{1,\}/[^/]\{1,\}/pull/[0-9]\{1,\}' >/dev/null; then OWNER="$(echo "$pr" | awk 'BEGIN { FS = "/" } ; { print $4 }')" REPOSITORY="$(echo "$pr" | awk 'BEGIN { FS = "/" } ; { print $5 }')" + [ -n "$commit_head" ] || commit_head="$(echo "$pr" | awk 'BEGIN { FS = "/" } ; { print $9 }')" pr="$(echo "$pr" | awk 'BEGIN { FS = "/" } ; { print $7 }')" -elif ! expr "X$pr" : 'X[0-9]\{1,\}' >/dev/null; then - echo "The first argument should be the PR ID or URL" fi +validation_error= +if ! expr "X${pr}X" : 'X[0-9]\{1,\}X' >/dev/null; then + set +x + echo "Invalid PR ID: $pr" + validation_error=1 +fi +if ! 
expr "X${commit_head}X" : 'X[a-f0-9]\{40\}X' >/dev/null; then + set +x + echo "Invalid PR head: $commit_head" + validation_error=1 +fi +[ -z "$validation_error" ] || { + echo 'Usage:' + printf '\t%s \n' "$0" + echo 'or:' + printf '\t%s \n' "$0" + echo 'Examples:' + printf '\t%s 12345 aaaaabbbbbcccccdddddeeeeefffff1111122222\n' "$0" + printf '\t%s https://github.com/%s/pull/12345 aaaaabbbbbcccccdddddeeeeefffff1111122222\n' "$0" "$OWNER/$REPOSITORY" + printf '\t%s https://github.com/%s/pull/12345/commits/aaaaabbbbbcccccdddddeeeeefffff1111122222\n' "$0" "$OWNER/$REPOSITORY" + exit 1 +} + git log -1 HEAD --pretty='format:%B' | git interpret-trailers --parse --no-divider | \ grep -q -x "^PR-URL: https://github.com/$OWNER/$REPOSITORY/pull/$pr$" || { echo "Invalid PR-URL trailer" diff --git a/tools/dep_updaters/update-openssl.sh b/tools/dep_updaters/update-openssl.sh index 06fbf266706d99..29d72e8700e704 100755 --- a/tools/dep_updaters/update-openssl.sh +++ b/tools/dep_updaters/update-openssl.sh @@ -11,7 +11,7 @@ cleanup() { download() { LATEST_TAG_NAME="$("$NODE" --input-type=module <<'EOF' -const res = await fetch('https://api.github.com/repos/openssl/openssl/git/matching-refs/tags/openssl-3.0'); +const res = await fetch('https://api.github.com/repos/openssl/openssl/git/matching-refs/tags/openssl-3.5'); if (!res.ok) throw new Error(`FetchError: ${res.status} ${res.statusText}`, { cause: res }); const releases = await res.json() const latest = releases.at(-1); @@ -64,10 +64,8 @@ EOF } regenerate() { - command -v perl >/dev/null 2>&1 || { echo >&2 "Error: 'Perl' required but not installed."; exit 1; } - command -v nasm >/dev/null 2>&1 || { echo >&2 "Error: 'nasm' required but not installed."; exit 1; } - command -v as >/dev/null 2>&1 || { echo >&2 "Error: 'GNU as' required but not installed."; exit 1; } - perl -e "use Text::Template">/dev/null 2>&1 || { echo >&2 "Error: 'Text::Template' Perl module required but not installed."; exit 1; } + command -v docker >/dev/null 2>&1 || { echo >&2 "Error: 'docker' required but not installed."; exit 1; } + command -v make >/dev/null 2>&1 || { echo >&2 "Error: 'make' required but not installed."; exit 1; } echo "Regenerating platform-dependent files..." @@ -76,7 +74,7 @@ regenerate() { # See https://github.com/nodejs/node/blob/main/doc/contributing/maintaining/maintaining-openssl.md#2-execute-make-in-depsopensslconfig-directory sed -i 's/#ifdef/%ifdef/g' "$DEPS_DIR/openssl/openssl/crypto/perlasm/x86asm.pl" sed -i 's/#endif/%endif/g' "$DEPS_DIR/openssl/openssl/crypto/perlasm/x86asm.pl" - make -C "$DEPS_DIR/openssl/config" + make -C "$BASE_DIR" gen-openssl echo "All done!" 
echo "" diff --git a/tools/eslint/package-lock.json b/tools/eslint/package-lock.json index 1f8a3f9a15e175..6f8f6ab2b93c9d 100644 --- a/tools/eslint/package-lock.json +++ b/tools/eslint/package-lock.json @@ -449,18 +449,30 @@ } }, "node_modules/@eslint/plugin-kit": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.3.1.tgz", - "integrity": "sha512-0J+zgWxHN+xXONWIyPWKFMgVuJoZuGiIFu8yxk7RJjxkzpGmyja5wRFqZIVtjDVOQpV+Rw0iOAjYPE2eQyjr0w==", + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.3.3.tgz", + "integrity": "sha512-1+WqvgNMhmlAambTvT3KPtCl/Ibr68VldY2XY40SL1CE0ZXiakFR/cbTspaF5HsnpDMvcYYoJHfl4980NBjGag==", "license": "Apache-2.0", "dependencies": { - "@eslint/core": "^0.14.0", + "@eslint/core": "^0.15.1", "levn": "^0.4.1" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" } }, + "node_modules/@eslint/plugin-kit/node_modules/@eslint/core": { + "version": "0.15.1", + "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.15.1.tgz", + "integrity": "sha512-bkOp+iumZCCbt1K1CmWf0R9pM5yKpDv+ZXtvSyQpudrI9kuFLp+bM2WOPXImuD/ceQuaa8f5pj93Y7zyECIGNA==", + "license": "Apache-2.0", + "dependencies": { + "@types/json-schema": "^7.0.15" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, "node_modules/@humanfs/core": { "version": "0.19.1", "resolved": "https://registry.npmjs.org/@humanfs/core/-/core-0.19.1.tgz", diff --git a/tools/lint-readme-lists.mjs b/tools/lint-readme-lists.mjs index 97992b79b1e32e..6ad74d6edf72fa 100755 --- a/tools/lint-readme-lists.mjs +++ b/tools/lint-readme-lists.mjs @@ -6,7 +6,7 @@ import assert from 'node:assert'; import { open } from 'node:fs/promises'; import { argv } from 'node:process'; -const ghHandleLine = /^\* \[(.+)\]\(https:\/\/github\.com\/\1\) -$/; +const ghHandleLine = /^\* \[(.+)\]\(https:\/\/github\.com\/(.+)\) -$/; const memberInfoLine = /^ {2}\*\*[^*]+\*\* <<[^@]+@.+\.[a-z]+>>( \(\w+(\/[^)/]+)+\))?( - \[Support me\]\(.+\))?$/; const lists = { @@ -59,9 +59,11 @@ for await (const line of readme.readLines()) { ); } - if (!ghHandleLine.test(line)) { - throw new Error(`${currentGithubHandle} is not formatted correctly (README.md:${lineNumber})`); + const match = line.match(ghHandleLine); + if (!match) { + throw new Error(`${line} should match ${ghHandleLine} (README.md:${lineNumber})`); } + assert.strictEqual(match[1], match[2], `GitHub handle does not match the URL (README.md:${lineNumber})`); if ( currentList === 'TSC voting members' || diff --git a/tools/msvs/msi/nodemsi/product.wxs b/tools/msvs/msi/nodemsi/product.wxs index ff66ade6816aaa..a0100cc564405f 100644 --- a/tools/msvs/msi/nodemsi/product.wxs +++ b/tools/msvs/msi/nodemsi/product.wxs @@ -41,7 +41,7 @@ - + diff --git a/tools/test.py b/tools/test.py index 83bb8c34db039e..0bf3a4fa691fea 100755 --- a/tools/test.py +++ b/tools/test.py @@ -1596,7 +1596,7 @@ def ArgsToTestPaths(test_root, args, suites): if len(args) == 0 or 'default' in args: def_suites = [s for s in suites if s not in IGNORED_SUITES] args = [a for a in args if a != 'default'] + def_suites - subsystem_regex = re.compile(r'^[a-zA-Z-]*$') + subsystem_regex = re.compile(r'^[a-zA-Z0-9-]*$') check = lambda arg: subsystem_regex.match(arg) and (arg not in suites) mapped_args = ["*/test*-%s-*" % arg if check(arg) else arg for arg in args] paths = [SplitPath(NormalizePath(a)) for a in mapped_args] diff --git a/typings/globals.d.ts b/typings/globals.d.ts index 3ae6835c6fdc78..703cd5755ffedd 100644 --- a/typings/globals.d.ts 
+++ b/typings/globals.d.ts
@@ -1,15 +1,20 @@
+import { AsyncContextFrameBinding } from './internalBinding/async_context_frame';
import { AsyncWrapBinding } from './internalBinding/async_wrap';
import { BlobBinding } from './internalBinding/blob';
import { ConfigBinding } from './internalBinding/config';
import { ConstantsBinding } from './internalBinding/constants';
import { DebugBinding } from './internalBinding/debug';
+import { EncodingBinding } from './internalBinding/encoding_binding';
import { HttpParserBinding } from './internalBinding/http_parser';
import { InspectorBinding } from './internalBinding/inspector';
import { FsBinding } from './internalBinding/fs';
import { FsDirBinding } from './internalBinding/fs_dir';
+import { ICUBinding } from './internalBinding/icu';
import { MessagingBinding } from './internalBinding/messaging';
import { OptionsBinding } from './internalBinding/options';
import { OSBinding } from './internalBinding/os';
+import { ProcessBinding } from './internalBinding/process';
+import { SeaBinding } from './internalBinding/sea';
import { SerdesBinding } from './internalBinding/serdes';
import { SymbolsBinding } from './internalBinding/symbols';
import { TimersBinding } from './internalBinding/timers';
@@ -22,19 +27,24 @@ import { ModulesBinding } from './internalBinding/modules';
import { ZlibBinding } from './internalBinding/zlib';

interface InternalBindingMap {
+  async_context_frame: AsyncContextFrameBinding;
  async_wrap: AsyncWrapBinding;
  blob: BlobBinding;
  config: ConfigBinding;
  constants: ConstantsBinding;
  debug: DebugBinding;
+  encoding_binding: EncodingBinding;
  fs: FsBinding;
  fs_dir: FsDirBinding;
  http_parser: HttpParserBinding;
+  icu: ICUBinding;
  inspector: InspectorBinding;
  messaging: MessagingBinding;
  modules: ModulesBinding;
  options: OptionsBinding;
  os: OSBinding;
+  process: ProcessBinding;
+  sea: SeaBinding;
  serdes: SerdesBinding;
  symbols: SymbolsBinding;
  timers: TimersBinding;
@@ -59,6 +69,7 @@ declare global {
    | Int8Array
    | Int16Array
    | Int32Array
+    | Float16Array
    | Float32Array
    | Float64Array
    | BigUint64Array
diff --git a/typings/internalBinding/async_context_frame.d.ts b/typings/internalBinding/async_context_frame.d.ts
new file mode 100644
index 00000000000000..e7387b57653fd1
--- /dev/null
+++ b/typings/internalBinding/async_context_frame.d.ts
@@ -0,0 +1,4 @@
+export interface AsyncContextFrameBinding {
+  getContinuationPreservedEmbedderData(): unknown,
+  setContinuationPreservedEmbedderData(frame: unknown): void,
+}
diff --git a/typings/internalBinding/constants.d.ts b/typings/internalBinding/constants.d.ts
index 75d2dcc65e8ea7..3c29df44c13303 100644
--- a/typings/internalBinding/constants.d.ts
+++ b/typings/internalBinding/constants.d.ts
@@ -287,6 +287,9 @@ export interface ConstantsBinding {
    BROTLI_ENCODE: 9;
    ZSTD_COMPRESS: 10;
    ZSTD_DECOMPRESS: 11;
+    ZSTD_e_continue: 0;
+    ZSTD_e_flush: 1;
+    ZSTD_e_end: 2;
    Z_MIN_WINDOWBITS: 8;
    Z_MAX_WINDOWBITS: 15;
    Z_DEFAULT_WINDOWBITS: 15;
diff --git a/typings/internalBinding/encoding_binding.d.ts b/typings/internalBinding/encoding_binding.d.ts
new file mode 100644
index 00000000000000..6e1d48efd81529
--- /dev/null
+++ b/typings/internalBinding/encoding_binding.d.ts
@@ -0,0 +1,8 @@
+export interface EncodingBinding {
+  encodeInto(source: string, dest: Uint8Array): void;
+  encodeUtf8String(str: string): Uint8Array;
+  decodeUTF8(buffer: ArrayBufferView | ArrayBuffer | SharedArrayBuffer, ignoreBOM?: boolean, hasFatal?: boolean): string;
+  toASCII(input: string): string;
+  toUnicode(input: string): string;
+  decodeLatin1(buffer: ArrayBufferView | ArrayBuffer | SharedArrayBuffer, ignoreBOM?: boolean, hasFatal?: boolean): string;
+}
diff --git a/typings/internalBinding/http_parser.d.ts b/typings/internalBinding/http_parser.d.ts
index 5ab5651b637176..124bdd5af2f152 100644
--- a/typings/internalBinding/http_parser.d.ts
+++ b/typings/internalBinding/http_parser.d.ts
@@ -2,6 +2,15 @@ declare namespace InternalHttpParserBinding {
  type Buffer = Uint8Array;
  type Stream = object;

+  class ConnectionsList {
+    constructor();
+
+    all(): HTTPParser[];
+    idle(): HTTPParser[];
+    active(): HTTPParser[];
+    expired(): HTTPParser[];
+  }
+
  class HTTPParser {
    static REQUEST: 1;
    static RESPONSE: 2;
@@ -40,6 +49,8 @@ declare namespace InternalHttpParserBinding {
}

export interface HttpParserBinding {
-  methods: string[];
+  ConnectionsList: typeof InternalHttpParserBinding.ConnectionsList;
  HTTPParser: typeof InternalHttpParserBinding.HTTPParser;
+  allMethods: string[];
+  methods: string[];
}
diff --git a/typings/internalBinding/icu.d.ts b/typings/internalBinding/icu.d.ts
new file mode 100644
index 00000000000000..7a688a5f08233e
--- /dev/null
+++ b/typings/internalBinding/icu.d.ts
@@ -0,0 +1,18 @@
+export interface ICUBinding {
+  Converter: object;
+  decode(
+    converter: object,
+    input: ArrayBufferView | ArrayBuffer | SharedArrayBuffer,
+    flags: number,
+    fromEncoding: string,
+  ): string;
+  getConverter(label: string, flags: number): object | undefined;
+  getStringWidth(value: string, ambiguousAsFullWidth?: boolean, expandEmojiSequence?: boolean): number;
+  hasConverter(label: string): boolean;
+  icuErrName(status: number): string;
+  transcode(
+    input: ArrayBufferView | ArrayBuffer | SharedArrayBuffer,
+    fromEncoding: string,
+    toEncoding: string,
+  ): Buffer | number;
+}
diff --git a/typings/internalBinding/inspector.d.ts b/typings/internalBinding/inspector.d.ts
index ab32838b2366ca..c0ee57774d87dd 100644
--- a/typings/internalBinding/inspector.d.ts
+++ b/typings/internalBinding/inspector.d.ts
@@ -33,4 +33,5 @@ export interface InspectorBinding {
  console: Console;
  Connection: InspectorConnectionConstructor;
  MainThreadConnection: InspectorConnectionConstructor;
+  putNetworkResource: (url: string, resource: string) => void;
}
diff --git a/typings/internalBinding/process.d.ts b/typings/internalBinding/process.d.ts
new file mode 100644
index 00000000000000..99280fd43ba4d4
--- /dev/null
+++ b/typings/internalBinding/process.d.ts
@@ -0,0 +1,15 @@
+interface CpuUsageValue {
+  user: number;
+  system: number;
+}
+
+declare namespace InternalProcessBinding {
+  interface Process {
+    cpuUsage(previousValue?: CpuUsageValue): CpuUsageValue;
+    threadCpuUsage(previousValue?: CpuUsageValue): CpuUsageValue;
+  }
+}
+
+export interface ProcessBinding {
+  process: InternalProcessBinding.Process;
+}
diff --git a/typings/internalBinding/sea.d.ts b/typings/internalBinding/sea.d.ts
new file mode 100644
index 00000000000000..15f4430d87d1b4
--- /dev/null
+++ b/typings/internalBinding/sea.d.ts
@@ -0,0 +1,5 @@
+export interface SeaBinding {
+  getAsset(key: string): ArrayBuffer | undefined;
+  isExperimentalSeaWarningNeeded(): boolean;
+  isSea(): boolean;
+}
diff --git a/typings/internalBinding/symbols.d.ts b/typings/internalBinding/symbols.d.ts
index 96310970d5cdee..8f754b283c0351 100644
--- a/typings/internalBinding/symbols.d.ts
+++ b/typings/internalBinding/symbols.d.ts
@@ -1,5 +1,6 @@
export const async_id_symbol: unique symbol;
export const handle_onclose_symbol: unique symbol;
+export const imported_cjs_symbol: unique symbol;
export const no_message_symbol: unique symbol;
export const messaging_deserialize_symbol: unique symbol;
export const messaging_transfer_symbol: unique symbol;
@@ -13,6 +14,7 @@ export const trigger_async_id_symbol: unique symbol;
export interface SymbolsBinding {
  async_id_symbol: typeof async_id_symbol;
  handle_onclose_symbol: typeof handle_onclose_symbol;
+  imported_cjs_symbol: typeof imported_cjs_symbol;
  no_message_symbol: typeof no_message_symbol;
  messaging_deserialize_symbol: typeof messaging_deserialize_symbol;
  messaging_transfer_symbol: typeof messaging_transfer_symbol;
diff --git a/typings/internalBinding/util.d.ts b/typings/internalBinding/util.d.ts
index 34b4b31c1d8abd..2a68699283debe 100644
--- a/typings/internalBinding/util.d.ts
+++ b/typings/internalBinding/util.d.ts
@@ -45,4 +45,23 @@ export interface UtilBinding {
  guessHandleType(fd: number): 'TCP' | 'TTY' | 'UDP' | 'FILE' | 'PIPE' | 'UNKNOWN';
  parseEnv(content: string): Record;
  styleText(format: Array | string, text: string): string;
+  isInsideNodeModules(frameLimit: number, defaultValue: unknown): boolean;
+
+  constants: {
+    kPending: 0;
+    kFulfilled: 1;
+    kRejected: 2;
+    kExiting: 0;
+    kExitCode: 1;
+    kHasExitCode: 2;
+    ALL_PROPERTIES: 0;
+    ONLY_WRITABLE: 1;
+    ONLY_ENUMERABLE: 2;
+    ONLY_CONFIGURABLE: 4;
+    SKIP_STRINGS: 8;
+    SKIP_SYMBOLS: 16;
+    kDisallowCloneAndTransfer: 0;
+    kTransferable: 1;
+    kCloneable: 2;
+  };
}
diff --git a/typings/internalBinding/worker.d.ts b/typings/internalBinding/worker.d.ts
index 0a316aaf5e5bff..9dfd9776cec41c 100644
--- a/typings/internalBinding/worker.d.ts
+++ b/typings/internalBinding/worker.d.ts
@@ -16,6 +16,7 @@ declare namespace InternalWorkerBinding {
    getResourceLimits(): Float64Array;
    takeHeapSnapshot(): object;
    getHeapStatistics(): Promise;
+    cpuUsage(): Promise;
    loopIdleTime(): number;
    loopStartTime(): number;
  }
diff --git a/typings/primordials.d.ts b/typings/primordials.d.ts
index c70074d2429a4e..c8bf95d1f2cfc8 100644
--- a/typings/primordials.d.ts
+++ b/typings/primordials.d.ts
@@ -163,7 +163,7 @@ declare namespace primordials {
  export import ArrayBuffer = globalThis.ArrayBuffer;
  export const ArrayBufferPrototype: typeof ArrayBuffer.prototype
  export const ArrayBufferIsView: typeof ArrayBuffer.isView
-  export const ArrayBufferPrototypeGetDetached: UncurryThis
+  export const ArrayBufferPrototypeGetDetached: UncurryGetter;
  export const ArrayBufferPrototypeSlice: UncurryThis
  export const ArrayBufferPrototypeTransfer: UncurryThis
  export const ArrayBufferPrototypeGetByteLength: UncurryGetter;
@@ -279,6 +279,7 @@ declare namespace primordials {
  export const FunctionPrototypeApply: UncurryThis
  export const FunctionPrototypeBind: UncurryThis
  export const FunctionPrototypeCall: UncurryThis
+  export const FunctionPrototypeSymbolHasInstance: UncurryMethod
  export const FunctionPrototypeToString: UncurryThis
  export import Int16Array = globalThis.Int16Array;
  export const Int16ArrayPrototype: typeof Int16Array.prototype
@@ -370,6 +371,8 @@ declare namespace primordials {
  export const RegExpPrototypeGetSource: UncurryGetter;
  export const RegExpPrototypeGetSticky: UncurryGetter;
  export const RegExpPrototypeGetUnicode: UncurryGetter;
+  export const RegExpPrototypeSymbolReplace: UncurryMethod
+  export const RegExpPrototypeSymbolSplit: UncurryMethod
  export import Set = globalThis.Set;
  export const SetLength: typeof Set.length
  export const SetName: typeof Set.name
diff --git a/unofficial.gni b/unofficial.gni
index da565473f1ae96..865a0d5ce9c679 100644
--- a/unofficial.gni
+++ b/unofficial.gni
@@ -22,6 +22,11 @@ template("node_gn_build") {
template("node_gn_build") { } else { defines += [ "HAVE_OPENSSL=0" ] } + if (node_use_sqlite) { + defines += [ "HAVE_SQLITE=1" ] + } else { + defines += [ "HAVE_SQLITE=0" ] + } if (node_use_amaro) { defines += [ "HAVE_AMARO=1" ] } else { @@ -159,7 +164,6 @@ template("node_gn_build") { "deps/nghttp2", "deps/ngtcp2", "deps/postject", - "deps/sqlite", "deps/uvwasi", "deps/zstd", "//third_party/zlib", @@ -193,6 +197,10 @@ template("node_gn_build") { public_deps += [ "$node_openssl_path" ] sources += gypi_values.node_crypto_sources } + if (node_use_sqlite) { + deps += [ "deps/sqlite" ] + sources += gypi_values.node_sqlite_sources + } if (node_enable_inspector) { deps += [ "$node_inspector_protocol_path:crdtp",