diff --git a/.github/workflows/ASAN.yml b/.github/workflows/ASAN.yml new file mode 100644 index 00000000000..77b8fd530ad --- /dev/null +++ b/.github/workflows/ASAN.yml @@ -0,0 +1,20 @@ +name: node ASAN + +on: [push, pull_request] + +jobs: + ubuntu-build: + runs-on: ubuntu-latest + container: gengjiawen/node-build:2020-02-14 + steps: + - uses: actions/checkout@v2 + - name: Build + run: | + npx envinfo + ./configure --debug --enable-asan --ninja && ninja -C out/Debug + - name: Test + env: + ASAN_OPTIONS: halt_on_error=0 + continue-on-error: true + run: | + python3 tools/test.py -J --mode=debug diff --git a/.gitignore b/.gitignore index 160b96f74a5..425a5ddbec0 100644 --- a/.gitignore +++ b/.gitignore @@ -33,6 +33,7 @@ /doc/api.xml /node /node_g +/gon-config.json /*.exe /*.swp /out diff --git a/.mailmap b/.mailmap index e140392b723..15962df50bd 100644 --- a/.mailmap +++ b/.mailmap @@ -423,6 +423,7 @@ Wilson Lin Wyatt Preul geek Xavier J Ortiz xiaoyu <306766053@qq.com> Poker <306766053@qq.com> +Yael Hermon Yazhong Liu Yazhong Liu Yazhong Liu Yorkie Yazhong Liu Yorkie diff --git a/AUTHORS b/AUTHORS index adb8aef88a4..539f311a0dc 100644 --- a/AUTHORS +++ b/AUTHORS @@ -2636,7 +2636,7 @@ Charles Samborski zhmushan yoshimoto koki Ilarion Halushka -Yael Hermon +Yael Hermon Mitch Hankins Mikko Rantanen wenjun ye <1728914873@qq.com> diff --git a/BUILDING.md b/BUILDING.md index 4f24ebe06c0..5c3923f2157 100644 --- a/BUILDING.md +++ b/BUILDING.md @@ -106,8 +106,9 @@ platforms. This is true regardless of entries in the table below. | GNU/Linux | armv6 | kernel >= 4.14, glibc >= 2.24 | Experimental | Downgraded as of Node.js 12 | | GNU/Linux | ppc64le >=power8 | kernel >= 3.10.0, glibc >= 2.17 | Tier 2 | e.g. Ubuntu 16.04 [1](#fn1), EL 7 [2](#fn2) | | GNU/Linux | s390x | kernel >= 3.10.0, glibc >= 2.17 | Tier 2 | e.g. 
EL 7 [2](#fn2) |
-| Windows | x64, x86 (WoW64) | >= Windows 7/2008 R2/2012 R2 | Tier 1 | [4](#fn4),[5](#fn5) |
-| Windows | x86 (native) | >= Windows 7/2008 R2/2012 R2 | Tier 1 (running) / Experimental (compiling) [6](#fn6) | |
+| Windows | x64, x86 (WoW64) | >= Windows 8.1/2012 R2 | Tier 1 | [4](#fn4),[5](#fn5) |
+| Windows | x86 (native) | >= Windows 8.1/2012 R2 | Tier 1 (running) / Experimental (compiling) [6](#fn6) | |
+| Windows | x64, x86 | Windows Server 2012 (not R2) | Experimental | |
| Windows | arm64 | >= Windows 10 | Experimental | |
| macOS | x64 | >= 10.11 | Tier 1 | |
| SmartOS | x64 | >= 18 | Tier 2 | |
@@ -167,14 +168,14 @@
Binaries at <https://nodejs.org/download/release/> are produced on:

| Binary package | Platform and Toolchain |
| --------------------- | ------------------------------------------------------------------------ |
| aix-ppc64 | AIX 7.1 TL05 on PPC64BE with GCC 6 |
-| darwin-x64 (and .pkg) | macOS 10.11, Xcode Command Line Tools 10 with -mmacosx-version-min=10.10 |
+| darwin-x64 (and .pkg) | macOS 10.15, Xcode Command Line Tools 11 with -mmacosx-version-min=10.10 |
| linux-arm64 | CentOS 7 with devtoolset-6 / GCC 6 |
| linux-armv7l | Cross-compiled on Ubuntu 16.04 x64 with [custom GCC toolchain](https://github.com/rvagg/rpi-newer-crosstools) |
| linux-ppc64le | CentOS 7 with devtoolset-6 / GCC 6 [7](#fn7) |
| linux-s390x | RHEL 7 with devtoolset-6 / GCC 6 [7](#fn7) |
| linux-x64 | CentOS 7 with devtoolset-6 / GCC 6 [7](#fn7) |
| sunos-x64 | SmartOS 18 with GCC 7 |
-| win-x64 and win-x86 | Windows 2012 R2 (x64) with Visual Studio 2017 |
+| win-x64 and win-x86 | Windows 2012 R2 (x64) with Visual Studio 2019 |

7: The Enterprise Linux devtoolset-6 allows us to compile binaries with GCC 6
but linked to the glibc and libstdc++ versions of the host

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 3da2f7373e5..b32c52810cc 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,19 +2,19 @@
Select a Node.js version below to view the changelog history:

-* [Node.js 13](doc/changelogs/CHANGELOG_V13.md) - **Current**
-* [Node.js 12](doc/changelogs/CHANGELOG_V12.md) - **Long Term Support**
-* [Node.js 11](doc/changelogs/CHANGELOG_V11.md) - End-of-Life
-* [Node.js 10](doc/changelogs/CHANGELOG_V10.md) — Long Term Support
-* [Node.js 9](doc/changelogs/CHANGELOG_V9.md) — End-of-Life
-* [Node.js 8](doc/changelogs/CHANGELOG_V8.md) — End-of-Life
-* [Node.js 7](doc/changelogs/CHANGELOG_V7.md) — End-of-Life
-* [Node.js 6](doc/changelogs/CHANGELOG_V6.md) — End-of-Life
-* [Node.js 5](doc/changelogs/CHANGELOG_V5.md) — End-of-Life
-* [Node.js 4](doc/changelogs/CHANGELOG_V4.md) — End-of-Life
-* [io.js](doc/changelogs/CHANGELOG_IOJS.md) — End-of-Life
-* [Node.js 0.12](doc/changelogs/CHANGELOG_V012.md) — End-of-Life
-* [Node.js 0.10](doc/changelogs/CHANGELOG_V010.md) — End-of-Life
+* [Node.js 13](doc/changelogs/CHANGELOG_V13.md) — **Current**
+* [Node.js 12](doc/changelogs/CHANGELOG_V12.md) — **Long Term Support**
+* [Node.js 11](doc/changelogs/CHANGELOG_V11.md) — End-of-Life
+* [Node.js 10](doc/changelogs/CHANGELOG_V10.md) — Long Term Support
+* [Node.js 9](doc/changelogs/CHANGELOG_V9.md) — End-of-Life
+* [Node.js 8](doc/changelogs/CHANGELOG_V8.md) — End-of-Life
+* [Node.js 7](doc/changelogs/CHANGELOG_V7.md) — End-of-Life
+* [Node.js 6](doc/changelogs/CHANGELOG_V6.md) — End-of-Life
+* [Node.js 5](doc/changelogs/CHANGELOG_V5.md) — End-of-Life
+* [Node.js 4](doc/changelogs/CHANGELOG_V4.md) — End-of-Life
+* [io.js](doc/changelogs/CHANGELOG_IOJS.md) — End-of-Life
+* [Node.js 0.12](doc/changelogs/CHANGELOG_V012.md) — End-of-Life
+* [Node.js 0.10](doc/changelogs/CHANGELOG_V010.md) — End-of-Life
* [Archive](doc/changelogs/CHANGELOG_ARCHIVE.md)

Please use the following table to find the changelog for a specific Node.js
release.

@@ -29,7 +29,10 @@ release.
-13.8.0<br/>
+13.10.1
+13.10.0
+13.9.0
+13.8.0
13.7.0
13.6.0
13.5.0
@@ -41,7 +44,8 @@ release. 13.0.0
-12.16.0
+12.16.1
+12.16.0
12.15.0
12.14.1
12.14.0
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index b9d1f2cef60..29700978fb7 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -5,13 +5,13 @@ * [Pull Requests](#pull-requests) * [Developer's Certificate of Origin 1.1](#developers-certificate-of-origin) -## [Code of Conduct](./doc/guides/contributing/coc.md) +## [Code of Conduct](./doc/guides/contributing/code-of-conduct.md) The Node.js project has a [Code of Conduct](https://github.com/nodejs/admin/blob/master/CODE_OF_CONDUCT.md) to which all contributors must adhere. -See [details on our policy on Code of Conduct](./doc/guides/contributing/coc.md). +See [details on our policy on Code of Conduct](./doc/guides/contributing/code-of-conduct.md). ## [Issues](./doc/guides/contributing/issues.md) diff --git a/GOVERNANCE.md b/GOVERNANCE.md index d7cb6e321e1..5048a700340 100644 --- a/GOVERNANCE.md +++ b/GOVERNANCE.md @@ -7,7 +7,7 @@ * [Technical Steering Committee](#technical-steering-committee) * [TSC Meetings](#tsc-meetings) * [Collaborator Nominations](#collaborator-nominations) - * [Onboarding](#onboarding) + * [Onboarding](#./onboarding) * [Consensus Seeking Process](#consensus-seeking-process) @@ -39,7 +39,7 @@ result in Collaborators removing their opposition. See: * [List of Collaborators](./README.md#current-project-team-members) -* [A guide for Collaborators](./COLLABORATOR_GUIDE.md) +* [A guide for Collaborators](./doc/guides/collaborator-guide.md) ### Collaborator Activities @@ -148,7 +148,7 @@ nomination. ### Onboarding After the nomination passes, a TSC member onboards the new Collaborator. See -[the onboarding guide](./doc/onboarding.md) for details of the onboarding +[the onboarding guide](./onboarding.md) for details of the onboarding process. ## Consensus Seeking Process diff --git a/Makefile b/Makefile index 34cdec7f776..3a97f15fc3c 100644 --- a/Makefile +++ b/Makefile @@ -924,12 +924,12 @@ endif .PHONY: release-only release-only: check-xz @if [ "$(DISTTYPE)" = "release" ] && `grep -q REPLACEME doc/api/*.md`; then \ - echo 'Please update REPLACEME in Added: tags in doc/api/*.md (See doc/releases.md)' ; \ + echo 'Please update REPLACEME in Added: tags in doc/api/*.md (See doc/guides/releases.md)' ; \ exit 1 ; \ fi @if [ "$(DISTTYPE)" = "release" ] && \ `grep -q DEP...X doc/api/deprecations.md`; then \ - echo 'Please update DEP...X in doc/api/deprecations.md (See doc/releases.md)' ; \ + echo 'Please update DEP...X in doc/api/deprecations.md (See doc/guides/releases.md)' ; \ exit 1 ; \ fi @if [ "$(shell git status --porcelain | egrep -v '^\?\? ')" = "" ]; then \ @@ -1003,6 +1003,7 @@ $(PKG): release-only --resources $(MACOSOUTDIR)/installer/productbuild/Resources \ --package-path $(MACOSOUTDIR)/pkgs ./$(PKG) SIGN="$(PRODUCTSIGN_CERT)" PKG="$(PKG)" bash tools/osx-productsign.sh + bash tools/osx-notarize.sh $(FULLVERSION) .PHONY: pkg # Builds the macOS installer for releases. @@ -1032,7 +1033,6 @@ $(TARBALL): release-only $(NODE_EXE) doc $(RM) -r $(TARNAME)/deps/v8/samples $(RM) -r $(TARNAME)/deps/v8/tools/profviz $(RM) -r $(TARNAME)/deps/v8/tools/run-tests.py - $(RM) -r $(TARNAME)/deps/zlib/contrib # too big, unused $(RM) -r $(TARNAME)/doc/images # too big $(RM) -r $(TARNAME)/test*.tap $(RM) -r $(TARNAME)/tools/cpplint.py @@ -1043,6 +1043,7 @@ $(TARBALL): release-only $(NODE_EXE) doc $(RM) -r $(TARNAME)/tools/osx-pkg.pmdoc find $(TARNAME)/deps/v8/test/* -type d ! -regex '.*/test/torque$$' | xargs $(RM) -r find $(TARNAME)/deps/v8/test -type f ! 
-regex '.*/test/torque/.*' | xargs $(RM) + find $(TARNAME)/deps/zlib/contrib/* -type d ! -regex '.*/contrib/optimizations$$' | xargs $(RM) -r find $(TARNAME)/ -name ".eslint*" -maxdepth 2 | xargs $(RM) find $(TARNAME)/ -type l | xargs $(RM) # annoying on windows tar -cf $(TARNAME).tar $(TARNAME) @@ -1165,6 +1166,7 @@ bench-addons-clean: .PHONY: lint-md-rollup lint-md-rollup: + $(RM) tools/.*mdlintstamp cd tools/node-lint-md-cli-rollup && npm install cd tools/node-lint-md-cli-rollup && npm run build-node @@ -1177,28 +1179,23 @@ lint-md-clean: lint-md-build: $(warning "Deprecated no-op target 'lint-md-build'") -LINT_MD_DOC_FILES = $(shell find doc -type f -name '*.md') -run-lint-doc-md = tools/lint-md.js -q -f $(LINT_MD_DOC_FILES) -# Lint all changed markdown files under doc/ -tools/.docmdlintstamp: $(LINT_MD_DOC_FILES) - @echo "Running Markdown linter on docs..." - @$(call available-node,$(run-lint-doc-md)) - @touch $@ +ifeq ("$(wildcard tools/.mdlintstamp)","") + LINT_MD_NEWER = +else + LINT_MD_NEWER = -newer tools/.mdlintstamp +endif -LINT_MD_TARGETS = src lib benchmark test tools/doc tools/icu -LINT_MD_ROOT_DOCS := $(wildcard *.md) -LINT_MD_MISC_FILES := $(shell find $(LINT_MD_TARGETS) -type f \ - ! -path '*node_modules*' ! -path 'test/fixtures/*' -name '*.md') \ - $(LINT_MD_ROOT_DOCS) -run-lint-misc-md = tools/lint-md.js -q -f $(LINT_MD_MISC_FILES) -# Lint other changed markdown files maintained by us -tools/.miscmdlintstamp: $(LINT_MD_MISC_FILES) - @echo "Running Markdown linter on misc docs..." - @$(call available-node,$(run-lint-misc-md)) +LINT_MD_TARGETS = doc src lib benchmark test tools/doc tools/icu $(wildcard *.md) +LINT_MD_FILES = $(shell find $(LINT_MD_TARGETS) -type f \ + ! -path '*node_modules*' ! -path 'test/fixtures/*' -name '*.md' \ + $(LINT_MD_NEWER)) +run-lint-md = tools/lint-md.js -q -f --no-stdout $(LINT_MD_FILES) +# Lint all changed markdown files maintained by us +tools/.mdlintstamp: $(LINT_MD_FILES) + @echo "Running Markdown linter..." + @$(call available-node,$(run-lint-md)) @touch $@ -tools/.mdlintstamp: tools/.miscmdlintstamp tools/.docmdlintstamp - .PHONY: lint-md # Lints the markdown documents maintained by us in the codebase. 
lint-md: | tools/.mdlintstamp diff --git a/README.md b/README.md index 07d6ad4e7ed..b073c034b7e 100644 --- a/README.md +++ b/README.md @@ -165,12 +165,8 @@ For information about the governance of the Node.js project, see **Daniel Bevenius** <daniel.bevenius@gmail.com> (he/him) * [fhinkel](https://github.com/fhinkel) - **Franziska Hinkelmann** <franziska.hinkelmann@gmail.com> (she/her) -* [Fishrock123](https://github.com/Fishrock123) - -**Jeremiah Senkpiel** <fishrock123@rocketmail.com> * [gabrielschulhof](https://github.com/gabrielschulhof) - **Gabriel Schulhof** <gabriel.schulhof@intel.com> -* [gireeshpunathil](https://github.com/gireeshpunathil) - -**Gireesh Punathil** <gpunathi@in.ibm.com> (he/him) * [jasnell](https://github.com/jasnell) - **James M Snell** <jasnell@gmail.com> (he/him) * [joyeecheung](https://github.com/joyeecheung) - @@ -185,8 +181,6 @@ For information about the governance of the Node.js project, see **Sam Roberts** <vieuxtech@gmail.com> * [targos](https://github.com/targos) - **Michaël Zasso** <targos@protonmail.com> (he/him) -* [thefourtheye](https://github.com/thefourtheye) - -**Sakthipriyan Vairamani** <thechargingvolcano@gmail.com> (he/him) * [tniessen](https://github.com/tniessen) - **Tobias Nießen** <tniessen@tnie.de> * [Trott](https://github.com/Trott) - @@ -200,8 +194,12 @@ For information about the governance of the Node.js project, see **Chris Dickinson** <christopher.s.dickinson@gmail.com> * [evanlucas](https://github.com/evanlucas) - **Evan Lucas** <evanlucas@me.com> (he/him) +* [Fishrock123](https://github.com/Fishrock123) - +**Jeremiah Senkpiel** <fishrock123@rocketmail.com> (he/they) * [gibfahn](https://github.com/gibfahn) - **Gibson Fahnestock** <gibfahn@gmail.com> (he/him) +* [gireeshpunathil](https://github.com/gireeshpunathil) - +**Gireesh Punathil** <gpunathi@in.ibm.com> (he/him) * [indutny](https://github.com/indutny) - **Fedor Indutny** <fedor.indutny@gmail.com> * [isaacs](https://github.com/isaacs) - @@ -222,6 +220,8 @@ For information about the governance of the Node.js project, see **Rod Vagg** <r@va.gg> * [shigeki](https://github.com/shigeki) - **Shigeki Ohtsu** <ohtsu@ohtsu.org> (he/him) +* [thefourtheye](https://github.com/thefourtheye) - +**Sakthipriyan Vairamani** <thechargingvolcano@gmail.com> (he/him) * [TimothyGu](https://github.com/TimothyGu) - **Tiancheng "Timothy" Gu** <timothygu99@gmail.com> (he/him) * [trevnorris](https://github.com/trevnorris) - @@ -239,8 +239,6 @@ For information about the governance of the Node.js project, see **Anto Aravinth** <anto.aravinth.cse@gmail.com> (he/him) * [apapirovski](https://github.com/apapirovski) - **Anatoli Papirovski** <apapirovski@mac.com> (he/him) -* [aqrln](https://github.com/aqrln) - -**Alexey Orlenko** <eaglexrlnk@gmail.com> (he/him) * [bcoe](https://github.com/bcoe) - **Ben Coe** <bencoe@gmail.com> (he/him) * [bengl](https://github.com/bengl) - @@ -283,8 +281,6 @@ For information about the governance of the Node.js project, see **Hitesh Kanwathirtha** <digitalinfinity@gmail.com> (he/him) * [edsadr](https://github.com/edsadr) - **Adrian Estrada** <edsadr@gmail.com> (he/him) -* [eljefedelrodeodeljefe](https://github.com/eljefedelrodeodeljefe) - -**Robert Jefe Lindstaedt** <robert.lindstaedt@gmail.com> * [eugeneo](https://github.com/eugeneo) - **Eugene Ostroukhov** <eostroukhov@google.com> * [evanlucas](https://github.com/evanlucas) - @@ -292,7 +288,7 @@ For information about the governance of the Node.js project, see * [fhinkel](https://github.com/fhinkel) - **Franziska Hinkelmann** 
<franziska.hinkelmann@gmail.com> (she/her) * [Fishrock123](https://github.com/Fishrock123) - -**Jeremiah Senkpiel** <fishrock123@rocketmail.com> +**Jeremiah Senkpiel** <fishrock123@rocketmail.com> (he/they) * [gabrielschulhof](https://github.com/gabrielschulhof) - **Gabriel Schulhof** <gabriel.schulhof@intel.com> * [gdams](https://github.com/gdams) - @@ -323,8 +319,6 @@ For information about the governance of the Node.js project, see **Jackson Tian** <shyvo1987@gmail.com> * [jasnell](https://github.com/jasnell) - **James M Snell** <jasnell@gmail.com> (he/him) -* [jbergstroem](https://github.com/jbergstroem) - -**Johan Bergström** <bugs@bergstroem.nu> * [jdalton](https://github.com/jdalton) - **John-David Dalton** <john.david.dalton@gmail.com> * [jkrems](https://github.com/jkrems) - @@ -333,8 +327,6 @@ For information about the governance of the Node.js project, see **João Reis** <reis@janeasystems.com> * [joyeecheung](https://github.com/joyeecheung) - **Joyee Cheung** <joyeec9h3@gmail.com> (she/her) -* [julianduque](https://github.com/julianduque) - -**Julian Duque** <julianduquej@gmail.com> (he/him) * [JungMinu](https://github.com/JungMinu) - **Minwoo Jung** <nodecorelab@gmail.com> (he/him) * [kfarnung](https://github.com/kfarnung) - @@ -349,8 +341,6 @@ For information about the governance of the Node.js project, see **Luigi Pinca** <luigipinca@gmail.com> (he/him) * [lundibundi](https://github.com/lundibundi) - **Denys Otrishko** <shishugi@gmail.com> (he/him) -* [maclover7](https://github.com/maclover7) - -**Jon Moss** <me@jonathanmoss.me> (he/him) * [mafintosh](https://github.com/mafintosh) - **Mathias Buus** <mathiasbuus@gmail.com> (he/him) * [mcollina](https://github.com/mcollina) - @@ -367,8 +357,6 @@ For information about the governance of the Node.js project, see **Brian White** <mscdex@mscdex.net> * [MylesBorins](https://github.com/MylesBorins) - **Myles Borins** <myles.borins@gmail.com> (he/him) -* [not-an-aardvark](https://github.com/not-an-aardvark) - -**Teddy Katz** <teddy.katz@gmail.com> (he/him) * [ofrobots](https://github.com/ofrobots) - **Ali Ijaz Sheikh** <ofrobots@google.com> (he/him) * [oyyd](https://github.com/oyyd) - @@ -415,8 +403,6 @@ For information about the governance of the Node.js project, see **Michaël Zasso** <targos@protonmail.com> (he/him) * [thefourtheye](https://github.com/thefourtheye) - **Sakthipriyan Vairamani** <thechargingvolcano@gmail.com> (he/him) -* [thekemkid](https://github.com/thekemkid) - -**Glen Keane** <glenkeane.94@gmail.com> (he/him) * [TimothyGu](https://github.com/TimothyGu) - **Tiancheng "Timothy" Gu** <timothygu99@gmail.com> (he/him) * [tniessen](https://github.com/tniessen) - @@ -450,6 +436,8 @@ For information about the governance of the Node.js project, see **Andras** <andras@kinvey.com> * [AnnaMag](https://github.com/AnnaMag) - **Anna M. 
Kedzierska** <anna.m.kedzierska@gmail.com> +* [aqrln](https://github.com/aqrln) - +**Alexey Orlenko** <eaglexrlnk@gmail.com> (he/him) * [brendanashworth](https://github.com/brendanashworth) - **Brendan Ashworth** <brendan.ashworth@me.com> * [calvinmetcalf](https://github.com/calvinmetcalf) - @@ -458,10 +446,14 @@ For information about the governance of the Node.js project, see **Chris Dickinson** <christopher.s.dickinson@gmail.com> * [DavidCai1993](https://github.com/DavidCai1993) - **David Cai** <davidcai1993@yahoo.com> (he/him) +* [eljefedelrodeodeljefe](https://github.com/eljefedelrodeodeljefe) - +**Robert Jefe Lindstaedt** <robert.lindstaedt@gmail.com> * [estliberitas](https://github.com/estliberitas) - **Alexander Makarenko** <estliberitas@gmail.com> * [firedfox](https://github.com/firedfox) - **Daniel Wang** <wangyang0123@gmail.com> +* [glentiki](https://github.com/glentiki) - +**Glen Keane** <glenkeane.94@gmail.com> (he/him) * [imran-iq](https://github.com/imran-iq) - **Imran Iqbal** <imran@imraniqbal.org> * [imyller](https://github.com/imyller) - @@ -470,16 +462,22 @@ For information about the governance of the Node.js project, see **Isaac Z. Schlueter** <i@izs.me> * [jasongin](https://github.com/jasongin) - **Jason Ginchereau** <jasongin@microsoft.com> +* [jbergstroem](https://github.com/jbergstroem) - +**Johan Bergström** <bugs@bergstroem.nu> * [jhamhader](https://github.com/jhamhader) - **Yuval Brik** <yuval@brik.org.il> * [joshgav](https://github.com/joshgav) - **Josh Gavant** <josh.gavant@outlook.com> +* [julianduque](https://github.com/julianduque) - +**Julian Duque** <julianduquej@gmail.com> (he/him) * [kunalspathak](https://github.com/kunalspathak) - **Kunal Pathak** <kunal.pathak@microsoft.com> * [lucamaraschi](https://github.com/lucamaraschi) - **Luca Maraschi** <luca.maraschi@gmail.com> (he/him) * [lxe](https://github.com/lxe) - **Aleksey Smolenchuk** <lxe@lxe.co> +* [maclover7](https://github.com/maclover7) - +**Jon Moss** <me@jonathanmoss.me> (he/him) * [matthewloring](https://github.com/matthewloring) - **Matthew Loring** <mattloring@google.com> * [micnic](https://github.com/micnic) - @@ -488,6 +486,8 @@ For information about the governance of the Node.js project, see **Mikeal Rogers** <mikeal.rogers@gmail.com> * [monsanto](https://github.com/monsanto) - **Christopher Monsanto** <chris@monsan.to> +* [not-an-aardvark](https://github.com/not-an-aardvark) - +**Teddy Katz** <teddy.katz@gmail.com> (he/him) * [Olegas](https://github.com/Olegas) - **Oleg Elifantiev** <oleg@elifantiev.ru> * [orangemocha](https://github.com/orangemocha) - @@ -529,7 +529,7 @@ For information about the governance of the Node.js project, see * [whitlockjc](https://github.com/whitlockjc) - **Jeremy Whitlock** <jwhitlock@apache.org> -Collaborators follow the [COLLABORATOR_GUIDE.md](./COLLABORATOR_GUIDE.md) in +Collaborators follow the [Collaborator Guide](./doc/guides/collaborator-guide.md) in maintaining the Node.js project. 
### Release Keys @@ -546,8 +546,6 @@ GPG keys used to sign Node.js releases: `77984A986EBC2AA786BC0F66B01FBB92821C587A` * **James M Snell** <jasnell@keybase.io> `71DCFD284A79C3B38668286BC97EC7A07EDE3FC1` -* **Jeremiah Senkpiel** <fishrock@keybase.io> -`FD3A5288F042B6850C66B31F09FE44734EB7990E` * **Michaël Zasso** <targos@protonmail.com> `8FCCA13FEF1D0C2E91008E09770F7A9A5AE15600` * **Myles Borins** <myles.borins@gmail.com> @@ -568,7 +566,6 @@ gpg --keyserver pool.sks-keyservers.net --recv-keys 94AE36675C464D64BAFA68DD7434 gpg --keyserver pool.sks-keyservers.net --recv-keys B9AE9905FFD7803F25714661B63B535A4C206CA9 gpg --keyserver pool.sks-keyservers.net --recv-keys 77984A986EBC2AA786BC0F66B01FBB92821C587A gpg --keyserver pool.sks-keyservers.net --recv-keys 71DCFD284A79C3B38668286BC97EC7A07EDE3FC1 -gpg --keyserver pool.sks-keyservers.net --recv-keys FD3A5288F042B6850C66B31F09FE44734EB7990E gpg --keyserver pool.sks-keyservers.net --recv-keys 8FCCA13FEF1D0C2E91008E09770F7A9A5AE15600 gpg --keyserver pool.sks-keyservers.net --recv-keys C4F0DFFF4E8C1A8236409D08E73BC641CC11F4C8 gpg --keyserver pool.sks-keyservers.net --recv-keys DD8F2338BAE7501E3DD5AC78C273792F7D83545D @@ -580,6 +577,8 @@ use these keys to verify a downloaded file. Other keys used to sign some previous releases: +* **Jeremiah Senkpiel** <fishrock@keybase.io> +`FD3A5288F042B6850C66B31F09FE44734EB7990E` * **Chris Dickinson** <christopher.s.dickinson@gmail.com> `9554F04D7259F04124DE6B476D5A82AC7E37093B` * **Isaac Z. Schlueter** <i@izs.me> diff --git a/benchmark/README.md b/benchmark/README.md index c5fdad09347..6a40d7af3e1 100644 --- a/benchmark/README.md +++ b/benchmark/README.md @@ -5,7 +5,7 @@ of different Node.js implementations and different ways of writing JavaScript run by the built-in JavaScript engine. For a detailed guide on how to write and run benchmarks in this -directory, see [the guide on benchmarks](writing-and-running-benchmarks.md). +directory, see [the guide on benchmarks](../doc/guides/writing-and-running-benchmarks.md). ## Table of Contents @@ -76,17 +76,17 @@ writing benchmarks. ### `createBenchmark(fn, configs[, options])` -See [the guide on writing benchmarks](writing-and-running-benchmarks.md#basics-of-a-benchmark). +See [the guide on writing benchmarks](../doc/guides/writing-and-running-benchmarks.md#basics-of-a-benchmark). ### `default_http_benchmarker` The default benchmarker used to run HTTP benchmarks. -See [the guide on writing HTTP benchmarks](writing-and-running-benchmarks.md#creating-an-http-benchmark). +See [the guide on writing HTTP benchmarks](../doc/guides/writing-and-running-benchmarks.md#creating-an-http-benchmark). ### `PORT` The default port used to run HTTP benchmarks. -See [the guide on writing HTTP benchmarks](writing-and-running-benchmarks.md#creating-an-http-benchmark). +See [the guide on writing HTTP benchmarks](../doc/guides/writing-and-running-benchmarks.md#creating-an-http-benchmark). 
### `sendResult(data)` diff --git a/benchmark/_cli.js b/benchmark/_cli.js index 771cc72bff1..eb6c4add979 100644 --- a/benchmark/_cli.js +++ b/benchmark/_cli.js @@ -6,15 +6,16 @@ const path = require('path'); // Create an object of all benchmark scripts const benchmarks = {}; fs.readdirSync(__dirname) - .filter((name) => fs.statSync(path.resolve(__dirname, name)).isDirectory()) + .filter((name) => { + return name !== 'fixtures' && + fs.statSync(path.resolve(__dirname, name)).isDirectory(); + }) .forEach((category) => { benchmarks[category] = fs.readdirSync(path.resolve(__dirname, category)) .filter((filename) => filename[0] !== '.' && filename[0] !== '_'); }); function CLI(usage, settings) { - if (!(this instanceof CLI)) return new CLI(usage, settings); - if (process.argv.length < 3) { this.abort(usage); // Abort will exit the process } @@ -22,6 +23,7 @@ function CLI(usage, settings) { this.usage = usage; this.optional = {}; this.items = []; + this.test = false; for (const argName of settings.arrayArgs) { this.optional[argName] = []; @@ -34,7 +36,7 @@ function CLI(usage, settings) { if (arg === '--') { // Only items can follow -- mode = 'item'; - } else if ('both' === mode && arg[0] === '-') { + } else if (mode === 'both' && arg[0] === '-') { // Optional arguments declaration if (arg[1] === '-') { @@ -61,6 +63,8 @@ function CLI(usage, settings) { // The next value can be either an option or an item mode = 'both'; + } else if (arg === 'test') { + this.test = true; } else if (['both', 'item'].includes(mode)) { // item arguments this.items.push(arg); @@ -83,9 +87,15 @@ CLI.prototype.abort = function(msg) { CLI.prototype.benchmarks = function() { const paths = []; + if (this.items.includes('all')) { + this.items = Object.keys(benchmarks); + } + for (const category of this.items) { - if (benchmarks[category] === undefined) - continue; + if (benchmarks[category] === undefined) { + console.error(`The "${category}" category does not exist.`); + process.exit(1); + } for (const scripts of benchmarks[category]) { if (this.shouldSkip(scripts)) continue; diff --git a/benchmark/_http-benchmarkers.js b/benchmark/_http-benchmarkers.js index 821dab2d55e..d0f192e7594 100644 --- a/benchmark/_http-benchmarkers.js +++ b/benchmark/_http-benchmarkers.js @@ -43,9 +43,8 @@ class AutocannonBenchmarker { } if (!result || !result.requests || !result.requests.average) { return undefined; - } else { - return result.requests.average; } + return result.requests.average; } } @@ -58,10 +57,13 @@ class WrkBenchmarker { } create(options) { + const duration = typeof options.duration === 'number' ? + Math.max(options.duration, 1) : + options.duration; const args = [ - '-d', options.duration, + '-d', duration, '-c', options.connections, - '-t', 8, + '-t', Math.min(options.connections, require('os').cpus().length || 8), `http://127.0.0.1:${options.port}${options.path}`, ]; for (const field in options.headers) { @@ -77,9 +79,8 @@ class WrkBenchmarker { const throughput = match && +match[1]; if (!isFinite(throughput)) { return undefined; - } else { - return throughput; } + return throughput; } } @@ -89,7 +90,8 @@ class WrkBenchmarker { */ class TestDoubleBenchmarker { constructor(type) { - // `type` is the type ofbenchmarker. Possible values are 'http' and 'http2'. + // `type` is the type of benchmarker. Possible values are 'http' and + // 'http2'. 
this.name = `test-double-${type}`; this.executable = path.resolve(__dirname, '_test-double-benchmarker.js'); this.present = fs.existsSync(this.executable); @@ -97,10 +99,12 @@ class TestDoubleBenchmarker { } create(options) { - const env = Object.assign({ - duration: options.duration, + process.env.duration = process.env.duration || options.duration || 5; + + const env = { test_url: `http://127.0.0.1:${options.port}${options.path}`, - }, process.env); + ...process.env + }; const child = child_process.fork(this.executable, [this.type], @@ -189,13 +193,14 @@ http_benchmarkers.forEach((benchmarker) => { }); exports.run = function(options, callback) { - options = Object.assign({ + options = { port: exports.PORT, path: '/', connections: 100, duration: 5, benchmarker: exports.default_http_benchmarker, - }, options); + ...options + }; if (!options.benchmarker) { callback(new Error('Could not locate required http benchmarker. See ' + `${requirementsURL} for further instructions.`)); @@ -220,7 +225,8 @@ exports.run = function(options, callback) { child.stderr.pipe(process.stderr); let stdout = ''; - child.stdout.on('data', (chunk) => stdout += chunk.toString()); + child.stdout.setEncoding('utf8'); + child.stdout.on('data', (chunk) => stdout += chunk); child.once('close', (code) => { const elapsed = process.hrtime(benchmarker_start); diff --git a/benchmark/_test-double-benchmarker.js b/benchmark/_test-double-benchmarker.js index b9379b907ff..60264dfd46a 100644 --- a/benchmark/_test-double-benchmarker.js +++ b/benchmark/_test-double-benchmarker.js @@ -7,7 +7,7 @@ if (!['http', 'http2'].includes(myModule)) { const http = require(myModule); -const duration = process.env.duration || 0; +const duration = +process.env.duration; const url = process.env.test_url; const start = process.hrtime(); @@ -18,13 +18,15 @@ function request(res, client) { res.on('error', () => {}); res.on('end', () => { throughput++; - const diff = process.hrtime(start); - if (duration > 0 && diff[0] < duration) { + const [sec, nanosec] = process.hrtime(start); + const ms = sec * 1000 + nanosec / 1e6; + if (ms < duration * 1000) { run(); } else { console.log(JSON.stringify({ throughput })); if (client) { client.destroy(); + process.exit(0); } } }); @@ -33,7 +35,7 @@ function request(res, client) { function run() { if (http.get) { // HTTP http.get(url, request); - } else { // HTTP/2 + } else { // HTTP/2 const client = http.connect(url); client.on('error', (e) => { throw e; }); request(client.request(), client); diff --git a/benchmark/assert/deepequal-buffer.js b/benchmark/assert/deepequal-buffer.js index 6d9162f1517..69cca91cc6d 100644 --- a/benchmark/assert/deepequal-buffer.js +++ b/benchmark/assert/deepequal-buffer.js @@ -6,12 +6,10 @@ const bench = common.createBenchmark(main, { n: [2e4], len: [1e2, 1e3], strict: [0, 1], - method: [ 'deepEqual', 'notDeepEqual' ], + method: ['deepEqual', 'notDeepEqual'], }); function main({ len, n, method, strict }) { - if (!method) - method = 'deepEqual'; const data = Buffer.allocUnsafe(len + 1); const actual = Buffer.alloc(len); const expected = Buffer.alloc(len); diff --git a/benchmark/assert/deepequal-map.js b/benchmark/assert/deepequal-map.js index b88ecf7ce12..77408e3d103 100644 --- a/benchmark/assert/deepequal-map.js +++ b/benchmark/assert/deepequal-map.js @@ -34,8 +34,6 @@ function main({ n, len, method, strict }) { const array = Array(len).fill(1); switch (method) { - case '': - // Empty string falls through to next line as default, mostly for tests. 
case 'deepEqual_primitiveOnly': { const values = array.map((_, i) => [`str_${i}`, 123]); benchmark(strict ? deepStrictEqual : deepEqual, n, values); diff --git a/benchmark/assert/deepequal-object.js b/benchmark/assert/deepequal-object.js index e23f6692b3d..a8c539426a8 100644 --- a/benchmark/assert/deepequal-object.js +++ b/benchmark/assert/deepequal-object.js @@ -7,7 +7,7 @@ const bench = common.createBenchmark(main, { n: [5e3], size: [1e2, 1e3, 5e4], strict: [0, 1], - method: [ 'deepEqual', 'notDeepEqual' ], + method: ['deepEqual', 'notDeepEqual'], }); function createObj(source, add = '') { @@ -27,9 +27,6 @@ function main({ size, n, method, strict }) { // TODO: Fix this "hack". `n` should not be manipulated. n = Math.min(Math.ceil(n / size), 20); - if (!method) - method = 'deepEqual'; - const source = Array.apply(null, Array(size)); const actual = createObj(source); const expected = createObj(source); diff --git a/benchmark/assert/deepequal-prims-and-objs-big-array-set.js b/benchmark/assert/deepequal-prims-and-objs-big-array-set.js index 0e0ce450bb1..ad049ded02c 100644 --- a/benchmark/assert/deepequal-prims-and-objs-big-array-set.js +++ b/benchmark/assert/deepequal-prims-and-objs-big-array-set.js @@ -52,8 +52,6 @@ function main({ n, len, primitive, method, strict }) { const expectedWrongSet = new Set(expectedWrong); switch (method) { - // Empty string falls through to next line as default, mostly for tests. - case '': case 'deepEqual_Array': run(strict ? deepStrictEqual : deepEqual, n, actual, expected); break; diff --git a/benchmark/assert/deepequal-prims-and-objs-big-loop.js b/benchmark/assert/deepequal-prims-and-objs-big-loop.js index 32140f08ded..2d01431b1fc 100644 --- a/benchmark/assert/deepequal-prims-and-objs-big-loop.js +++ b/benchmark/assert/deepequal-prims-and-objs-big-loop.js @@ -13,12 +13,10 @@ const bench = common.createBenchmark(main, { primitive: Object.keys(primValues), n: [2e4], strict: [0, 1], - method: [ 'deepEqual', 'notDeepEqual' ], + method: ['deepEqual', 'notDeepEqual'], }); function main({ n, primitive, method, strict }) { - if (!method) - method = 'deepEqual'; const prim = primValues[primitive]; const actual = prim; const expected = prim; diff --git a/benchmark/assert/deepequal-set.js b/benchmark/assert/deepequal-set.js index 561a951e683..27ca7c92bce 100644 --- a/benchmark/assert/deepequal-set.js +++ b/benchmark/assert/deepequal-set.js @@ -34,8 +34,6 @@ function main({ n, len, method, strict }) { const array = Array(len).fill(1); switch (method) { - case '': - // Empty string falls through to next line as default, mostly for tests. case 'deepEqual_primitiveOnly': { const values = array.map((_, i) => `str_${i}`); benchmark(strict ? 
deepStrictEqual : deepEqual, n, values); diff --git a/benchmark/assert/deepequal-typedarrays.js b/benchmark/assert/deepequal-typedarrays.js index 10ba21a2575..188cfce695e 100644 --- a/benchmark/assert/deepequal-typedarrays.js +++ b/benchmark/assert/deepequal-typedarrays.js @@ -20,8 +20,6 @@ const bench = common.createBenchmark(main, { }); function main({ type, n, len, method, strict }) { - if (!method) - method = 'deepEqual'; const clazz = global[type]; const actual = new clazz(len); const expected = new clazz(len); diff --git a/benchmark/assert/throws.js b/benchmark/assert/throws.js index c80518377a8..978ad2f1b8b 100644 --- a/benchmark/assert/throws.js +++ b/benchmark/assert/throws.js @@ -15,8 +15,6 @@ function main({ n, method }) { const message = 'failure'; switch (method) { - case '': - // Empty string falls through to next line as default, mostly for tests. case 'doesNotThrow': bench.start(); for (let i = 0; i < n; ++i) { diff --git a/benchmark/async_hooks/async-resource-vs-destroy.js b/benchmark/async_hooks/async-resource-vs-destroy.js index 4464dd5f93e..da0b52afa0e 100644 --- a/benchmark/async_hooks/async-resource-vs-destroy.js +++ b/benchmark/async_hooks/async-resource-vs-destroy.js @@ -8,18 +8,17 @@ const common = require('../common.js'); const { createHook, executionAsyncResource, - executionAsyncId + executionAsyncId, + AsyncLocalStorage } = require('async_hooks'); const { createServer } = require('http'); -// Configuration for the http server -// there is no need for parameters in this test -const connections = 500; -const path = '/'; - const bench = common.createBenchmark(main, { - type: ['async-resource', 'destroy'], + type: ['async-resource', 'destroy', 'async-local-storage'], asyncMethod: ['callbacks', 'async'], + path: '/', + connections: 500, + duration: 5, n: [1e6] }); @@ -102,6 +101,35 @@ function buildDestroy(getServe) { } } +function buildAsyncLocalStorage(getServe) { + const asyncLocalStorage = new AsyncLocalStorage(); + const server = createServer((req, res) => { + asyncLocalStorage.runSyncAndReturn({}, () => { + getServe(getCLS, setCLS)(req, res); + }); + }); + + return { + server, + close + }; + + function getCLS() { + const store = asyncLocalStorage.getStore(); + return store.state; + } + + function setCLS(state) { + const store = asyncLocalStorage.getStore(); + store.state = state; + } + + function close() { + asyncLocalStorage.disable(); + server.close(); + } +} + function getServeAwait(getCLS, setCLS) { return async function serve(req, res) { setCLS(Math.random()); @@ -126,7 +154,8 @@ function getServeCallbacks(getCLS, setCLS) { const types = { 'async-resource': buildCurrentResource, - 'destroy': buildDestroy + 'destroy': buildDestroy, + 'async-local-storage': buildAsyncLocalStorage }; const asyncMethods = { @@ -134,7 +163,7 @@ const asyncMethods = { 'async': getServeAwait }; -function main({ type, asyncMethod }) { +function main({ type, asyncMethod, connections, duration, path }) { const { server, close } = types[type](asyncMethods[asyncMethod]); server @@ -143,7 +172,8 @@ function main({ type, asyncMethod }) { bench.http({ path, - connections + connections, + duration }, () => { close(); }); diff --git a/benchmark/async_hooks/http-server.js b/benchmark/async_hooks/http-server.js index 9e1c1214240..c8e44849b74 100644 --- a/benchmark/async_hooks/http-server.js +++ b/benchmark/async_hooks/http-server.js @@ -3,10 +3,11 @@ const common = require('../common.js'); const bench = common.createBenchmark(main, { asyncHooks: ['init', 'before', 'after', 'all', 
'disabled', 'none'], - connections: [50, 500] + connections: [50, 500], + duration: 5 }); -function main({ asyncHooks, connections }) { +function main({ asyncHooks, connections, duration }) { if (asyncHooks !== 'none') { let hooks = { init() {}, @@ -33,6 +34,7 @@ function main({ asyncHooks, connections }) { bench.http({ connections, path, + duration }, () => { server.close(); }); diff --git a/benchmark/buffers/buffer-base64-encode.js b/benchmark/buffers/buffer-base64-encode.js index d8b601bbd18..9837828a353 100644 --- a/benchmark/buffers/buffer-base64-encode.js +++ b/benchmark/buffers/buffer-base64-encode.js @@ -25,6 +25,8 @@ const common = require('../common.js'); const bench = common.createBenchmark(main, { len: [64 * 1024 * 1024], n: [32] +}, { + test: { len: 256 } }); function main({ n, len }) { diff --git a/benchmark/buffers/buffer-bytelength.js b/benchmark/buffers/buffer-bytelength.js index 1b324a49f89..fbbe0f040da 100644 --- a/benchmark/buffers/buffer-bytelength.js +++ b/benchmark/buffers/buffer-bytelength.js @@ -17,9 +17,9 @@ const chars = [ function main({ n, len, encoding }) { let strings = []; - let results = [ len * 16 ]; + let results = [len * 16]; if (encoding === 'buffer') { - strings = [ Buffer.alloc(len * 16, 'a') ]; + strings = [Buffer.alloc(len * 16, 'a')]; } else { for (const string of chars) { // Strings must be built differently, depending on encoding diff --git a/benchmark/buffers/buffer-creation.js b/benchmark/buffers/buffer-creation.js index 38d80da915c..862bff4fbab 100644 --- a/benchmark/buffers/buffer-creation.js +++ b/benchmark/buffers/buffer-creation.js @@ -16,7 +16,6 @@ const bench = common.createBenchmark(main, { function main({ len, n, type }) { let fn, i; switch (type) { - case '': case 'fast-alloc': fn = Buffer.alloc; break; diff --git a/benchmark/buffers/buffer-fill.js b/benchmark/buffers/buffer-fill.js index 02bc2a206c6..8d4d4996068 100644 --- a/benchmark/buffers/buffer-fill.js +++ b/benchmark/buffers/buffer-fill.js @@ -22,7 +22,7 @@ function main({ n, type, size }) { const buffer = Buffer.allocUnsafe(size); const testFunction = new Function('b', ` for (var i = 0; i < ${n}; i++) { - b.${type || 'fill(0)'}; + b.${type}; } `); bench.start(); diff --git a/benchmark/buffers/buffer-iterate.js b/benchmark/buffers/buffer-iterate.js index de002108a95..3e2a897903d 100644 --- a/benchmark/buffers/buffer-iterate.js +++ b/benchmark/buffers/buffer-iterate.js @@ -21,7 +21,7 @@ function main({ size, type, method, n }) { Buffer.alloc(size) : SlowBuffer(size).fill(0); - const fn = methods[method || 'for']; + const fn = methods[method]; bench.start(); fn(buffer, n); diff --git a/benchmark/buffers/buffer-read-float.js b/benchmark/buffers/buffer-read-float.js index e8c4f8bf549..656762d1d4f 100644 --- a/benchmark/buffers/buffer-read-float.js +++ b/benchmark/buffers/buffer-read-float.js @@ -9,7 +9,6 @@ const bench = common.createBenchmark(main, { }); function main({ n, type, endian, value }) { - type = type || 'Double'; const buff = Buffer.alloc(8); const fn = `read${type}${endian}`; const values = { diff --git a/benchmark/buffers/buffer-read-with-byteLength.js b/benchmark/buffers/buffer-read-with-byteLength.js index 6ba1594b8ac..c51ab11ae98 100644 --- a/benchmark/buffers/buffer-read-with-byteLength.js +++ b/benchmark/buffers/buffer-read-with-byteLength.js @@ -19,7 +19,7 @@ function main({ n, buf, type, byteLength }) { const buff = buf === 'fast' ? 
Buffer.alloc(8) : require('buffer').SlowBuffer(8); - const fn = `read${type || 'IntBE'}`; + const fn = `read${type}`; buff.writeDoubleLE(0, 0); bench.start(); diff --git a/benchmark/buffers/buffer-read.js b/benchmark/buffers/buffer-read.js index 2ddca60df44..e0ec13992c2 100644 --- a/benchmark/buffers/buffer-read.js +++ b/benchmark/buffers/buffer-read.js @@ -28,7 +28,7 @@ function main({ n, buf, type }) { const buff = buf === 'fast' ? Buffer.alloc(8) : require('buffer').SlowBuffer(8); - const fn = `read${type || 'UInt8'}`; + const fn = `read${type}`; buff.writeDoubleLE(0, 0); bench.start(); diff --git a/benchmark/buffers/buffer-swap.js b/benchmark/buffers/buffer-swap.js index a33bac4ae3e..e43957efbdb 100644 --- a/benchmark/buffers/buffer-swap.js +++ b/benchmark/buffers/buffer-swap.js @@ -7,6 +7,8 @@ const bench = common.createBenchmark(main, { method: ['swap16', 'swap32', 'swap64'/* , 'htons', 'htonl', 'htonll' */], len: [64, 256, 768, 1024, 2056, 8192], n: [1e6] +}, { + test: { len: 16 } }); // The htons and htonl methods below are used to benchmark the @@ -74,7 +76,7 @@ function genMethod(method) { function main({ method, len, n, aligned = 'true' }) { const buf = createBuffer(len, aligned === 'true'); - const bufferSwap = genMethod(method || 'swap16'); + const bufferSwap = genMethod(method); bufferSwap(n, buf); bench.start(); diff --git a/benchmark/buffers/buffer-write.js b/benchmark/buffers/buffer-write.js index db5a57d0023..5025dd2bca0 100644 --- a/benchmark/buffers/buffer-write.js +++ b/benchmark/buffers/buffer-write.js @@ -74,7 +74,7 @@ function main({ n, buf, type }) { const buff = buf === 'fast' ? Buffer.alloc(8) : require('buffer').SlowBuffer(8); - const fn = `write${type || 'UInt8'}`; + const fn = `write${type}`; if (!/\d/.test(fn)) benchSpecialInt(buff, fn, n); diff --git a/benchmark/buffers/dataview-set.js b/benchmark/buffers/dataview-set.js index a741d11356e..b7914fe45f1 100644 --- a/benchmark/buffers/dataview-set.js +++ b/benchmark/buffers/dataview-set.js @@ -40,7 +40,6 @@ const mod = { }; function main({ n, type }) { - type = type || 'Uint8'; const ab = new ArrayBuffer(8); const dv = new DataView(ab, 0, 8); const le = /LE$/.test(type); diff --git a/benchmark/common.js b/benchmark/common.js index c5791c2bacf..d2103704ab2 100644 --- a/benchmark/common.js +++ b/benchmark/common.js @@ -3,222 +3,263 @@ const child_process = require('child_process'); const http_benchmarkers = require('./_http-benchmarkers.js'); -exports.buildType = process.features.debug ? 'Debug' : 'Release'; +class Benchmark { + // Used to make sure a benchmark only start a timer once + #started = false; -exports.createBenchmark = function(fn, configs, options) { - return new Benchmark(fn, configs, options); -}; + // Indicate that the benchmark ended + #ended = false; -function Benchmark(fn, configs, options) { - // Use the file name as the name of the benchmark - this.name = require.main.filename.slice(__dirname.length + 1); - // Parse job-specific configuration from the command line arguments - const parsed_args = this._parseArgs(process.argv.slice(2), configs); - this.options = parsed_args.cli; - this.extra_options = parsed_args.extra; - // The configuration list as a queue of jobs - this.queue = this._queue(this.options); - // The configuration of the current job, head of the queue - this.config = this.queue[0]; - // Execution arguments i.e. 
flags used to run the jobs - this.flags = []; - if (options && options.flags) { - this.flags = this.flags.concat(options.flags); - } - if (process.env.NODE_BENCHMARK_FLAGS) { - const flags = process.env.NODE_BENCHMARK_FLAGS.split(/\s+/); - this.flags = this.flags.concat(flags); - } // Holds process.hrtime value - this._time = [0, 0]; - // Used to make sure a benchmark only start a timer once - this._started = false; - this._ended = false; + #time = [0, 0]; - // this._run will use fork() to create a new process for each configuration - // combination. - if (process.env.hasOwnProperty('NODE_RUN_BENCHMARK_FN')) { - process.nextTick(() => fn(this.config)); - } else { - process.nextTick(() => this._run()); - } -} + // Use the file name as the name of the benchmark + name = require.main.filename.slice(__dirname.length + 1); -Benchmark.prototype._parseArgs = function(argv, configs) { - const cliOptions = {}; - const extraOptions = {}; - const validArgRE = /^(.+?)=([\s\S]*)$/; - // Parse configuration arguments - for (const arg of argv) { - const match = arg.match(validArgRE); - if (!match) { - console.error(`bad argument: ${arg}`); - process.exit(1); - } - const config = match[1]; - - if (configs[config]) { - // Infer the type from the config object and parse accordingly - const isNumber = typeof configs[config][0] === 'number'; - const value = isNumber ? +match[2] : match[2]; - if (!cliOptions[config]) - cliOptions[config] = []; - cliOptions[config].push(value); - } else { - extraOptions[config] = match[2]; + // Execution arguments i.e. flags used to run the jobs + flags = process.env.NODE_BENCHMARK_FLAGS ? + process.env.NODE_BENCHMARK_FLAGS.split(/\s+/) : + []; + + constructor(fn, configs, options = {}) { + // Parse job-specific configuration from the command line arguments + const argv = process.argv.slice(2); + const parsed_args = this._parseArgs(argv, configs, options); + this.options = parsed_args.cli; + this.extra_options = parsed_args.extra; + if (options.flags) { + this.flags = this.flags.concat(options.flags); } - } - return { cli: Object.assign({}, configs, cliOptions), extra: extraOptions }; -}; -Benchmark.prototype._queue = function(options) { - const queue = []; - const keys = Object.keys(options); + // The configuration list as a queue of jobs + this.queue = this._queue(this.options); - // Perform a depth-first walk though all options to generate a - // configuration list that contains all combinations. - function recursive(keyIndex, prevConfig) { - const key = keys[keyIndex]; - const values = options[key]; - const type = typeof values[0]; + // The configuration of the current job, head of the queue + this.config = this.queue[0]; - for (const value of values) { - if (typeof value !== 'number' && typeof value !== 'string') { - throw new TypeError(`configuration "${key}" had type ${typeof value}`); - } - if (typeof value !== type) { - // This is a requirement for being able to consistently and predictably - // parse CLI provided configuration values. - throw new TypeError(`configuration "${key}" has mixed types`); + process.nextTick(() => { + if (process.env.hasOwnProperty('NODE_RUN_BENCHMARK_FN')) { + fn(this.config); + } else { + // _run will use fork() to create a new process for each configuration + // combination. + this._run(); } + }); + } - const currConfig = Object.assign({ [key]: value }, prevConfig); + _parseArgs(argv, configs, options) { + const cliOptions = {}; + + // Check for the test mode first. 
+ const testIndex = argv.indexOf('--test'); + if (testIndex !== -1) { + for (const [key, rawValue] of Object.entries(configs)) { + let value = Array.isArray(rawValue) ? rawValue[0] : rawValue; + // Set numbers to one by default to reduce the runtime. + if (typeof value === 'number') { + if (key === 'dur' || key === 'duration') { + value = 0.05; + } else if (value > 1) { + value = 1; + } + } + cliOptions[key] = [value]; + } + // Override specific test options. + if (options.test) { + for (const [key, value] of Object.entries(options.test)) { + cliOptions[key] = Array.isArray(value) ? value : [value]; + } + } + argv.splice(testIndex, 1); + } else { + // Accept single values instead of arrays. + for (const [key, value] of Object.entries(configs)) { + if (!Array.isArray(value)) + configs[key] = [value]; + } + } - if (keyIndex + 1 < keys.length) { - recursive(keyIndex + 1, currConfig); + const extraOptions = {}; + const validArgRE = /^(.+?)=([\s\S]*)$/; + // Parse configuration arguments + for (const arg of argv) { + const match = arg.match(validArgRE); + if (!match) { + console.error(`bad argument: ${arg}`); + process.exit(1); + } + const [, key, value] = match; + if (Object.prototype.hasOwnProperty.call(configs, key)) { + if (!cliOptions[key]) + cliOptions[key] = []; + cliOptions[key].push( + // Infer the type from the config object and parse accordingly + typeof configs[key][0] === 'number' ? +value : value + ); } else { - queue.push(currConfig); + extraOptions[key] = value; } } + return { cli: { ...configs, ...cliOptions }, extra: extraOptions }; } - if (keys.length > 0) { - recursive(0, {}); - } else { - queue.push({}); - } - - return queue; -}; - -// Benchmark an http server. -exports.default_http_benchmarker = - http_benchmarkers.default_http_benchmarker; -exports.PORT = http_benchmarkers.PORT; - -Benchmark.prototype.http = function(options, cb) { - const self = this; - const http_options = Object.assign({ }, options); - http_options.benchmarker = http_options.benchmarker || - self.config.benchmarker || - self.extra_options.benchmarker || - exports.default_http_benchmarker; - http_benchmarkers.run( - http_options, (error, code, used_benchmarker, result, elapsed) => { - if (cb) { - cb(code); - } - if (error) { - console.error(error); - process.exit(code || 1); + _queue(options) { + const queue = []; + const keys = Object.keys(options); + + // Perform a depth-first walk through all options to generate a + // configuration list that contains all combinations. + function recursive(keyIndex, prevConfig) { + const key = keys[keyIndex]; + const values = options[key]; + + for (const value of values) { + if (typeof value !== 'number' && typeof value !== 'string') { + throw new TypeError( + `configuration "${key}" had type ${typeof value}`); + } + if (typeof value !== typeof values[0]) { + // This is a requirement for being able to consistently and + // predictably parse CLI provided configuration values. + throw new TypeError(`configuration "${key}" has mixed types`); + } + + const currConfig = { [key]: value, ...prevConfig }; + + if (keyIndex + 1 < keys.length) { + recursive(keyIndex + 1, currConfig); + } else { + queue.push(currConfig); + } } - self.config.benchmarker = used_benchmarker; - self.report(result, elapsed); } - ); -}; -Benchmark.prototype._run = function() { - const self = this; - // If forked, report to the parent. 
- if (process.send) { - process.send({ - type: 'config', - name: this.name, - queueLength: this.queue.length, - }); - } + if (keys.length > 0) { + recursive(0, {}); + } else { + queue.push({}); + } - (function recursive(queueIndex) { - const config = self.queue[queueIndex]; + return queue; + } - // Set NODE_RUN_BENCHMARK_FN to indicate that the child shouldn't construct - // a configuration queue, but just execute the benchmark function. - const childEnv = Object.assign({}, process.env); - childEnv.NODE_RUN_BENCHMARK_FN = ''; + http(options, cb) { + const http_options = { ...options }; + http_options.benchmarker = http_options.benchmarker || + this.config.benchmarker || + this.extra_options.benchmarker || + http_benchmarkers.default_http_benchmarker; + http_benchmarkers.run( + http_options, (error, code, used_benchmarker, result, elapsed) => { + if (cb) { + cb(code); + } + if (error) { + console.error(error); + process.exit(code || 1); + } + this.config.benchmarker = used_benchmarker; + this.report(result, elapsed); + } + ); + } - // Create configuration arguments - const childArgs = []; - for (const key of Object.keys(config)) { - childArgs.push(`${key}=${config[key]}`); - } - for (const key of Object.keys(self.extra_options)) { - childArgs.push(`${key}=${self.extra_options[key]}`); + _run() { + // If forked, report to the parent. + if (process.send) { + process.send({ + type: 'config', + name: this.name, + queueLength: this.queue.length, + }); } - const child = child_process.fork(require.main.filename, childArgs, { - env: childEnv, - execArgv: self.flags.concat(process.execArgv), - }); - child.on('message', sendResult); - child.on('close', (code) => { - if (code) { - process.exit(code); - } + const recursive = (queueIndex) => { + const config = this.queue[queueIndex]; + + // Set NODE_RUN_BENCHMARK_FN to indicate that the child shouldn't + // construct a configuration queue, but just execute the benchmark + // function. + const childEnv = { ...process.env }; + childEnv.NODE_RUN_BENCHMARK_FN = ''; - if (queueIndex + 1 < self.queue.length) { - recursive(queueIndex + 1); + // Create configuration arguments + const childArgs = []; + for (const [key, value] of Object.entries(config)) { + childArgs.push(`${key}=${value}`); + } + for (const [key, value] of Object.entries(this.extra_options)) { + childArgs.push(`${key}=${value}`); } - }); - })(0); -}; -Benchmark.prototype.start = function() { - if (this._started) { - throw new Error('Called start more than once in a single benchmark'); + const child = child_process.fork(require.main.filename, childArgs, { + env: childEnv, + execArgv: this.flags.concat(process.execArgv), + }); + child.on('message', sendResult); + child.on('close', (code) => { + if (code) { + process.exit(code); + } + + if (queueIndex + 1 < this.queue.length) { + recursive(queueIndex + 1); + } + }); + }; + + recursive(0); } - this._started = true; - this._time = process.hrtime(); -}; - -Benchmark.prototype.end = function(operations) { - // Get elapsed time now and do error checking later for accuracy. 
- const elapsed = process.hrtime(this._time); - if (!this._started) { - throw new Error('called end without start'); - } - if (this._ended) { - throw new Error('called end multiple times'); - } - if (typeof operations !== 'number') { - throw new Error('called end() without specifying operation count'); - } - if (!process.env.NODEJS_BENCHMARK_ZERO_ALLOWED && operations <= 0) { - throw new Error('called end() with operation count <= 0'); + start() { + if (this.#started) { + throw new Error('Called start more than once in a single benchmark'); + } + this.#started = true; + this.#time = process.hrtime(); } - if (elapsed[0] === 0 && elapsed[1] === 0) { - if (!process.env.NODEJS_BENCHMARK_ZERO_ALLOWED) - throw new Error('insufficient clock precision for short benchmark'); - // Avoid dividing by zero - elapsed[1] = 1; + + end(operations) { + // Get elapsed time now and do error checking later for accuracy. + const elapsed = process.hrtime(this.#time); + + if (!this.#started) { + throw new Error('called end without start'); + } + if (this.#ended) { + throw new Error('called end multiple times'); + } + if (typeof operations !== 'number') { + throw new Error('called end() without specifying operation count'); + } + if (!process.env.NODEJS_BENCHMARK_ZERO_ALLOWED && operations <= 0) { + throw new Error('called end() with operation count <= 0'); + } + if (elapsed[0] === 0 && elapsed[1] === 0) { + if (!process.env.NODEJS_BENCHMARK_ZERO_ALLOWED) + throw new Error('insufficient clock precision for short benchmark'); + // Avoid dividing by zero + elapsed[1] = 1; + } + + this.#ended = true; + const time = elapsed[0] + elapsed[1] / 1e9; + const rate = operations / time; + this.report(rate, elapsed); } - this._ended = true; - const time = elapsed[0] + elapsed[1] / 1e9; - const rate = operations / time; - this.report(rate, elapsed); -}; + report(rate, elapsed) { + sendResult({ + name: this.name, + conf: this.config, + rate, + time: elapsed[0] + elapsed[1] / 1e9, + type: 'report', + }); + } +} function formatResult(data) { // Construct configuration string, " A=a, B=b, ..." @@ -242,27 +283,6 @@ function sendResult(data) { console.log(formatResult(data)); } } -exports.sendResult = sendResult; - -Benchmark.prototype.report = function(rate, elapsed) { - sendResult({ - name: this.name, - conf: this.config, - rate: rate, - time: elapsed[0] + elapsed[1] / 1e9, - type: 'report', - }); -}; - -exports.binding = function(bindingName) { - try { - const { internalBinding } = require('internal/test/binding'); - - return internalBinding(bindingName); - } catch { - return process.binding(bindingName); - } -}; const urls = { long: 'http://nodejs.org:89/docs/latest/api/foo/bar/qua/13949281/0f28b/' + @@ -278,7 +298,6 @@ const urls = { percent: 'https://%E4%BD%A0/foo', dot: 'https://example.org/./a/../b/./c', }; -exports.urls = urls; const searchParams = { noencode: 'foo=bar&baz=quux&xyzzy=thud', @@ -293,7 +312,6 @@ const searchParams = { manyblankpairs: '&&&&&&&&&&&&&&&&&&&&&&&&', altspaces: 'foo+bar=baz+quux&xyzzy+thud=quuy+quuz&abc=def+ghi', }; -exports.searchParams = searchParams; function getUrlData(withBase) { const data = require('../test/fixtures/wpt/url/resources/urltestdata.json'); @@ -309,8 +327,6 @@ function getUrlData(withBase) { return result; } -exports.urlDataTypes = Object.keys(urls).concat(['wpt']); - /** * Generate an array of data for URL benchmarks to use. * The size of the resulting data set is the original data size * 2 ** `e`. 
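// Illustrative sketch, not part of the patch: the depth-first walk in
// _queue() above expands the configuration object into one job per
// combination of values, and end(operations) then reports a rate of
// operations / (elapsed[0] + elapsed[1] / 1e9) operations per second.
function expandConfigs(options) {
  const keys = Object.keys(options);
  if (keys.length === 0) return [{}];
  const queue = [];
  (function recursive(i, prev) {
    for (const value of options[keys[i]]) {
      const curr = { ...prev, [keys[i]]: value };
      if (i + 1 < keys.length) recursive(i + 1, curr);
      else queue.push(curr);
    }
  })(0, {});
  return queue;
}
// expandConfigs({ len: [16, 32], method: ['deepEqual', 'notDeepEqual'] })
// yields four configs: every (len, method) pair, in depth-first order.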
@@ -354,4 +370,26 @@ function bakeUrlData(type, e = 0, withBase = false, asUrl = false) { } return result; } -exports.bakeUrlData = bakeUrlData; + +module.exports = { + Benchmark, + PORT: http_benchmarkers.PORT, + bakeUrlData, + binding(bindingName) { + try { + const { internalBinding } = require('internal/test/binding'); + + return internalBinding(bindingName); + } catch { + return process.binding(bindingName); + } + }, + buildType: process.features.debug ? 'Debug' : 'Release', + createBenchmark(fn, configs, options) { + return new Benchmark(fn, configs, options); + }, + sendResult, + searchParams, + urlDataTypes: Object.keys(urls).concat(['wpt']), + urls, +}; diff --git a/benchmark/compare.js b/benchmark/compare.js index 53f82bb4b9f..5c9cd03be3f 100644 --- a/benchmark/compare.js +++ b/benchmark/compare.js @@ -9,7 +9,7 @@ const BenchmarkProgress = require('./_benchmark_progress.js'); // // Parse arguments // -const cli = CLI(`usage: ./node compare.js [options] [--] ... +const cli = new CLI(`usage: ./node compare.js [options] [--] ... Run each benchmark in the directory many times using two different node versions. More than one directory can be specified. The output is formatted as csv, which can be processed using for diff --git a/benchmark/crypto/aes-gcm-throughput.js b/benchmark/crypto/aes-gcm-throughput.js index b1b08c48170..3f2b9ba45eb 100644 --- a/benchmark/crypto/aes-gcm-throughput.js +++ b/benchmark/crypto/aes-gcm-throughput.js @@ -9,9 +9,6 @@ const bench = common.createBenchmark(main, { }); function main({ n, len, cipher }) { - // Default cipher for tests. - if (cipher === '') - cipher = 'aes-128-gcm'; const message = Buffer.alloc(len, 'b'); const key = crypto.randomBytes(keylen[cipher]); const iv = crypto.randomBytes(12); diff --git a/benchmark/crypto/cipher-stream.js b/benchmark/crypto/cipher-stream.js index 4bb1695e2d2..47a8931a540 100644 --- a/benchmark/crypto/cipher-stream.js +++ b/benchmark/crypto/cipher-stream.js @@ -3,16 +3,15 @@ const common = require('../common.js'); const bench = common.createBenchmark(main, { writes: [500], - cipher: [ 'AES192', 'AES256' ], + cipher: ['AES192', 'AES256'], type: ['asc', 'utf', 'buf'], len: [2, 1024, 102400, 1024 * 1024], api: ['legacy', 'stream'] +}, { + flags: ['--no-warnings'] }); function main({ api, cipher, type, len, writes }) { - // Default cipher for tests. - if (cipher === '') - cipher = 'AES192'; if (api === 'stream' && /^v0\.[0-8]\./.test(process.version)) { console.error('Crypto streams not available until v0.10'); // Use the legacy, just so that we can compare them. @@ -27,7 +26,6 @@ function main({ api, cipher, type, len, writes }) { alice.generateKeys(); bob.generateKeys(); - const pubEnc = /^v0\.[0-8]/.test(process.version) ? 'binary' : null; const alice_secret = alice.computeSecret(bob.getPublicKey(), pubEnc, 'hex'); const bob_secret = bob.computeSecret(alice.getPublicKey(), pubEnc, 'hex'); diff --git a/benchmark/es/defaultparams-bench.js b/benchmark/es/defaultparams-bench.js index ab9cc45749c..fde4cb11ad4 100644 --- a/benchmark/es/defaultparams-bench.js +++ b/benchmark/es/defaultparams-bench.js @@ -36,8 +36,6 @@ function runDefaultParams(n) { function main({ n, method }) { switch (method) { - case '': - // Empty string falls through to next line as default, mostly for tests. 
case 'withoutdefaults': runOldStyleDefaults(n); break; diff --git a/benchmark/es/destructuring-bench.js b/benchmark/es/destructuring-bench.js index f1b484bd47e..c07c0383da9 100644 --- a/benchmark/es/destructuring-bench.js +++ b/benchmark/es/destructuring-bench.js @@ -36,8 +36,6 @@ function runSwapDestructured(n) { function main({ n, method }) { switch (method) { - case '': - // Empty string falls through to next line as default, mostly for tests. case 'swap': runSwapManual(n); break; diff --git a/benchmark/es/destructuring-object-bench.js b/benchmark/es/destructuring-object-bench.js index 68dc17073ba..29c83bd188e 100644 --- a/benchmark/es/destructuring-object-bench.js +++ b/benchmark/es/destructuring-object-bench.js @@ -33,8 +33,6 @@ function runDestructured(n) { function main({ n, method }) { switch (method) { - case '': - // Empty string falls through to next line as default, mostly for tests. case 'normal': runNormal(n); break; diff --git a/benchmark/es/foreach-bench.js b/benchmark/es/foreach-bench.js index 88bfed00fbc..6992a1a5749 100644 --- a/benchmark/es/foreach-bench.js +++ b/benchmark/es/foreach-bench.js @@ -54,8 +54,6 @@ function main({ n, count, method }) { items[i] = i; switch (method) { - case '': - // Empty string falls through to next line as default, mostly for tests. case 'for': fn = useFor; break; diff --git a/benchmark/es/map-bench.js b/benchmark/es/map-bench.js index 1b3ba4789db..d0b8534cf7c 100644 --- a/benchmark/es/map-bench.js +++ b/benchmark/es/map-bench.js @@ -104,8 +104,6 @@ function runMap(n) { function main({ n, method }) { switch (method) { - case '': - // Empty string falls through to next line as default, mostly for tests. case 'object': runObject(n); break; diff --git a/benchmark/es/restparams-bench.js b/benchmark/es/restparams-bench.js index d568e287445..8129bc92533 100644 --- a/benchmark/es/restparams-bench.js +++ b/benchmark/es/restparams-bench.js @@ -51,8 +51,6 @@ function runUseArguments(n) { function main({ n, method }) { let fn; switch (method) { - case '': - // Empty string falls through to next line as default, mostly for tests. case 'copy': fn = runCopyArguments; break; diff --git a/benchmark/es/spread-assign.js b/benchmark/es/spread-assign.js index 97a5c5458e3..970512aa6b9 100644 --- a/benchmark/es/spread-assign.js +++ b/benchmark/es/spread-assign.js @@ -18,8 +18,6 @@ function main({ n, context, count, rest, method }) { let obj; // eslint-disable-line no-unused-vars switch (method) { - case '': - // Empty string falls through to next line as default, mostly for tests. case '_extend': bench.start(); for (let i = 0; i < n; i++) diff --git a/benchmark/es/spread-bench.js b/benchmark/es/spread-bench.js index fc56c9433f2..ae5b4abbb99 100644 --- a/benchmark/es/spread-bench.js +++ b/benchmark/es/spread-bench.js @@ -32,8 +32,6 @@ function main({ n, context, count, rest, method }) { args[i] = i; switch (method) { - case '': - // Empty string falls through to next line as default, mostly for tests. case 'apply': bench.start(); for (let i = 0; i < n; i++) diff --git a/benchmark/es/string-concatenations.js b/benchmark/es/string-concatenations.js index 72fb7f9969b..3c0b27e0501 100644 --- a/benchmark/es/string-concatenations.js +++ b/benchmark/es/string-concatenations.js @@ -23,8 +23,6 @@ function main({ n, mode }) { let string; switch (mode) { - case '': - // Empty string falls through to next line as default, mostly for tests. 
case 'multi-concat': bench.start(); for (let i = 0; i < n; i++) diff --git a/benchmark/es/string-repeat.js b/benchmark/es/string-repeat.js index 9e33e4acf47..f4bd616e4ad 100644 --- a/benchmark/es/string-repeat.js +++ b/benchmark/es/string-repeat.js @@ -18,8 +18,6 @@ function main({ n, size, encoding, mode }) { let str; switch (mode) { - case '': - // Empty string falls through to next line as default, mostly for tests. case 'Array': bench.start(); for (let i = 0; i < n; i++) diff --git a/benchmark/fs/read-stream-throughput.js b/benchmark/fs/read-stream-throughput.js index 34c25760ea9..5984317ff91 100644 --- a/benchmark/fs/read-stream-throughput.js +++ b/benchmark/fs/read-stream-throughput.js @@ -11,19 +11,18 @@ tmpdir.refresh(); const filename = path.resolve(tmpdir.path, `.removeme-benchmark-garbage-${process.pid}`); -let encodingType, encoding, size, filesize; - const bench = common.createBenchmark(main, { encodingType: ['buf', 'asc', 'utf'], - filesize: [1000 * 1024 * 1024], - size: [1024, 4096, 65535, 1024 * 1024] + filesize: [1000 * 1024], + highWaterMark: [1024, 4096, 65535, 1024 * 1024], + n: 1024 }); function main(conf) { - encodingType = conf.encodingType; - size = conf.size; - filesize = conf.filesize; + const { encodingType, highWaterMark, filesize } = conf; + let { n } = conf; + let encoding = ''; switch (encodingType) { case 'buf': encoding = null; @@ -38,34 +37,8 @@ function main(conf) { throw new Error(`invalid encodingType: ${encodingType}`); } - makeFile(); -} - -function runTest() { - assert(fs.statSync(filename).size === filesize); - const rs = fs.createReadStream(filename, { - highWaterMark: size, - encoding: encoding - }); - - rs.on('open', () => { - bench.start(); - }); - - let bytes = 0; - rs.on('data', (chunk) => { - bytes += chunk.length; - }); - - rs.on('end', () => { - try { fs.unlinkSync(filename); } catch {} - // MB/sec - bench.end(bytes / (1024 * 1024)); - }); -} - -function makeFile() { - const buf = Buffer.allocUnsafe(filesize / 1024); + // Make file + const buf = Buffer.allocUnsafe(filesize); if (encoding === 'utf8') { // ü for (let i = 0; i < buf.length; i++) { @@ -78,16 +51,38 @@ function makeFile() { } try { fs.unlinkSync(filename); } catch {} - let w = 1024; const ws = fs.createWriteStream(filename); - ws.on('close', runTest); + ws.on('close', runTest.bind(null, filesize, highWaterMark, encoding, n)); ws.on('drain', write); write(); function write() { do { - w--; - } while (false !== ws.write(buf) && w > 0); - if (w === 0) + n--; + } while (false !== ws.write(buf) && n > 0); + if (n === 0) ws.end(); } } + +function runTest(filesize, highWaterMark, encoding, n) { + assert(fs.statSync(filename).size === filesize * n); + const rs = fs.createReadStream(filename, { + highWaterMark, + encoding + }); + + rs.on('open', () => { + bench.start(); + }); + + let bytes = 0; + rs.on('data', (chunk) => { + bytes += chunk.length; + }); + + rs.on('end', () => { + try { fs.unlinkSync(filename); } catch {} + // MB/sec + bench.end(bytes / (1024 * 1024)); + }); +} diff --git a/benchmark/fs/readfile.js b/benchmark/fs/readfile.js index 361ffbff597..3f996e02ede 100644 --- a/benchmark/fs/readfile.js +++ b/benchmark/fs/readfile.js @@ -14,12 +14,12 @@ const filename = path.resolve(tmpdir.path, `.removeme-benchmark-garbage-${process.pid}`); const bench = common.createBenchmark(main, { - dur: [5], + duration: [5], len: [1024, 16 * 1024 * 1024], concurrent: [1, 10] }); -function main({ len, dur, concurrent }) { +function main({ len, duration, concurrent }) { try { 
fs.unlinkSync(filename); } catch {} let data = Buffer.alloc(len, 'x'); fs.writeFileSync(filename, data); @@ -33,7 +33,7 @@ function main({ len, dur, concurrent }) { bench.end(reads); try { fs.unlinkSync(filename); } catch {} process.exit(0); - }, dur * 1000); + }, duration * 1000); function read() { fs.readFile(filename, afterRead); diff --git a/benchmark/http/chunked.js b/benchmark/http/chunked.js index 52b4605715c..9ae7bb7495f 100644 --- a/benchmark/http/chunked.js +++ b/benchmark/http/chunked.js @@ -13,10 +13,11 @@ const common = require('../common.js'); const bench = common.createBenchmark(main, { n: [1, 4, 8, 16], len: [1, 64, 256], - c: [100] + c: [100], + duration: 5 }); -function main({ len, n, c }) { +function main({ len, n, c, duration }) { const http = require('http'); const chunk = Buffer.alloc(len, '8'); @@ -33,7 +34,8 @@ function main({ len, n, c }) { server.listen(common.PORT, () => { bench.http({ - connections: c + connections: c, + duration }, () => { server.close(); }); diff --git a/benchmark/http/cluster.js b/benchmark/http/cluster.js index 3bcd061a089..0d97b516ec5 100644 --- a/benchmark/http/cluster.js +++ b/benchmark/http/cluster.js @@ -9,14 +9,15 @@ if (cluster.isMaster) { // Unicode confuses ab on os x. type: ['bytes', 'buffer'], len: [4, 1024, 102400], - c: [50, 500] + c: [50, 500], + duration: 5, }); } else { const port = parseInt(process.env.PORT || PORT); require('../fixtures/simple-http-server.js').listen(port); } -function main({ type, len, c }) { +function main({ type, len, c, duration }) { process.env.PORT = PORT; let workers = 0; const w1 = cluster.fork(); @@ -32,7 +33,8 @@ function main({ type, len, c }) { bench.http({ path: path, - connections: c + connections: c, + duration }, () => { w1.destroy(); w2.destroy(); diff --git a/benchmark/http/end-vs-write-end.js b/benchmark/http/end-vs-write-end.js index 38e9b89a97a..60174ef3adf 100644 --- a/benchmark/http/end-vs-write-end.js +++ b/benchmark/http/end-vs-write-end.js @@ -14,10 +14,11 @@ const bench = common.createBenchmark(main, { type: ['asc', 'utf', 'buf'], len: [64 * 1024, 128 * 1024, 256 * 1024, 1024 * 1024], c: [100], - method: ['write', 'end'] + method: ['write', 'end'], + duration: 5 }); -function main({ len, type, method, c }) { +function main({ len, type, method, c, duration }) { const http = require('http'); let chunk; switch (type) { @@ -49,7 +50,8 @@ function main({ len, type, method, c }) { server.listen(common.PORT, () => { bench.http({ - connections: c + connections: c, + duration }, () => { server.close(); }); diff --git a/benchmark/http/headers.js b/benchmark/http/headers.js index f8014a6a085..b83ac17e742 100644 --- a/benchmark/http/headers.js +++ b/benchmark/http/headers.js @@ -6,9 +6,10 @@ const http = require('http'); const bench = common.createBenchmark(main, { n: [10, 1000], len: [1, 100], + duration: 5 }); -function main({ len, n }) { +function main({ len, n, duration }) { const headers = { 'Connection': 'keep-alive', 'Transfer-Encoding': 'chunked', @@ -29,7 +30,8 @@ function main({ len, n }) { server.listen(common.PORT, () => { bench.http({ path: '/', - connections: 10 + connections: 10, + duration }, () => { server.close(); }); diff --git a/benchmark/http/incoming_headers.js b/benchmark/http/incoming_headers.js index 810c92687bd..983bd5632fc 100644 --- a/benchmark/http/incoming_headers.js +++ b/benchmark/http/incoming_headers.js @@ -3,12 +3,13 @@ const common = require('../common.js'); const http = require('http'); const bench = common.createBenchmark(main, { - c: [50], // Concurrent 
connections - n: [20], // Number of header lines to append after the common headers - w: [0, 6], // Amount of trailing whitespace + connections: [50], // Concurrent connections + headers: [20], // Number of header lines to append after the common headers + w: [0, 6], // Amount of trailing whitespace + duration: 5 }); -function main({ c, n, w }) { +function main({ connections, headers, w, duration }) { const server = http.createServer((req, res) => { res.end(); }); @@ -21,7 +22,7 @@ function main({ c, n, w }) { 'Date': new Date().toString(), 'Cache-Control': 'no-cache' }; - for (let i = 0; i < n; i++) { + for (let i = 0; i < headers; i++) { // Note: // - autocannon does not send header values with OWS // - wrk can only send trailing OWS. This is a side-effect of wrk @@ -31,8 +32,9 @@ function main({ c, n, w }) { } bench.http({ path: '/', - connections: c, - headers + connections, + headers, + duration }, () => { server.close(); }); diff --git a/benchmark/http/set-header.js b/benchmark/http/set-header.js index 1909c0991df..48e0163a6ce 100644 --- a/benchmark/http/set-header.js +++ b/benchmark/http/set-header.js @@ -3,7 +3,8 @@ const common = require('../common.js'); const PORT = common.PORT; const bench = common.createBenchmark(main, { - res: ['normal', 'setHeader', 'setHeaderWH'] + res: ['normal', 'setHeader', 'setHeaderWH'], + duration: 5 }); const type = 'bytes'; @@ -15,16 +16,17 @@ const c = 50; // normal: writeHead(status, {...}) // setHeader: statusCode = status, setHeader(...) x2 // setHeaderWH: setHeader(...), writeHead(status, ...) -function main({ res }) { +function main({ res, duration }) { process.env.PORT = PORT; const server = require('../fixtures/simple-http-server.js') .listen(PORT) .on('listening', () => { - const path = `/${type}/${len}/${chunks}/normal/${chunkedEnc}`; + const path = `/${type}/${len}/${chunks}/${res}/${chunkedEnc}`; bench.http({ path: path, - connections: c + connections: c, + duration }, () => { server.close(); }); diff --git a/benchmark/http/simple.js b/benchmark/http/simple.js index 95409faa9a8..095b15ca446 100644 --- a/benchmark/http/simple.js +++ b/benchmark/http/simple.js @@ -7,18 +7,20 @@ const bench = common.createBenchmark(main, { len: [4, 1024, 102400], chunks: [1, 4], c: [50, 500], - chunkedEnc: [1, 0] + chunkedEnc: [1, 0], + duration: 5 }); -function main({ type, len, chunks, c, chunkedEnc, res }) { +function main({ type, len, chunks, c, chunkedEnc, duration }) { const server = require('../fixtures/simple-http-server.js') .listen(common.PORT) .on('listening', () => { const path = `/${type}/${len}/${chunks}/normal/${chunkedEnc}`; bench.http({ - path: path, - connections: c + path, + connections: c, + duration }, () => { server.close(); }); diff --git a/benchmark/http2/compat.js b/benchmark/http2/compat.js index 5d06ccf3178..2c7e732b07f 100644 --- a/benchmark/http2/compat.js +++ b/benchmark/http2/compat.js @@ -9,10 +9,11 @@ const bench = common.createBenchmark(main, { requests: [100, 1000, 5000], streams: [1, 10, 20, 40, 100, 200], clients: [2], - benchmarker: ['h2load'] + benchmarker: ['test-double-http2'], + duration: 5 }, { flags: ['--no-warnings'] }); -function main({ requests, streams, clients }) { +function main({ requests, streams, clients, duration }) { const http2 = require('http2'); const server = http2.createServer(); server.on('request', (req, res) => { @@ -29,7 +30,8 @@ function main({ requests, streams, clients }) { requests, maxConcurrentStreams: streams, clients, - threads: clients + threads: clients, + duration }, () => { 
server.close(); }); }); } diff --git a/benchmark/http2/respond-with-fd.js b/benchmark/http2/respond-with-fd.js index 35856490f7e..5bf5988d16a 100644 --- a/benchmark/http2/respond-with-fd.js +++ b/benchmark/http2/respond-with-fd.js @@ -10,10 +10,11 @@ const bench = common.createBenchmark(main, { requests: [100, 1000, 5000], streams: [1, 10, 20, 40, 100, 200], clients: [2], - benchmarker: ['h2load'] + benchmarker: ['test-double-http2'], + duration: 5 }, { flags: ['--no-warnings'] }); -function main({ requests, streams, clients }) { +function main({ requests, streams, clients, duration }) { fs.open(file, 'r', (err, fd) => { if (err) throw err; @@ -30,6 +31,7 @@ function main({ requests, streams, clients }) { requests, maxConcurrentStreams: streams, clients, + duration, threads: clients }, () => server.close()); }); diff --git a/benchmark/http2/simple.js b/benchmark/http2/simple.js index aab7c6b609b..929c4c655e1 100644 --- a/benchmark/http2/simple.js +++ b/benchmark/http2/simple.js @@ -9,10 +9,11 @@ const bench = common.createBenchmark(main, { requests: [100, 1000, 5000], streams: [1, 10, 20, 40, 100, 200], clients: [2], - benchmarker: ['h2load'] + benchmarker: ['test-double-http2'], + duration: 5 }, { flags: ['--no-warnings'] }); -function main({ requests, streams, clients }) { +function main({ requests, streams, clients, duration }) { const http2 = require('http2'); const server = http2.createServer(); server.on('stream', (stream) => { @@ -27,6 +28,7 @@ function main({ requests, streams, clients }) { requests, maxConcurrentStreams: streams, clients, + duration, threads: clients }, () => { server.close(); }); }); diff --git a/benchmark/http2/write.js b/benchmark/http2/write.js index fc3203c6e55..7ea8b2c02da 100644 --- a/benchmark/http2/write.js +++ b/benchmark/http2/write.js @@ -6,10 +6,11 @@ const bench = common.createBenchmark(main, { streams: [100, 200, 1000], length: [64 * 1024, 128 * 1024, 256 * 1024, 1024 * 1024], size: [100000], - benchmarker: ['h2load'] + benchmarker: ['test-double-http2'], + duration: 5 }, { flags: ['--no-warnings'] }); -function main({ streams, length, size }) { +function main({ streams, length, size, duration }) { const http2 = require('http2'); const server = http2.createServer(); server.on('stream', (stream) => { @@ -29,6 +30,7 @@ function main({ streams, length, size }) { bench.http({ path: '/', requests: 10000, + duration, maxConcurrentStreams: streams, }, () => { server.close(); }); }); diff --git a/benchmark/misc/arguments.js b/benchmark/misc/arguments.js index 8fefe617a51..39f4020b1ad 100644 --- a/benchmark/misc/arguments.js +++ b/benchmark/misc/arguments.js @@ -34,8 +34,6 @@ function usingPredefined() { function main({ n, method, args }) { let fn; switch (method) { - // '' is a default case for tests - case '': case 'restAndSpread': fn = usingRestAndSpread; break; diff --git a/benchmark/misc/getstringwidth.js b/benchmark/misc/getstringwidth.js index c10f7af8483..9dd4b47df71 100644 --- a/benchmark/misc/getstringwidth.js +++ b/benchmark/misc/getstringwidth.js @@ -10,8 +10,6 @@ const bench = common.createBenchmark(main, { }); function main({ n, type }) { - // Default value for testing purposes. 
- type = type || 'ascii'; const { getStringWidth } = require('internal/util/inspect'); const str = ({ diff --git a/benchmark/misc/object-property-bench.js b/benchmark/misc/object-property-bench.js index 0a4d004999e..9b33ac9a636 100644 --- a/benchmark/misc/object-property-bench.js +++ b/benchmark/misc/object-property-bench.js @@ -64,8 +64,6 @@ function runSymbol(n) { function main({ n, method }) { switch (method) { - // '' is a default case for tests - case '': case 'property': runProperty(n); break; diff --git a/benchmark/misc/punycode.js b/benchmark/misc/punycode.js index 5f85df758ef..9c674b5deef 100644 --- a/benchmark/misc/punycode.js +++ b/benchmark/misc/punycode.js @@ -62,8 +62,6 @@ function runICU(n, val) { function main({ n, val, method }) { switch (method) { - // '' is a default case for tests - case '': case 'punycode': runPunycode(n, val); break; diff --git a/benchmark/misc/trace.js b/benchmark/misc/trace.js index bdbf547007e..8620e99329b 100644 --- a/benchmark/misc/trace.js +++ b/benchmark/misc/trace.js @@ -6,7 +6,11 @@ const bench = common.createBenchmark(main, { n: [100000], method: ['trace', 'isTraceCategoryEnabled'] }, { - flags: ['--expose-internals', '--trace-event-categories', 'foo'] + flags: [ + '--expose-internals', + '--no-warnings', + '--trace-event-categories', 'foo', + ] }); const { @@ -37,7 +41,6 @@ function main({ n, method }) { } = common.binding('trace_events'); switch (method) { - case '': case 'trace': doTrace(n, trace); break; diff --git a/benchmark/misc/util-extend-vs-object-assign.js b/benchmark/misc/util-extend-vs-object-assign.js index b3d95f0e2d9..83aec7b9c82 100644 --- a/benchmark/misc/util-extend-vs-object-assign.js +++ b/benchmark/misc/util-extend-vs-object-assign.js @@ -9,10 +9,6 @@ const bench = common.createBenchmark(main, { }); function main({ n, type }) { - // Default value for tests. 
- if (type === '') - type = 'extend'; - let fn; if (type === 'extend') { fn = util._extend; diff --git a/benchmark/net/net-c2s.js b/benchmark/net/net-c2s.js index cacd6815630..424c8f6dd07 100644 --- a/benchmark/net/net-c2s.js +++ b/benchmark/net/net-c2s.js @@ -9,6 +9,8 @@ const bench = common.createBenchmark(main, { len: [64, 102400, 1024 * 1024 * 16], type: ['utf', 'asc', 'buf'], dur: [5], +}, { + test: { len: 1024 } }); let chunk; diff --git a/benchmark/net/net-pipe.js b/benchmark/net/net-pipe.js index d86ff73041d..32e1085299a 100644 --- a/benchmark/net/net-pipe.js +++ b/benchmark/net/net-pipe.js @@ -9,6 +9,8 @@ const bench = common.createBenchmark(main, { len: [2, 64, 102400, 1024 * 1024 * 16], type: ['utf', 'asc', 'buf'], dur: [5], +}, { + test: { len: 1024 } }); let chunk; diff --git a/benchmark/net/net-s2c.js b/benchmark/net/net-s2c.js index 789eadf0a18..835cc67567b 100644 --- a/benchmark/net/net-s2c.js +++ b/benchmark/net/net-s2c.js @@ -10,6 +10,8 @@ const bench = common.createBenchmark(main, { recvbuflen: [0, 64 * 1024, 1024 * 1024], recvbufgenfn: ['true', 'false'], dur: [5] +}, { + test: { sendchunklen: 256 } }); let chunk; diff --git a/benchmark/net/net-wrap-js-stream-passthrough.js b/benchmark/net/net-wrap-js-stream-passthrough.js index 0d7be36c6aa..3824cfb9c0e 100644 --- a/benchmark/net/net-wrap-js-stream-passthrough.js +++ b/benchmark/net/net-wrap-js-stream-passthrough.js @@ -9,6 +9,7 @@ const bench = common.createBenchmark(main, { type: ['utf', 'asc', 'buf'], dur: [5], }, { + test: { len: 64 }, flags: ['--expose-internals'] }); diff --git a/benchmark/net/tcp-raw-c2s.js b/benchmark/net/tcp-raw-c2s.js index b8af124a7f4..9547c01f38b 100644 --- a/benchmark/net/tcp-raw-c2s.js +++ b/benchmark/net/tcp-raw-c2s.js @@ -12,7 +12,10 @@ const bench = common.createBenchmark(main, { len: [102400, 1024 * 1024 * 16], type: ['utf', 'asc', 'buf'], dur: [5] -}, { flags: [ '--expose-internals', '--no-warnings' ] }); +}, { + test: { len: 1024 }, + flags: [ '--expose-internals', '--no-warnings' ] +}); function main({ dur, len, type }) { const { diff --git a/benchmark/net/tcp-raw-pipe.js b/benchmark/net/tcp-raw-pipe.js index 249b61046a8..e422ff749fd 100644 --- a/benchmark/net/tcp-raw-pipe.js +++ b/benchmark/net/tcp-raw-pipe.js @@ -13,6 +13,7 @@ const bench = common.createBenchmark(main, { type: ['utf', 'asc', 'buf'], dur: [5] }, { + test: { len: 1024 }, flags: [ '--expose-internals', '--no-warnings' ] }); diff --git a/benchmark/net/tcp-raw-s2c.js b/benchmark/net/tcp-raw-s2c.js index 393cf060489..be7279ca0c3 100644 --- a/benchmark/net/tcp-raw-s2c.js +++ b/benchmark/net/tcp-raw-s2c.js @@ -13,6 +13,7 @@ const bench = common.createBenchmark(main, { type: ['utf', 'asc', 'buf'], dur: [5] }, { + test: { len: 1024 }, flags: [ '--expose-internals', '--no-warnings' ] }); diff --git a/benchmark/run.js b/benchmark/run.js index 8e81a2c5e16..c2e38ce96d7 100644 --- a/benchmark/run.js +++ b/benchmark/run.js @@ -4,7 +4,7 @@ const path = require('path'); const fork = require('child_process').fork; const CLI = require('./_cli.js'); -const cli = CLI(`usage: ./node run.js [options] [--] ... +const cli = new CLI(`usage: ./node run.js [options] [--] ... Run each benchmark in the directory a single time, more than one directory can be specified. @@ -14,6 +14,9 @@ const cli = CLI(`usage: ./node run.js [options] [--] ... 
repeated) --set variable=value set benchmark variable (can be repeated) --format [simple|csv] optional value that specifies the output format + test only run a single configuration from the options + matrix + all each benchmark category is run one after the other `, { arrayArgs: ['set', 'filter', 'exclude'] }); const benchmarks = cli.benchmarks(); @@ -37,7 +40,11 @@ if (format === 'csv') { (function recursive(i) { const filename = benchmarks[i]; - const child = fork(path.resolve(__dirname, filename), cli.optional.set); + const child = fork( + path.resolve(__dirname, filename), + cli.test ? ['--test'] : [], + cli.optional.set + ); if (format !== 'csv') { console.log(); @@ -51,10 +58,10 @@ if (format === 'csv') { // Construct configuration string, " A=a, B=b, ..." let conf = ''; for (const key of Object.keys(data.conf)) { - conf += ` ${key}=${JSON.stringify(data.conf[key])}`; + if (conf !== '') + conf += ' '; + conf += `${key}=${JSON.stringify(data.conf[key])}`; } - // Delete first space of the configuration - conf = conf.slice(1); if (format === 'csv') { // Escape quotes (") for correct csv formatting conf = conf.replace(/"/g, '""'); diff --git a/benchmark/scatter.js b/benchmark/scatter.js index 10649e6bb51..ecbf8e0041c 100644 --- a/benchmark/scatter.js +++ b/benchmark/scatter.js @@ -7,7 +7,7 @@ const CLI = require('./_cli.js'); // // Parse arguments // -const cli = CLI(`usage: ./node scatter.js [options] [--] +const cli = new CLI(`usage: ./node scatter.js [options] [--] Run the benchmark script many times and output the rate (ops/s) together with the benchmark variables as a csv. diff --git a/benchmark/tls/secure-pair.js b/benchmark/tls/secure-pair.js index c52f4cbf918..76658fc3c42 100644 --- a/benchmark/tls/secure-pair.js +++ b/benchmark/tls/secure-pair.js @@ -3,7 +3,9 @@ const common = require('../common.js'); const bench = common.createBenchmark(main, { dur: [5], securing: ['SecurePair', 'TLSSocket', 'clear'], - size: [2, 100, 1024, 1024 * 1024] + size: [100, 1024, 1024 * 1024] +}, { + flags: ['--no-warnings'] }); const fixtures = require('../../test/common/fixtures'); diff --git a/benchmark/tls/throughput.js b/benchmark/tls/throughput.js index a8f2d19649d..3ea84aa84ef 100644 --- a/benchmark/tls/throughput.js +++ b/benchmark/tls/throughput.js @@ -3,7 +3,7 @@ const common = require('../common.js'); const bench = common.createBenchmark(main, { dur: [5], type: ['buf', 'asc', 'utf'], - size: [2, 1024, 1024 * 1024] + size: [100, 1024, 1024 * 1024, 4 * 1024 * 1024, 16 * 1024 * 1024] }); const fixtures = require('../../test/common/fixtures'); diff --git a/benchmark/url/url-format.js b/benchmark/url/url-format.js index 3e91cefd363..be5632d2b67 100644 --- a/benchmark/url/url-format.js +++ b/benchmark/url/url-format.js @@ -13,7 +13,7 @@ const bench = common.createBenchmark(main, { }); function main({ type, n }) { - const input = inputs[type] || ''; + const input = inputs[type]; // Force-optimize url.format() so that the benchmark doesn't get // disrupted by the optimizer kicking in halfway through. 
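A recurring change in this patch replaces ad-hoc empty-string defaults with explicit `test` configurations. A sketch of how a benchmark now declares a reduced configuration (shape taken from the `net-c2s.js` hunk above; the selection behavior in test mode is assumed from the new `--test` flag that `run.js` forwards to the child):

```js
'use strict';
const common = require('../common.js');

// Full matrix for normal runs; the `test` override supplies the cheap
// single configuration used when the child is forked with `--test`.
const bench = common.createBenchmark(main, {
  len: [64, 102400, 1024 * 1024 * 16],
  type: ['utf', 'asc', 'buf'],
  dur: [5],
}, {
  test: { len: 1024 },
});

function main({ len, type, dur }) {
  // Benchmark body elided; see the actual net benchmarks.
}
```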
diff --git a/benchmark/url/url-parse.js b/benchmark/url/url-parse.js index 751a11201b1..b3e83188b21 100644 --- a/benchmark/url/url-parse.js +++ b/benchmark/url/url-parse.js @@ -13,7 +13,7 @@ const bench = common.createBenchmark(main, { }); function main({ type, n }) { - const input = inputs[type] || ''; + const input = inputs[type]; bench.start(); for (let i = 0; i < n; i += 1) diff --git a/benchmark/util/format.js b/benchmark/util/format.js index 976e0f4e655..f7a6caa81c9 100644 --- a/benchmark/util/format.js +++ b/benchmark/util/format.js @@ -23,8 +23,7 @@ const bench = common.createBenchmark(main, { }); function main({ n, type }) { - // For testing, if supplied with an empty type, default to string. - const [first, second] = inputs[type || 'string']; + const [first, second] = inputs[type]; bench.start(); for (let i = 0; i < n; i++) { diff --git a/benchmark/util/inspect-array.js b/benchmark/util/inspect-array.js index 4fd73785f78..987b4047918 100644 --- a/benchmark/util/inspect-array.js +++ b/benchmark/util/inspect-array.js @@ -23,8 +23,6 @@ function main({ n, len, type }) { opts = { showHidden: true }; arr = arr.fill('denseArray'); break; - // For testing, if supplied with an empty type, default to denseArray. - case '': case 'denseArray': arr = arr.fill('denseArray'); break; diff --git a/benchmark/util/type-check.js b/benchmark/util/type-check.js index 5b992e729e6..792f61cf6fb 100644 --- a/benchmark/util/type-check.js +++ b/benchmark/util/type-check.js @@ -31,13 +31,10 @@ const bench = common.createBenchmark(main, { argument: ['true', 'false-primitive', 'false-object'], n: [1e5] }, { - flags: ['--expose-internals'] + flags: ['--expose-internals', '--no-warnings'] }); function main({ type, argument, version, n }) { - // For testing, if supplied with an empty type, default to ArrayBufferView. - type = type || 'ArrayBufferView'; - const util = common.binding('util'); const types = require('internal/util/types'); diff --git a/benchmark/zlib/pipe.js b/benchmark/zlib/pipe.js index 6a1c427bc83..76b0ddc6c65 100644 --- a/benchmark/zlib/pipe.js +++ b/benchmark/zlib/pipe.js @@ -8,6 +8,11 @@ const bench = common.createBenchmark(main, { duration: [5], type: ['string', 'buffer'], algorithm: ['gzip', 'brotli'] +}, { + test: { + inputLen: 1024, + duration: 0.2 + } }); function main({ inputLen, duration, type, algorithm }) { diff --git a/common.gypi b/common.gypi index 0f52e138ca2..6488241b3c6 100644 --- a/common.gypi +++ b/common.gypi @@ -39,7 +39,7 @@ # Reset this number to 0 on major V8 upgrades. # Increment by one for each non-official patch applied to deps/v8. - 'v8_embedder_string': '-node.29', + 'v8_embedder_string': '-node.30', ##### V8 defaults for Node.js ##### @@ -102,6 +102,9 @@ 'obj_dir%': '<(PRODUCT_DIR)/obj.target', 'v8_base': '<(PRODUCT_DIR)/libv8_snapshot.a', }], + ['target_arch in "ppc64 s390x"', { + 'v8_enable_backtrace': 1, + }], ], }, diff --git a/configure b/configure index 39decd9a55c..bc0a01d9855 100755 --- a/configure +++ b/configure @@ -7,6 +7,7 @@ # pyenv will alert which shims are available and then will fail the build. 
_=[ 'exec' '/bin/sh' '-c' ''' test ${TRAVIS} && exec python "$0" "$@" # workaround for pyenv on Travis CI +test ${FORCE_PYTHON2} && exec python2 "$0" "$@" # workaround for gclient which python3.8 >/dev/null && exec python3.8 "$0" "$@" which python3.7 >/dev/null && exec python3.7 "$0" "$@" which python3.6 >/dev/null && exec python3.6 "$0" "$@" diff --git a/configure.py b/configure.py index beb08df0884..0190e31b41a 100755 --- a/configure.py +++ b/configure.py @@ -301,6 +301,27 @@ dest='shared_zlib_libpath', help='a directory to search for the shared zlib DLL') +shared_optgroup.add_option('--shared-brotli', + action='store_true', + dest='shared_brotli', + help='link to a shared brotli DLL instead of static linking') + +shared_optgroup.add_option('--shared-brotli-includes', + action='store', + dest='shared_brotli_includes', + help='directory containing brotli header files') + +shared_optgroup.add_option('--shared-brotli-libname', + action='store', + dest='shared_brotli_libname', + default='brotlidec,brotlienc', + help='alternative lib name to link to [default: %default]') + +shared_optgroup.add_option('--shared-brotli-libpath', + action='store', + dest='shared_brotli_libpath', + help='a directory to search for the shared brotli DLL') + shared_optgroup.add_option('--shared-cares', action='store_true', dest='shared_cares', @@ -680,7 +701,11 @@ def pkg_config(pkg): retval = () for flag in ['--libs-only-l', '--cflags-only-I', '--libs-only-L', '--modversion']: - args += [flag, pkg] + args += [flag] + if isinstance(pkg, list): + args += pkg + else: + args += [pkg] try: proc = subprocess.Popen(shlex.split(pkg_config) + args, stdout=subprocess.PIPE) @@ -1688,6 +1713,7 @@ def make_bin_override(): configure_library('zlib', output) configure_library('http_parser', output) configure_library('libuv', output) +configure_library('brotli', output, pkgname=['libbrotlidec', 'libbrotlienc']) configure_library('cares', output, pkgname='libcares') configure_library('nghttp2', output, pkgname='libnghttp2') configure_v8(output) diff --git a/deps/npm/node_modules/term-size/vendor/macos/term-size b/deps/npm/node_modules/term-size/vendor/macos/term-size index e383cc737f8..c32a1fdc1b0 100755 Binary files a/deps/npm/node_modules/term-size/vendor/macos/term-size and b/deps/npm/node_modules/term-size/vendor/macos/term-size differ diff --git a/deps/openssl/openssl/crypto/rand/rand_unix.c b/deps/openssl/openssl/crypto/rand/rand_unix.c index 69efcdeed75..315af610f84 100644 --- a/deps/openssl/openssl/crypto/rand/rand_unix.c +++ b/deps/openssl/openssl/crypto/rand/rand_unix.c @@ -282,12 +282,58 @@ static ssize_t sysctl_random(char *buf, size_t buflen) # if defined(OPENSSL_RAND_SEED_GETRANDOM) # if defined(__linux) && !defined(__NR_getrandom) -# if defined(__arm__) && defined(__NR_SYSCALL_BASE) +# if defined(__arm__) # define __NR_getrandom (__NR_SYSCALL_BASE+384) # elif defined(__i386__) # define __NR_getrandom 355 -# elif defined(__x86_64__) && !defined(__ILP32__) -# define __NR_getrandom 318 +# elif defined(__x86_64__) +# if defined(__ILP32__) +# define __NR_getrandom (__X32_SYSCALL_BIT + 318) +# else +# define __NR_getrandom 318 +# endif +# elif defined(__xtensa__) +# define __NR_getrandom 338 +# elif defined(__s390__) || defined(__s390x__) +# define __NR_getrandom 349 +# elif defined(__bfin__) +# define __NR_getrandom 389 +# elif defined(__powerpc__) +# define __NR_getrandom 359 +# elif defined(__mips__) || defined(__mips64) +# if _MIPS_SIM == _MIPS_SIM_ABI32 +# define __NR_getrandom (__NR_Linux + 353) +# elif _MIPS_SIM == 
_MIPS_SIM_ABI64 +# define __NR_getrandom (__NR_Linux + 313) +# elif _MIPS_SIM == _MIPS_SIM_NABI32 +# define __NR_getrandom (__NR_Linux + 317) +# endif +# elif defined(__hppa__) +# define __NR_getrandom (__NR_Linux + 339) +# elif defined(__sparc__) +# define __NR_getrandom 347 +# elif defined(__ia64__) +# define __NR_getrandom 1339 +# elif defined(__alpha__) +# define __NR_getrandom 511 +# elif defined(__sh__) +# if defined(__SH5__) +# define __NR_getrandom 373 +# else +# define __NR_getrandom 384 +# endif +# elif defined(__avr32__) +# define __NR_getrandom 317 +# elif defined(__microblaze__) +# define __NR_getrandom 385 +# elif defined(__m68k__) +# define __NR_getrandom 352 +# elif defined(__cris__) +# define __NR_getrandom 356 +# elif defined(__aarch64__) +# define __NR_getrandom 278 +# else /* generic */ +# define __NR_getrandom 278 # endif # endif diff --git a/deps/v8/src/objects/dictionary-inl.h b/deps/v8/src/objects/dictionary-inl.h index 18b2ee67a4d..3a5b8cb3c7e 100644 --- a/deps/v8/src/objects/dictionary-inl.h +++ b/deps/v8/src/objects/dictionary-inl.h @@ -61,13 +61,13 @@ BaseNameDictionary::BaseNameDictionary(Address ptr) : Dictionary(ptr) {} template -void BaseNameDictionary::SetNextEnumerationIndex(int index) { - DCHECK_NE(0, index); +void BaseNameDictionary::set_next_enumeration_index(int index) { + DCHECK_LT(0, index); this->set(kNextEnumerationIndexIndex, Smi::FromInt(index)); } template -int BaseNameDictionary::NextEnumerationIndex() { +int BaseNameDictionary::next_enumeration_index() { return Smi::ToInt(this->get(kNextEnumerationIndexIndex)); } diff --git a/deps/v8/src/objects/dictionary.h b/deps/v8/src/objects/dictionary.h index 35137c7d945..eb15a77e33e 100644 --- a/deps/v8/src/objects/dictionary.h +++ b/deps/v8/src/objects/dictionary.h @@ -120,10 +120,6 @@ class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) BaseNameDictionary static const int kObjectHashIndex = kNextEnumerationIndexIndex + 1; static const int kEntryValueIndex = 1; - // Accessors for next enumeration index. - inline void SetNextEnumerationIndex(int index); - inline int NextEnumerationIndex(); - inline void SetHash(int hash); inline int Hash() const; @@ -138,6 +134,13 @@ class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) BaseNameDictionary V8_WARN_UNUSED_RESULT static ExceptionStatus CollectKeysTo( Handle dictionary, KeyAccumulator* keys); + // Allocate the next enumeration index. Possibly updates all enumeration + // indices in the table. + static int NextEnumerationIndex(Isolate* isolate, Handle dictionary); + // Accessors for next enumeration index. + inline int next_enumeration_index(); + inline void set_next_enumeration_index(int index); + // Return the key indices sorted by its enumeration index. static Handle IterationIndices(Isolate* isolate, Handle dictionary); @@ -149,10 +152,6 @@ class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) BaseNameDictionary Handle storage, KeyCollectionMode mode, KeyAccumulator* accumulator); - // Ensure enough space for n additional elements. 
- static Handle EnsureCapacity(Isolate* isolate, - Handle dictionary, int n); - V8_WARN_UNUSED_RESULT static Handle AddNoUpdateNextEnumerationIndex( Isolate* isolate, Handle dictionary, Key key, Handle value, PropertyDetails details, int* entry_out = nullptr); diff --git a/deps/v8/src/objects/hash-table.h b/deps/v8/src/objects/hash-table.h index 5cdeb0c0ec4..32013e58e56 100644 --- a/deps/v8/src/objects/hash-table.h +++ b/deps/v8/src/objects/hash-table.h @@ -201,7 +201,7 @@ class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) HashTable // Ensure enough space for n additional elements. V8_WARN_UNUSED_RESULT static Handle EnsureCapacity( - Isolate* isolate, Handle table, int n, + Isolate* isolate, Handle table, int n = 1, AllocationType allocation = AllocationType::kYoung); // Returns true if this table has sufficient capacity for adding n elements. diff --git a/deps/v8/src/objects/js-objects.cc b/deps/v8/src/objects/js-objects.cc index ea0917f18fe..4b1d9a4c862 100644 --- a/deps/v8/src/objects/js-objects.cc +++ b/deps/v8/src/objects/js-objects.cc @@ -2908,7 +2908,7 @@ void MigrateFastToSlow(Isolate* isolate, Handle object, } // Copy the next enumeration index from instance descriptor. - dictionary->SetNextEnumerationIndex(real_size + 1); + dictionary->set_next_enumeration_index(real_size + 1); // From here on we cannot fail and we shouldn't GC anymore. DisallowHeapAllocation no_allocation; diff --git a/deps/v8/src/objects/literal-objects.cc b/deps/v8/src/objects/literal-objects.cc index 98c41cbfb5f..827a8b10219 100644 --- a/deps/v8/src/objects/literal-objects.cc +++ b/deps/v8/src/objects/literal-objects.cc @@ -363,7 +363,7 @@ class ObjectDescriptor { void Finalize(Isolate* isolate) { if (HasDictionaryProperties()) { - properties_dictionary_template_->SetNextEnumerationIndex( + properties_dictionary_template_->set_next_enumeration_index( next_enumeration_index_); computed_properties_ = FixedArray::ShrinkOrEmpty( isolate, computed_properties_, current_computed_index_); diff --git a/deps/v8/src/objects/lookup.cc b/deps/v8/src/objects/lookup.cc index 7f626cc2233..0700a6fc921 100644 --- a/deps/v8/src/objects/lookup.cc +++ b/deps/v8/src/objects/lookup.cc @@ -634,8 +634,8 @@ void LookupIterator::PrepareTransitionToDataProperty( transition_ = cell; // Assign an enumeration index to the property and update // SetNextEnumerationIndex. - int index = dictionary->NextEnumerationIndex(); - dictionary->SetNextEnumerationIndex(index + 1); + int index = GlobalDictionary::NextEnumerationIndex(isolate_, dictionary); + dictionary->set_next_enumeration_index(index + 1); property_details_ = PropertyDetails( kData, attributes, PropertyCellType::kUninitialized, index); PropertyCellType new_type = diff --git a/deps/v8/src/objects/objects.cc b/deps/v8/src/objects/objects.cc index 723023b7079..1328b517e8b 100644 --- a/deps/v8/src/objects/objects.cc +++ b/deps/v8/src/objects/objects.cc @@ -6677,7 +6677,7 @@ void StringTable::EnsureCapacityForDeserialization(Isolate* isolate, int expected) { Handle table = isolate->factory()->string_table(); // We need a key instance for the virtual hash function. - table = StringTable::EnsureCapacity(isolate, table, expected); + table = EnsureCapacity(isolate, table, expected); isolate->heap()->SetRootStringTable(*table); } @@ -6729,7 +6729,7 @@ Handle StringTable::LookupKey(Isolate* isolate, StringTableKey* key) { table = StringTable::CautiousShrink(isolate, table); // Adding new string. Grow table if needed. 
- table = StringTable::EnsureCapacity(isolate, table, 1); + table = EnsureCapacity(isolate, table); isolate->heap()->SetRootStringTable(*table); return AddKeyNoResize(isolate, key); @@ -6870,7 +6870,7 @@ Handle StringSet::New(Isolate* isolate) { Handle StringSet::Add(Isolate* isolate, Handle stringset, Handle name) { if (!stringset->Has(isolate, name)) { - stringset = EnsureCapacity(isolate, stringset, 1); + stringset = EnsureCapacity(isolate, stringset); uint32_t hash = ShapeT::Hash(isolate, *name); int entry = stringset->FindInsertionEntry(hash); stringset->set(EntryToIndex(entry), *name); @@ -6888,7 +6888,7 @@ Handle ObjectHashSet::Add(Isolate* isolate, Handle key) { int32_t hash = key->GetOrCreateHash(isolate).value(); if (!set->Has(isolate, key, hash)) { - set = EnsureCapacity(isolate, set, 1); + set = EnsureCapacity(isolate, set); int entry = set->FindInsertionEntry(hash); set->set(EntryToIndex(entry), *key); set->ElementAdded(); @@ -7084,7 +7084,7 @@ Handle CompilationCacheTable::PutScript( src = String::Flatten(isolate, src); StringSharedKey key(src, shared, language_mode, kNoSourcePosition); Handle k = key.AsHandle(isolate); - cache = EnsureCapacity(isolate, cache, 1); + cache = EnsureCapacity(isolate, cache); int entry = cache->FindInsertionEntry(key.Hash()); cache->set(EntryToIndex(entry), *k); cache->set(EntryToIndex(entry) + 1, *value); @@ -7116,7 +7116,7 @@ Handle CompilationCacheTable::PutEval( } } - cache = EnsureCapacity(isolate, cache, 1); + cache = EnsureCapacity(isolate, cache); int entry = cache->FindInsertionEntry(key.Hash()); Handle k = isolate->factory()->NewNumber(static_cast(key.Hash())); @@ -7130,7 +7130,7 @@ Handle CompilationCacheTable::PutRegExp( Isolate* isolate, Handle cache, Handle src, JSRegExp::Flags flags, Handle value) { RegExpKey key(src, flags); - cache = EnsureCapacity(isolate, cache, 1); + cache = EnsureCapacity(isolate, cache); int entry = cache->FindInsertionEntry(key.Hash()); // We store the value in the key slot, and compare the search key // to the stored value with a custon IsMatch function during lookups. @@ -7192,15 +7192,16 @@ Handle BaseNameDictionary::New( Handle dict = Dictionary::New( isolate, at_least_space_for, allocation, capacity_option); dict->SetHash(PropertyArray::kNoHashSentinel); - dict->SetNextEnumerationIndex(PropertyDetails::kInitialIndex); + dict->set_next_enumeration_index(PropertyDetails::kInitialIndex); return dict; } template -Handle BaseNameDictionary::EnsureCapacity( - Isolate* isolate, Handle dictionary, int n) { - // Check whether there are enough enumeration indices to add n elements. - if (!PropertyDetails::IsValidIndex(dictionary->NextEnumerationIndex() + n)) { +int BaseNameDictionary::NextEnumerationIndex( + Isolate* isolate, Handle dictionary) { + int index = dictionary->next_enumeration_index(); + // Check whether the next enumeration index is valid. + if (!PropertyDetails::IsValidIndex(index)) { // If not, we generate new indices for the properties. int length = dictionary->NumberOfElements(); @@ -7221,11 +7222,12 @@ Handle BaseNameDictionary::EnsureCapacity( dictionary->DetailsAtPut(isolate, index, new_details); } - // Set the next enumeration index. - dictionary->SetNextEnumerationIndex(PropertyDetails::kInitialIndex + - length); + index = PropertyDetails::kInitialIndex + length; } - return HashTable::EnsureCapacity(isolate, dictionary, n); + + // Don't update the next enumeration index here, since we might be looking at + // an immutable empty dictionary. 
+ return index; } template @@ -7274,13 +7276,13 @@ Handle BaseNameDictionary::Add( DCHECK_EQ(0, details.dictionary_index()); // Assign an enumeration index to the property and update // SetNextEnumerationIndex. - int index = dictionary->NextEnumerationIndex(); + int index = Derived::NextEnumerationIndex(isolate, dictionary); details = details.set_index(index); dictionary = AddNoUpdateNextEnumerationIndex(isolate, dictionary, key, value, details, entry_out); // Update enumeration index here in order to avoid potential modification of // the canonical empty dictionary which lives in read only space. - dictionary->SetNextEnumerationIndex(index + 1); + dictionary->set_next_enumeration_index(index + 1); return dictionary; } @@ -7294,7 +7296,7 @@ Handle Dictionary::Add(Isolate* isolate, // Valdate key is absent. SLOW_DCHECK((dictionary->FindEntry(isolate, key) == Dictionary::kNotFound)); // Check whether the dictionary should be extended. - dictionary = Derived::EnsureCapacity(isolate, dictionary, 1); + dictionary = Derived::EnsureCapacity(isolate, dictionary); // Compute the key object. Handle k = Shape::AsHandle(isolate, key); @@ -7644,7 +7646,7 @@ Handle ObjectHashTableBase::Put(Isolate* isolate, } // Check whether the hash table should be extended. - table = Derived::EnsureCapacity(isolate, table, 1); + table = Derived::EnsureCapacity(isolate, table); table->AddEntry(table->FindInsertionEntry(hash), *key, *value); return table; } @@ -7892,8 +7894,8 @@ Handle PropertyCell::PrepareForValue( // Preserve the enumeration index unless the property was deleted or never // initialized. if (cell->value().IsTheHole(isolate)) { - index = dictionary->NextEnumerationIndex(); - dictionary->SetNextEnumerationIndex(index + 1); + index = GlobalDictionary::NextEnumerationIndex(isolate, dictionary); + dictionary->set_next_enumeration_index(index + 1); } else { index = original_details.dictionary_index(); } diff --git a/deps/zlib/README.md b/deps/zlib/README.md deleted file mode 100644 index a12f0a7876a..00000000000 --- a/deps/zlib/README.md +++ /dev/null @@ -1,6 +0,0 @@ -This copy of zlib comes from the Chromium team's zlib fork which incorporated performance improvements not currently available in standard zlib. - -To update this code: - -* Clone https://chromium.googlesource.com/chromium/src/third_party/zlib -* Comment out the `#include "chromeconf.h"` in zconf.h to maintain full compatibility with node addons diff --git a/deps/zlib/google/test/data/create_test_zip.sh b/deps/zlib/google/test/data/create_test_zip.sh old mode 100644 new mode 100755 diff --git a/doc/abi_version_registry.json b/doc/abi_version_registry.json index 50e7008bc75..f044d97599f 100644 --- a/doc/abi_version_registry.json +++ b/doc/abi_version_registry.json @@ -1,5 +1,6 @@ { "NODE_MODULE_VERSION": [ + { "modules": 82, "runtime": "electron", "variant": "electron", "versions": "10" }, { "modules": 81, "runtime": "node", "variant": "v8_7.9", "versions": "14.0.0-pre" }, { "modules": 80, "runtime": "electron", "variant": "electron", "versions": "9" }, { "modules": 79, "runtime": "node", "variant": "v8_7.8", "versions": "13" }, diff --git a/doc/api/addons.md b/doc/api/addons.md index 6b77338348e..e6b5704e8fb 100644 --- a/doc/api/addons.md +++ b/doc/api/addons.md @@ -8,8 +8,8 @@ Addons are dynamically-linked shared objects written in C++. The Addons provide an interface between JavaScript and C/C++ libraries. There are three options for implementing Addons: N-API, nan, or direct -use of internal V8, libuv and Node.js libraries. 
Unless you need direct -access to functionality which is not exposed by N-API, use N-API. +use of internal V8, libuv and Node.js libraries. Unless there is a need for +direct access to functionality which is not exposed by N-API, use N-API. Refer to [C/C++ Addons with N-API](n-api.html) for more information on N-API. When not using N-API, implementing Addons is complicated, @@ -313,7 +313,7 @@ require('./build/Release/addon'); Once the source code has been written, it must be compiled into the binary `addon.node` file. To do so, create a file called `binding.gyp` in the top-level of the project describing the build configuration of the module -using a JSON-like format. This file is used by [node-gyp][] — a tool written +using a JSON-like format. This file is used by [node-gyp][], a tool written specifically to compile Node.js Addons. ```json diff --git a/doc/api/assert.md b/doc/api/assert.md index 9d513f9a0f8..732b4ba8e02 100644 --- a/doc/api/assert.md +++ b/doc/api/assert.md @@ -11,7 +11,7 @@ invariants. * Returns: {Object} The resource representing the current execution. @@ -859,6 +859,339 @@ for (let i = 0; i < 10; i++) { } ``` +## Class: `AsyncLocalStorage` + + +This class is used to create asynchronous state within callbacks and promise +chains. It allows storing data throughout the lifetime of a web request +or any other asynchronous duration. It is similar to thread-local storage +in other languages. + +The following example builds a logger that will always know the current HTTP +request and uses it to display enhanced logs without needing to explicitly +provide the current HTTP request to it. + +```js +const { AsyncLocalStorage } = require('async_hooks'); +const http = require('http'); + +const kReq = 'CURRENT_REQUEST'; +const asyncLocalStorage = new AsyncLocalStorage(); + +function log(...args) { + const store = asyncLocalStorage.getStore(); + // Make sure the store exists and it contains a request. + if (store && store.has(kReq)) { + const req = store.get(kReq); + // Prints `GET /items ERR could not do something` + console.log(req.method, req.url, ...args); + } else { + console.log(...args); + } +} + +http.createServer((request, response) => { + asyncLocalStorage.run(new Map(), () => { + const store = asyncLocalStorage.getStore(); + store.set(kReq, request); + someAsyncOperation((err, result) => { + if (err) { + log('ERR', err.message); + } + }); + }); +}) +.listen(8080); +``` + +Multiple instances of `AsyncLocalStorage` are independent +of each other. It is safe to instantiate this class multiple times. + +### `new AsyncLocalStorage()` + + +Creates a new instance of `AsyncLocalStorage`. A store is only provided within +a `run` or a `runSyncAndReturn` method call. + +### `asyncLocalStorage.disable()` + + +This method disables the instance of `AsyncLocalStorage`. All subsequent calls +to `asyncLocalStorage.getStore()` will return `undefined` until +`asyncLocalStorage.run()` or `asyncLocalStorage.runSyncAndReturn()` +is called again. + +When calling `asyncLocalStorage.disable()`, all current contexts linked to the +instance will be exited. + +Calling `asyncLocalStorage.disable()` is required before the +`asyncLocalStorage` can be garbage collected. This does not apply to stores +provided by the `asyncLocalStorage`, as those objects are garbage collected +along with the corresponding async resources. + +Use this method when the `asyncLocalStorage` is no longer in use +in the current process.
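Since `disable()` is documented above without an example, here is a minimal sketch of the described behavior (illustrative only, using the method names introduced in this section):

```js
const { AsyncLocalStorage } = require('async_hooks');

const asyncLocalStorage = new AsyncLocalStorage();

asyncLocalStorage.runSyncAndReturn({ id: 1 }, () => {
  asyncLocalStorage.getStore(); // Returns { id: 1 }
});

asyncLocalStorage.disable();  // Exits all current contexts.
asyncLocalStorage.getStore(); // Returns undefined ...

asyncLocalStorage.runSyncAndReturn({ id: 2 }, () => {
  asyncLocalStorage.getStore(); // ... until a store is provided again.
});
```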
+ +### `asyncLocalStorage.getStore()` + + +* Returns: {any} + +This method returns the current store. +If this method is called outside of an asynchronous context initialized by +calling `asyncLocalStorage.run` or `asyncLocalStorage.runSyncAndReturn`, it will +return `undefined`. + +### `asyncLocalStorage.enterWith(store)` + + +* `store` {any} + +Calling `asyncLocalStorage.enterWith(store)` will transition into the context +for the remainder of the current synchronous execution and will persist +through any following asynchronous calls. + +Example: + +```js +const store = { id: 1 }; +asyncLocalStorage.enterWith(store); +asyncLocalStorage.getStore(); // Returns the store object +someAsyncOperation(() => { + asyncLocalStorage.getStore(); // Returns the same object +}); +``` + +This transition will continue for the _entire_ synchronous execution. +This means that if, for example, the context is entered within an event +handler, subsequent event handlers will also run within that context unless +specifically bound to another context with an `AsyncResource`. + +```js +const store = { id: 1 }; + +emitter.on('my-event', () => { + asyncLocalStorage.enterWith(store); +}); +emitter.on('my-event', () => { + asyncLocalStorage.getStore(); // Returns the same object +}); + +asyncLocalStorage.getStore(); // Returns undefined +emitter.emit('my-event'); +asyncLocalStorage.getStore(); // Returns the same object +``` + +### `asyncLocalStorage.run(store, callback[, ...args])` + + +* `store` {any} +* `callback` {Function} +* `...args` {any} + +Calling `asyncLocalStorage.run(store, callback)` will create a new asynchronous +context. Within the callback function and the asynchronous operations from +the callback, `asyncLocalStorage.getStore()` will return the object or +the primitive value passed into the `store` argument (known as "the store"). +This store will persist through the following asynchronous calls. + +The callback will be run asynchronously. Optionally, arguments can be passed; +they will be passed to the callback function. + +If an error is thrown by the callback function, it will not be caught by +a `try/catch` block as the callback is run in a new asynchronous resource. +Also, the stack trace will be impacted by the asynchronous call. + +Example: + +```js +const store = { id: 1 }; +asyncLocalStorage.run(store, () => { + asyncLocalStorage.getStore(); // Returns the store object + someAsyncOperation(() => { + asyncLocalStorage.getStore(); // Returns the same object + }); +}); +asyncLocalStorage.getStore(); // Returns undefined +``` + +### `asyncLocalStorage.exit(callback[, ...args])` + + +* `callback` {Function} +* `...args` {any} + +Calling `asyncLocalStorage.exit(callback)` will create a new asynchronous +context. +Within the callback function and the asynchronous operations from the callback, +`asyncLocalStorage.getStore()` will return `undefined`. + +The callback will be run asynchronously. Optionally, arguments can be passed; +they will be passed to the callback function. + +If an error is thrown by the callback function, it will not be caught by +a `try/catch` block as the callback is run in a new asynchronous resource. +Also, the stack trace will be impacted by the asynchronous call.
+ +Example: + +```js +asyncLocalStorage.run('store value', () => { + asyncLocalStorage.getStore(); // Returns 'store value' + asyncLocalStorage.exit(() => { + asyncLocalStorage.getStore(); // Returns undefined + }); + asyncLocalStorage.getStore(); // Returns 'store value' +}); +``` + +### `asyncLocalStorage.runSyncAndReturn(store, callback[, ...args])` + + +* `store` {any} +* `callback` {Function} +* `...args` {any} + +This method runs a function synchronously within a context and returns its +return value. The store is not accessible outside of the callback function or +the asynchronous operations created within the callback. + +Optionally, arguments can be passed; they will be passed to +the callback function. + +If the callback function throws an error, it will be thrown by +`runSyncAndReturn` too. The stack trace will not be impacted by this call and +the context will be exited. + +Example: + +```js +const store = { id: 2 }; +try { + asyncLocalStorage.runSyncAndReturn(store, () => { + asyncLocalStorage.getStore(); // Returns the store object + throw new Error(); + }); +} catch (e) { + asyncLocalStorage.getStore(); // Returns undefined + // The error will be caught here +} +``` + +### `asyncLocalStorage.exitSyncAndReturn(callback[, ...args])` + + +* `callback` {Function} +* `...args` {any} + +This method runs a function synchronously outside of a context and returns its +return value. The store is not accessible within the callback function or +the asynchronous operations created within the callback. + +Optionally, arguments can be passed; they will be passed to +the callback function. + +If the callback function throws an error, it will be thrown by +`exitSyncAndReturn` too. The stack trace will not be impacted by this call and +the context will be re-entered. + +Example: + +```js +// Within a call to run or runSyncAndReturn +try { + asyncLocalStorage.getStore(); // Returns the store object or value + asyncLocalStorage.exitSyncAndReturn(() => { + asyncLocalStorage.getStore(); // Returns undefined + throw new Error(); + }); +} catch (e) { + asyncLocalStorage.getStore(); // Returns the same object or value + // The error will be caught here +} +``` + +### Choosing between `run` and `runSyncAndReturn` + +#### When to choose `run` + +`run` is asynchronous. It is called with a callback function that +runs within a new asynchronous call. This is the most explicit behavior as +everything that is executed within the callback of `run` (including further +asynchronous operations) will have access to the store. + +If an instance of `AsyncLocalStorage` is used for error management (for +instance, with `process.setUncaughtExceptionCaptureCallback`), only +exceptions thrown in the scope of the callback function will be associated +with the context. + +This method is the safest as it provides strong scoping and consistent +behavior. + +It cannot be promisified using `util.promisify`. If needed, the `Promise` +constructor can be used: + +```js +const store = new Map(); // Initialize the store +new Promise((resolve, reject) => { + asyncLocalStorage.run(store, () => { + someFunction((err, result) => { + if (err) { + return reject(err); + } + return resolve(result); + }); + }); +}); +``` + +#### When to choose `runSyncAndReturn` + +`runSyncAndReturn` is synchronous. The callback function will be executed +synchronously and its return value will be returned by `runSyncAndReturn`.
+The store will only be accessible from within the callback
+function and the asynchronous operations created within this scope.
+If the callback throws an error, `runSyncAndReturn` will throw it and it will
+not be associated with the context.
+
+This method provides good scoping while being synchronous.
+
+#### Usage with `async/await`
+
+If, within an async function, only one `await` call is to run within a context,
+the following pattern should be used:
+
+```js
+async function fn() {
+  await asyncLocalStorage.runSyncAndReturn(new Map(), () => {
+    asyncLocalStorage.getStore().set('key', value);
+    return foo(); // The return value of foo will be awaited
+  });
+}
+```
+
+In this example, the store is only available in the callback function and the
+functions called by `foo`. Outside of `runSyncAndReturn`, calling `getStore`
+will return `undefined`.
+
 [`after` callback]: #async_hooks_after_asyncid
 [`before` callback]: #async_hooks_before_asyncid
 [`destroy` callback]: #async_hooks_destroy_asyncid
diff --git a/doc/api/buffer.md b/doc/api/buffer.md
index 0d7dee9df45..ed8da05bc47 100644
--- a/doc/api/buffer.md
+++ b/doc/api/buffer.md
@@ -123,15 +123,12 @@ added: v5.10.0
 -->
 
 Node.js can be started using the `--zero-fill-buffers` command line option to
-cause all newly allocated `Buffer` instances to be zero-filled upon creation by
-default. Before Node.js 8.0.0, this included buffers allocated by `new
-Buffer(size)`. Since Node.js 8.0.0, buffers allocated with `new` are always
-zero-filled, whether this option is used or not.
-[`Buffer.allocUnsafe()`][], [`Buffer.allocUnsafeSlow()`][], and `new
-SlowBuffer(size)`. Use of this flag can have a significant negative impact on
-performance. Use of the `--zero-fill-buffers` option is recommended only when
-necessary to enforce that newly allocated `Buffer` instances cannot contain old
-data that is potentially sensitive.
+cause all newly-allocated `Buffer` instances to be zero-filled upon creation by
+default. Without the option, buffers created with [`Buffer.allocUnsafe()`][],
+[`Buffer.allocUnsafeSlow()`][], and `new SlowBuffer(size)` are not zero-filled.
+Use of this flag can have a significant negative impact on performance. Use the
+`--zero-fill-buffers` option only when necessary to enforce that newly allocated
+`Buffer` instances cannot contain old data that is potentially sensitive.
 
 ```console
 $ node --zero-fill-buffers
diff --git a/doc/api/cli.md b/doc/api/cli.md
index 9d62ac9ba31..66cb665fa82 100644
--- a/doc/api/cli.md
+++ b/doc/api/cli.md
@@ -158,7 +158,7 @@ Currently, overriding `Error.prepareStackTrace` is ignored when the
 ### `--experimental-import-meta-resolve`
 
 Enable experimental `import.meta.resolve()` support.
 
@@ -170,6 +170,14 @@ added: v12.9.0
 
 Enable experimental JSON support for the ES Module loader.
 
+### `--experimental-loader=module`
+
+
+Specify the `module` of a custom [experimental ECMAScript Module loader][].
+`module` may be either a path to a file, or an ECMAScript Module name.
+
 ### `--experimental-modules`
 
-
-Specify the `module` of a custom [experimental ECMAScript Module loader][].
-`module` may be either a path to a file, or an ECMAScript Module name.
-
 ### `--insecure-http-parser`
 
+### `--jitless`
+
+Disable [runtime allocation of executable memory][jitless]. This may be
+required on some platforms for security reasons. It can also reduce attack
+surface on other platforms, but the performance impact may be severe.
+
+This flag is inherited from V8 and is subject to change upstream.
It may +disappear in a non-semver-major release. + ### `--max-http-header-size=size` Prints a stack trace on SIGINT. @@ -1156,6 +1168,7 @@ V8 options that are allowed are: * `--abort-on-uncaught-exception` * `--disallow-code-generation-from-strings` * `--interpreted-frames-native-stack` +* `--jitless` * `--max-old-space-size` * `--perf-basic-prof-only-functions` * `--perf-basic-prof` @@ -1164,6 +1177,9 @@ V8 options that are allowed are: * `--stack-trace-limit` +`--perf-basic-prof-only-functions`, `--perf-basic-prof`, +`--perf-prof-unwinding-info`, and `--perf-prof` are only available on Linux. + ### `NODE_PATH=path[:…]` * `options`: {Object} @@ -2107,7 +2107,7 @@ Both keys must have the same `asymmetricKeyType`, which must be one of `'dh'` -Type: Runtime +Type: End-of-Life -The `OutgoingMessage.prototype.flush()` method is deprecated. Use +`OutgoingMessage.prototype.flush()` has been removed. Use `OutgoingMessage.prototype.flushHeaders()` instead. diff --git a/doc/api/documentation.md b/doc/api/documentation.md index 14310f7baed..7059c4d33e1 100644 --- a/doc/api/documentation.md +++ b/doc/api/documentation.md @@ -27,9 +27,10 @@ The stability indices are as follows: -> Stability: 1 - Experimental. The feature is not subject to Semantic Versioning -> rules. Non-backward compatible changes or removal may occur in any future -> release. Use of the feature is not recommended in production environments. +> Stability: 1 - Experimental. The feature is not subject to +> [Semantic Versioning][] rules. Non-backward compatible changes or removal may +> occur in any future release. Use of the feature is not recommended in +> production environments. @@ -58,6 +59,7 @@ to the corresponding man pages which describe how the system call works. Most Unix system calls have Windows analogues. Still, behavior differences may be unavoidable. +[Semantic Versioning]: https://semver.org/ [the contributing guide]: https://github.com/nodejs/node/blob/master/CONTRIBUTING.md [the issue tracker]: https://github.com/nodejs/node/issues/new [V8 JavaScript engine]: https://v8.dev/ diff --git a/doc/api/errors.md b/doc/api/errors.md index b186275807a..03853cd18f5 100644 --- a/doc/api/errors.md +++ b/doc/api/errors.md @@ -533,14 +533,14 @@ program. For a comprehensive list, see the [`errno`(3) man page][]. `ulimit -n 2048` in the same shell that will run the Node.js process. * `ENOENT` (No such file or directory): Commonly raised by [`fs`][] operations - to indicate that a component of the specified pathname does not exist — no + to indicate that a component of the specified pathname does not exist. No entity (file or directory) could be found by the given path. * `ENOTDIR` (Not a directory): A component of the given pathname existed, but was not a directory as expected. Commonly raised by [`fs.readdir`][]. * `ENOTEMPTY` (Directory not empty): A directory with entries was the target - of an operation that requires an empty directory — usually [`fs.unlink`][]. + of an operation that requires an empty directory, usually [`fs.unlink`][]. * `ENOTFOUND` (DNS lookup failed): Indicates a DNS failure of either `EAI_NODATA` or `EAI_NONAME`. This is not a standard POSIX error. @@ -555,7 +555,7 @@ program. For a comprehensive list, see the [`errno`(3) man page][]. * `ETIMEDOUT` (Operation timed out): A connect or send request failed because the connected party did not properly respond after a period of time. Usually - encountered by [`http`][] or [`net`][] — often a sign that a `socket.end()` + encountered by [`http`][] or [`net`][]. 
Often a sign that a `socket.end()`
  was not properly called.
 
 ## Class: `TypeError`
@@ -710,6 +710,14 @@ STDERR/STDOUT, and the data's length is longer than the `maxBuffer`
 option.
 
 `Console` was instantiated without `stdout` stream, or `Console` has a
 non-writable `stdout` or `stderr` stream.
 
+
+### `ERR_CONTEXT_NOT_INITIALIZED`
+
+The vm context passed into the API is not yet initialized. This could happen
+when an error occurs (and is caught) during the creation of the context, for
+example, when the allocation fails or the maximum call stack size is reached
+during creation.
+
 
 ### `ERR_CONSTRUCT_CALL_REQUIRED`
 
@@ -881,6 +889,13 @@ provided.
 
 Encoding provided to `TextDecoder()` API was not one of the
 [WHATWG Supported Encodings][].
 
+
+### `ERR_EXECUTION_ENVIRONMENT_NOT_AVAILABLE`
+
+The JS execution context is not associated with a Node.js environment.
+This may occur when Node.js is used as an embedded library and some hooks
+for the JS engine are not set up properly.
+
 
 ### `ERR_FALSY_VALUE_REJECTION`
 
@@ -1319,6 +1334,12 @@ An invalid HTTP token was supplied.
 
 An IP address is not valid.
 
+
+### `ERR_INVALID_MODULE_SPECIFIER`
+
+The imported module string is an invalid URL, package name, or package subpath
+specifier.
+
 
 ### `ERR_INVALID_OPT_VALUE`
 
@@ -1334,6 +1355,12 @@ An invalid or unknown file encoding was passed.
 
 An invalid `package.json` file was found which failed parsing.
 
+
+### `ERR_INVALID_PACKAGE_TARGET`
+
+The `package.json` [exports][] field contains an invalid target mapping value
+for the attempted module resolution.
+
 
 ### `ERR_INVALID_PERFORMANCE_MARK`
 
@@ -1640,6 +1667,13 @@ A non-context-aware native addon was loaded in a process that disallows them.
 
 A given value is out of the accepted range.
 
+
+### `ERR_PACKAGE_PATH_NOT_EXPORTED`
+
+The `package.json` [exports][] field does not export the requested subpath.
+Because exports are encapsulated, private internal modules that are not exported
+cannot be imported through the package resolution, unless using an absolute URL.
+
 
 ### `ERR_REQUIRE_ESM`
 
@@ -1700,11 +1734,6 @@ value.
 
 While using [`dgram.createSocket()`][], the size of the receive or send
 `Buffer` could not be determined.
 
-
-### `ERR_SOCKET_CANNOT_SEND`
-
-Data could be sent on a socket.
-
 
 ### `ERR_SOCKET_CLOSED`
 
@@ -1831,7 +1860,7 @@ recommended to use 2048 bits or larger for stronger security.
 
 A TLS/SSL handshake timed out. In this case, the server must also abort the
 connection.
 
-
+
 ### `ERR_TLS_INVALID_CONTEXT`
+
+The TLS socket must be connected and securely established. Ensure the 'secure'
+event is emitted before continuing.
+
 ### `ERR_TLS_INVALID_PROTOCOL_METHOD`
 
@@ -2047,6 +2085,11 @@ meaning of the error depends on the specific function.
 
 The WASI instance has already started.
 
+
+### `ERR_WORKER_INIT_FAILED`
+
+The `Worker` initialization failed.
+
 
 ### `ERR_WORKER_INVALID_EXEC_ARGV`
 
@@ -2252,6 +2295,15 @@ removed: v10.0.0
 
 The `repl` module was unable to parse data from the REPL history file.
 
+
+### `ERR_SOCKET_CANNOT_SEND`
+
+
+Data could not be sent on a socket.
+
 ### `ERR_STDERR_CLOSE`
 
-* `authority` {string|URL}
+* `authority` {string|URL} The remote HTTP/2 server to connect to. This must
+  be in the form of a minimal, valid URL with the `http://` or `https://`
+  prefix, host name, and IP port (if a non-default port is used). Userinfo
+  (user ID and password), path, querystring, and fragment details in the
+  URL will be ignored.
* `options` {Object} * `maxDeflateDynamicTableSize` {number} Sets the maximum dynamic table size for deflating header fields. **Default:** `4Kib`. @@ -3004,7 +3008,7 @@ added: v8.4.0 * Extends: {Stream} -This object is created internally by an HTTP server — not by the user. It is +This object is created internally by an HTTP server, not by the user. It is passed as the second parameter to the [`'request'`][] event. #### Event: `'close'` diff --git a/doc/api/https.md b/doc/api/https.md index 19d2053d377..ac53b6f0fde 100644 --- a/doc/api/https.md +++ b/doc/api/https.md @@ -372,7 +372,7 @@ const options = { return new Error(msg); } - // Pin the exact certificate, rather then the pub key + // Pin the exact certificate, rather than the pub key const cert256 = '25:FE:39:32:D9:63:8C:8A:FC:A1:9A:29:87:' + 'D8:3E:4C:1D:98:DB:71:E4:1A:48:03:98:EA:22:6A:BD:8B:93:16'; if (cert.fingerprint256 !== cert256) { diff --git a/doc/api/modules.md b/doc/api/modules.md index d69a2a2f151..4c91c27f52b 100644 --- a/doc/api/modules.md +++ b/doc/api/modules.md @@ -945,7 +945,7 @@ added: v0.3.7 * {Object} Provides general utility methods when interacting with instances of -`Module` — the `module` variable often seen in file modules. Accessed +`Module`, the `module` variable often seen in file modules. Accessed via `require('module')`. ### `module.builtinModules` diff --git a/doc/api/n-api.md b/doc/api/n-api.md index 9ccfeb9cb74..2b322968c17 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -374,6 +374,7 @@ tied to the life cycle of the Agent. ### napi_set_instance_data ```C @@ -401,6 +402,7 @@ by the previous call, it will not be called. ### napi_get_instance_data ```C @@ -1663,10 +1665,9 @@ the `napi_value` in question is of the JavaScript type expected by the API. #### napi_key_collection_mode -> Stability: 1 - Experimental - ```C typedef enum { napi_key_include_prototypes, @@ -1685,10 +1686,9 @@ of the objects's prototype chain as well. #### napi_key_filter -> Stability: 1 - Experimental - ```C typedef enum { napi_key_all_properties = 0, @@ -1705,10 +1705,9 @@ Property filter bits. They can be or'ed to build a composite filter. #### napi_key_conversion -> Stability: 1 - Experimental - ```C typedef enum { napi_key_keep_numbers, @@ -2262,10 +2261,9 @@ The JavaScript `Number` type is described in #### napi_create_bigint_int64 -> Stability: 1 - Experimental - ```C napi_status napi_create_bigint_int64(napi_env env, int64_t value, @@ -2283,10 +2281,9 @@ This API converts the C `int64_t` type to the JavaScript `BigInt` type. #### napi_create_bigint_uint64 -> Stability: 1 - Experimental - ```C napi_status napi_create_bigint_uint64(napi_env env, uint64_t value, @@ -2304,10 +2301,9 @@ This API converts the C `uint64_t` type to the JavaScript `BigInt` type. 
#### napi_create_bigint_words
 
-> Stability: 1 - Experimental
-
 ```C
 napi_status napi_create_bigint_words(napi_env env,
                                      int sign_bit,
@@ -2653,10 +2649,9 @@ This API returns the C double primitive equivalent of the given JavaScript
 
 #### napi_get_value_bigint_int64
 
-> Stability: 1 - Experimental
-
 ```C
 napi_status napi_get_value_bigint_int64(napi_env env,
                                         napi_value value,
@@ -2680,10 +2675,9 @@ This API returns the C `int64_t` primitive equivalent of the given JavaScript
 
 #### napi_get_value_bigint_uint64
 
-> Stability: 1 - Experimental
-
 ```C
 napi_status napi_get_value_bigint_uint64(napi_env env,
                                          napi_value value,
@@ -2707,10 +2701,9 @@ This API returns the C `uint64_t` primitive equivalent of the given JavaScript
 
 #### napi_get_value_bigint_words
 
-> Stability: 1 - Experimental
-
 ```C
 napi_status napi_get_value_bigint_words(napi_env env,
                                         napi_value value,
@@ -3595,10 +3588,9 @@ included.
 
 #### napi_get_all_property_names
 
-> Stability: 1 - Experimental
-
 ```C
 napi_get_all_property_names(napi_env env,
                             napi_value object,
diff --git a/doc/api/net.md b/doc/api/net.md
index 84bda5e4e9b..ea0b9139fd3 100644
--- a/doc/api/net.md
+++ b/doc/api/net.md
@@ -939,7 +939,7 @@ added: v0.1.90
 
 * Returns: {boolean}
 
 Sends data on the socket. The second parameter specifies the encoding in the
-case of a string — it defaults to UTF8 encoding.
+case of a string. It defaults to UTF8 encoding.
 
 Returns `true` if the entire data was flushed successfully to the kernel
 buffer. Returns `false` if all or part of the data was queued in user memory.
diff --git a/doc/api/os.md b/doc/api/os.md
index bb5bd612382..d8a75e8c1ee 100644
--- a/doc/api/os.md
+++ b/doc/api/os.md
@@ -389,6 +389,20 @@ operating system response.
 
 Throws a [`SystemError`][] if a user has no `username` or `homedir`.
 
+## `os.version()`
+
+
+* Returns: {string}
+
+Returns a string identifying the kernel version.
+
+On POSIX systems, the operating system release is determined by calling
+[uname(3)][]. On Windows, `pRtlGetVersion` is used, and if it is not available,
+`GetVersionExW()` will be used. See
+https://en.wikipedia.org/wiki/Uname#Examples for more information.
+
 ## OS Constants
 
 The following constants are exported by `os.constants`.
diff --git a/doc/api/path.md b/doc/api/path.md
index 477ef2cab09..c05a5c29efb 100644
--- a/doc/api/path.md
+++ b/doc/api/path.md
@@ -389,7 +389,7 @@ path.parse('/home/user/dir/file.txt');
 │ root │              │ name │ ext │
 "  /    home/user/dir / file  .txt "
 └──────┴──────────────┴──────┴─────┘
-(all spaces in the "" line should be ignored — they are purely for formatting)
+(All spaces in the "" line should be ignored. They are purely for formatting.)
 ```
 
 On Windows:
@@ -411,7 +411,7 @@ path.parse('C:\\path\\dir\\file.txt');
 │ root │              │ name │ ext │
 " C:\      path\dir   \ file  .txt "
 └──────┴──────────────┴──────┴─────┘
-(all spaces in the "" line should be ignored — they are purely for formatting)
+(All spaces in the "" line should be ignored. They are purely for formatting.)
 ```
 
 A [`TypeError`][] is thrown if `path` is not a string.
diff --git a/doc/api/perf_hooks.md b/doc/api/perf_hooks.md
index 36668b5cc53..00b52d96889 100644
--- a/doc/api/perf_hooks.md
+++ b/doc/api/perf_hooks.md
@@ -203,7 +203,7 @@ The value may be one of:
 
 ### performanceEntry.flags
 
 * {number}
diff --git a/doc/api/policy.md b/doc/api/policy.md
index bf4cc214552..35ea48b40b4 100644
--- a/doc/api/policy.md
+++ b/doc/api/policy.md
@@ -166,9 +166,9 @@ only with care after auditing a module to ensure its behavior is valid.
#### Example: Patched Dependency
 
-Since a dependency can be redirected, you can provide attenuated or modified
-forms of dependencies as fits your application. For example, you could log
-data about timing of function durations by wrapping the original:
+Redirected dependencies can provide attenuated or modified functionality as fits
+the application. For example, log data about timing of function durations by
+wrapping the original:
 
 ```js
 const original = require('fn');
diff --git a/doc/api/process.md b/doc/api/process.md
index 3ccd9fc776d..59e64de17fe 100644
--- a/doc/api/process.md
+++ b/doc/api/process.md
@@ -190,7 +190,7 @@ rejection handler.
 
 There is no notion of a top level for a `Promise` chain at which rejections
 can always be handled. Being inherently asynchronous in nature, a `Promise`
-rejection can be handled at a future point in time — possibly much later than
+rejection can be handled at a future point in time, possibly much later than
 the event loop turn it takes for the `'unhandledRejection'` event to be
 emitted.
 
 Another way of stating this is that, unlike in synchronous code where there is
@@ -502,7 +502,7 @@ process.on('SIGTERM', handle);
 * `'SIGTERM'` is not supported on Windows, it can be listened on.
 * `'SIGINT'` from the terminal is supported on all platforms, and can usually
   be generated with `<Ctrl>+C` (though this may be configurable). It is not
-  generated when terminal raw mode is enabled.
+  generated when [terminal raw mode][] is enabled and `<Ctrl>+C` is used.
 * `'SIGBREAK'` is delivered on Windows when `<Ctrl>+<Break>` is pressed, on
   non-Windows platforms it can be listened on, but there is no way to send or
   generate it.
@@ -833,7 +833,7 @@ added: v0.7.2
 
 * {number}
 
-The port used by Node.js's debugger when enabled.
+The port used by the Node.js debugger when enabled.
 
 ```js
 process.debugPort = 5858;
@@ -1510,7 +1510,7 @@ is no entry script.
 
 * {string|undefined}
 
@@ -387,7 +387,7 @@ process.stdin.on('keypress', (c, k) => {
 
 ### `rl.cursor`
 
 * {number|undefined}
 
@@ -456,7 +456,7 @@ the current position of the cursor down.
diff --git a/doc/api/stream.md b/doc/api/stream.md
index cf3ecd461c1..69be69b5516 100644
--- a/doc/api/stream.md
+++ b/doc/api/stream.md
@@ -385,6 +385,10 @@ This is a destructive and immediate way to destroy a stream. Previous calls to
 `write()` may not have drained, and may trigger an `ERR_STREAM_DESTROYED` error.
 Use `end()` instead of destroy if data should flush before close, or wait for
 the `'drain'` event before destroying the stream.
+
+Once `destroy()` has been called, any further calls will be a no-op and no
+further errors except from `_destroy` may be emitted as `'error'`.
+
 Implementors should not override this method, but instead implement
 [`writable._destroy()`][writable-_destroy].
@@ -585,8 +589,8 @@ The `writable.write()` method writes some data to the stream, and calls the
 supplied `callback` once the data has been fully handled. If an error
 occurs, the `callback` *may or may not* be called with the error as its
 first argument. To reliably detect write errors, add a listener for the
-`'error'` event. If `callback` is called with an error, it will be called
-before the `'error'` event is emitted.
+`'error'` event. The `callback` is called asynchronously and before `'error'` is
+emitted.
 
 The return value is `true` if the internal buffer is less than the
 `highWaterMark` configured when the stream was created after admitting `chunk`.
@@ -953,6 +957,10 @@ Destroy the stream.
Optionally emit an `'error'` event, and emit a `'close'` event (unless
 `emitClose` is set to `false`). After this call, the readable stream will
 release any internal resources and subsequent calls to `push()` will be
 ignored.
+
+Once `destroy()` has been called, any further calls will be a no-op and no
+further errors except from `_destroy` may be emitted as `'error'`.
+
 Implementors should not override this method, but instead implement
 [`readable._destroy()`][readable-_destroy].
@@ -1484,6 +1492,9 @@ Implementors should not override this method, but instead implement
 The default implementation of `_destroy()` for `Transform` also emit `'close'`
 unless `emitClose` is set in false.
 
+Once `destroy()` has been called, any further calls will be a no-op and no
+further errors except from `_destroy` may be emitted as `'error'`.
+
 ### `stream.finished(stream[, options], callback)`
 
@@ -1887,11 +1898,11 @@ This function MUST NOT be called by application code directly. It should be
 implemented by child classes, and called by the internal `Writable`
 class methods only.
 
-The `callback` method must be called to signal either that the write completed
-successfully or failed with an error. The first argument passed to the
-`callback` must be the `Error` object if the call failed or `null` if the
-write succeeded. The `callback` method will always be called asynchronously and
-before `'error'` is emitted.
+The `callback` function must be called synchronously inside of
+`writable._write()` or asynchronously (i.e. different tick) to signal either
+that the write completed successfully or failed with an error.
+The first argument passed to the `callback` must be the `Error` object if the
+call failed or `null` if the write succeeded.
 
 All calls to `writable.write()` that occur between the time `writable._write()`
 is called and the `callback` is called will cause the written data to be
diff --git a/doc/api/tls.md b/doc/api/tls.md
index fee6e33d610..55dbf3b8d42 100644
--- a/doc/api/tls.md
+++ b/doc/api/tls.md
@@ -1094,6 +1094,39 @@ See
 [SSL_get_shared_sigalgs](https://www.openssl.org/docs/man1.1.1/man3/SSL_get_shared_sigalgs.html)
 for more information.
 
+### `tlsSocket.exportKeyingMaterial(length, label[, context])`
+
+
+* `length` {number} number of bytes to retrieve from keying material
+* `label` {string} an application-specific label, typically this will be a
+value from the
+[IANA Exporter Label Registry](https://www.iana.org/assignments/tls-parameters/tls-parameters.xhtml#exporter-labels).
+* `context` {Buffer} Optionally provide a context.
+
+* Returns: {Buffer} requested bytes of the keying material
+
+Keying material is used for validations to prevent different kinds of attacks
+in network protocols, for example in the specifications of IEEE 802.1X.
+
+Example:
+
+```js
+const keyingMaterial = tlsSocket.exportKeyingMaterial(
+  128,
+  'client finished');
+
+/**
+ Example return value of keyingMaterial:
+
+*/
+```
+See the OpenSSL [`SSL_export_keying_material`][] documentation for more
+information.
+
 ### `tlsSocket.getTLSTicket()`
+
+## `vm.measureMemory([options])`
+
+> Stability: 1 - Experimental
+
+Measure the memory known to V8 and used by the current execution context
+or a specified context.
+
+* `options` {Object} Optional.
+  * `mode` {string} Either `'summary'` or `'detailed'`.
+    **Default:** `'summary'`
+  * `context` {Object} Optional. A [contextified][] object returned
+    by `vm.createContext()`. If not specified, measure the memory
+    usage of the current context where `vm.measureMemory()` is invoked.
+* Returns: {Promise} If the memory is successfully measured the promise will + resolve with an object containing information about the memory usage. + +The format of the object that the returned Promise may resolve with is +specific to the V8 engine and may change from one version of V8 to the next. + +The returned result is different from the statistics returned by +`v8.getHeapSpaceStatistics()` in that `vm.measureMemory()` measures +the memory reachable by V8 from a specific context, while +`v8.getHeapSpaceStatistics()` measures the memory used by an instance +of V8 engine, which can switch among multiple contexts that reference +objects in the heap of one engine. + +```js +const vm = require('vm'); +// Measure the memory used by the current context and return the result +// in summary. +vm.measureMemory({ mode: 'summary' }) + // Is the same as vm.measureMemory() + .then((result) => { + // The current format is: + // { total: { jsMemoryEstimate: 2211728, jsMemoryRange: [ 0, 2211728 ] } } + console.log(result); + }); + +const context = vm.createContext({}); +vm.measureMemory({ mode: 'detailed' }, context) + .then((result) => { + // At the moment the detailed format is the same as the summary one. + console.log(result); + }); +``` + ## Class: `vm.Module` + +* Returns: {Promise} A promise for a Readable Stream containing + a V8 heap snapshot + +Returns a readable stream for a V8 snapshot of the current state of the Worker. +See [`v8.getHeapSnapshot()`][] for more details. + +If the Worker thread is no longer running, which may occur before the +[`'exit'` event][] is emitted, the returned `Promise` will be rejected +immediately with an [`ERR_WORKER_NOT_RUNNING`][] error. + ### `worker.postMessage(value[, transferList])` - -* Returns: {Promise} A promise for a Readable Stream containing - a V8 heap snapshot - -Returns a readable stream for a V8 snapshot of the current state of the Worker. -See [`v8.getHeapSnapshot()`][] for more details. - -If the Worker thread is no longer running, which may occur before the -[`'exit'` event][] is emitted, the returned `Promise` will be rejected -immediately with an [`ERR_WORKER_NOT_RUNNING`][] error. - ### `worker.terminate()` -Each zlib-based class takes an `options` object. All options are optional. +Each zlib-based class takes an `options` object. No options are required. Some options are only relevant when compressing and are ignored by the decompression classes. @@ -1058,6 +1156,6 @@ Decompress a chunk of data with [`Unzip`][]. [Brotli parameters]: #zlib_brotli_constants [Memory Usage Tuning]: #zlib_memory_usage_tuning [RFC 7932]: https://www.rfc-editor.org/rfc/rfc7932.txt -[pool size]: cli.html#cli_uv_threadpool_size_size +[Streams API]: stream.md [zlib documentation]: https://zlib.net/manual.html#Constants [zlib.createGzip example]: #zlib_zlib diff --git a/doc/changelogs/CHANGELOG_V12.md b/doc/changelogs/CHANGELOG_V12.md index 8b860d9e460..d3341b1eb48 100644 --- a/doc/changelogs/CHANGELOG_V12.md +++ b/doc/changelogs/CHANGELOG_V12.md @@ -10,6 +10,7 @@ +12.16.1
12.16.0
12.15.0
12.14.1
@@ -54,6 +55,69 @@
 * [io.js](CHANGELOG_IOJS.md)
 * [Archive](CHANGELOG_ARCHIVE.md)
 
+
+## 2020-02-18, Version 12.16.1 'Erbium' (LTS), @MylesBorins
+
+### Notable changes
+
+Node.js 12.16.0 included 6 regressions that are being fixed in this release.
+
+**Accidental Unflagging of Self Resolving Modules**:
+
+12.16.0 included a large update to the ESM implementation. One of the new features,
+Self Referential Modules, was accidentally released without requiring the `--experimental-modules`
+flag. This release is being made to appropriately flag the feature.
+
+**Process Cleanup Change Introduced WASM-Related Assertion**:
+
+A change during Node.js process cleanup led to a crash in combination with
+specific usage of WASM. This has been fixed by partially reverting said change.
+A regression test and a full fix are being worked on and will likely be included
+in future 12.x and 13.x releases.
+
+**Use Largepages Runtime Option Introduced Linking Failure**:
+
+A Semver-Minor change to introduce `--use-largepages` as a runtime option
+introduced a linking failure. This had been fixed in master but regressed, as the
+fix had not yet gone out in a Current release. The feature has been reverted, but
+will be able to reland with a fix in a future Semver-Minor release.
+
+**Async Hooks was Causing an Exception When Handling Errors**:
+
+Changes in async hooks internals introduced a case where an internal API could be
+called with `undefined`, causing the process to crash. The change to async hooks
+was reverted. A regression test and a fix have been proposed, and the change could
+re-land in a future Semver-Patch release if the regression is reliably fixed.
+
+**New Enumerable Read-Only Property on EventEmitter breaks @types/extend**:
+
+A new property for enumerating events was added to the EventEmitter class. This
+broke existing code that was using the `@types/extend` module for extending classes,
+as `@types/extend` was attempting to write over the existing field, which the new
+change made read-only (a minimal sketch of this failure mode follows these notes).
+As this is the first read-only property on EventEmitter, this feature could be
+considered Semver-Major. The new feature has been reverted but could re-land in a
+future Semver-Minor release if a non-breaking way of applying it is found.
+
+**Exceptions in the HTTP parser were not emitting an uncaughtException**:
+
+A refactoring of Node.js internals resulted in a bug where errors in the HTTP
+parser were not being emitted through `process.on('uncaughtException')` when an
+`async_hooks` `after` hook exists. The fix for this bug has been included in this
+release.
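+
+The following sketch is illustrative only: the property name matches the
+reverted `errorMonitor` feature, the attributes approximate the 12.16.0
+change, and `extend()` stands in for the kind of copy helper that broke.
+
+```js
+'use strict';
+
+// Stand-in for the EventEmitter class as patched in 12.16.0.
+class EventEmitter {}
+Object.defineProperty(EventEmitter, 'errorMonitor', {
+  value: Symbol('events.errorMonitor'),
+  writable: false,   // Read-only: assignments to it are rejected.
+  enumerable: true   // Enumerable: copy helpers will visit it.
+});
+
+// A typical extend()-style helper that copies enumerable properties.
+function extend(target, source) {
+  for (const key in source) {
+    target[key] = source[key];
+  }
+  return target;
+}
+
+class MyEmitter extends EventEmitter {}
+
+// TypeError: `MyEmitter` inherits `errorMonitor` as read-only, so the
+// assignment inside extend() is rejected in strict mode.
+extend(MyEmitter, EventEmitter);
+```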
+ +### Commits + +* [[`51fdd759b9`](https://github.com/nodejs/node/commit/51fdd759b9)] - **async_hooks**: ensure event after been emitted on runInAsyncScope (legendecas) [#31784](https://github.com/nodejs/node/pull/31784) +* [[`7a1b0ac06f`](https://github.com/nodejs/node/commit/7a1b0ac06f)] - ***Revert*** "**build**: re-introduce --use-largepages as no-op" (Myles Borins) [#31782](https://github.com/nodejs/node/pull/31782) +* [[`a53eeca2a9`](https://github.com/nodejs/node/commit/a53eeca2a9)] - ***Revert*** "**build**: switch realpath to pwd" (Myles Borins) [#31782](https://github.com/nodejs/node/pull/31782) +* [[`6d432994e6`](https://github.com/nodejs/node/commit/6d432994e6)] - ***Revert*** "**build**: warn upon --use-largepages config option" (Myles Borins) [#31782](https://github.com/nodejs/node/pull/31782) +* [[`a5bc00af12`](https://github.com/nodejs/node/commit/a5bc00af12)] - ***Revert*** "**events**: allow monitoring error events" (Myles Borins) +* [[`f0b2d875d9`](https://github.com/nodejs/node/commit/f0b2d875d9)] - **module**: 12.x self resolve flag as experimental modules (Guy Bedford) [#31757](https://github.com/nodejs/node/pull/31757) +* [[`42b68a4e24`](https://github.com/nodejs/node/commit/42b68a4e24)] - **src**: inform callback scopes about exceptions in HTTP parser (Anna Henningsen) [#31801](https://github.com/nodejs/node/pull/31801) +* [[`065a32f064`](https://github.com/nodejs/node/commit/065a32f064)] - ***Revert*** "**src**: make --use-largepages a runtime option" (Myles Borins) [#31782](https://github.com/nodejs/node/pull/31782) +* [[`3d5beebc62`](https://github.com/nodejs/node/commit/3d5beebc62)] - ***Revert*** "**src**: make large\_pages node.cc include conditional" (Myles Borins) [#31782](https://github.com/nodejs/node/pull/31782) +* [[`43d02e20e0`](https://github.com/nodejs/node/commit/43d02e20e0)] - **src**: keep main-thread Isolate attached to platform during Dispose (Anna Henningsen) [#31795](https://github.com/nodejs/node/pull/31795) +* [[`7a5954ef26`](https://github.com/nodejs/node/commit/7a5954ef26)] - **src**: fix -Winconsistent-missing-override warning (Colin Ihrig) [#30549](https://github.com/nodejs/node/pull/30549) + ## 2020-02-11, Version 12.16.0 'Erbium' (LTS), @targos diff --git a/doc/changelogs/CHANGELOG_V13.md b/doc/changelogs/CHANGELOG_V13.md index f3234798733..9eef50a7667 100644 --- a/doc/changelogs/CHANGELOG_V13.md +++ b/doc/changelogs/CHANGELOG_V13.md @@ -9,6 +9,9 @@ +13.10.1
+13.10.0
+13.9.0
13.8.0
13.7.0
13.6.0
@@ -38,6 +41,392 @@
 * [io.js](CHANGELOG_IOJS.md)
 * [Archive](CHANGELOG_ARCHIVE.md)
 
+
+## 2020-03-04, Version 13.10.1 (Current), @MylesBorins
+
+### Notable Changes
+
+In Node.js 13.9.0, deps/zlib was switched to the Chromium-maintained
+implementation. This change had the unforeseen consequence of breaking building
+from the tarballs we release, as we were too aggressively removing unnecessary
+files from the `deps/zlib` folder. This release includes a patch that ensures
+that individuals will once again be able to build Node.js from source.
+
+### Commits
+
+* [[`723aa41d96`](https://github.com/nodejs/node/commit/723aa41d96)] - **build**: fix zlib tarball generation (Shelley Vohr) [#32094](https://github.com/nodejs/node/pull/32094)
+* [[`9c1ac50fc5`](https://github.com/nodejs/node/commit/9c1ac50fc5)] - **build**: fix building with ninja (Richard Lau) [#32071](https://github.com/nodejs/node/pull/32071)
+* [[`478450d6b3`](https://github.com/nodejs/node/commit/478450d6b3)] - **build**: add asan check in Github action (gengjiawen) [#31902](https://github.com/nodejs/node/pull/31902)
+* [[`0fc45f80b5`](https://github.com/nodejs/node/commit/0fc45f80b5)] - **crypto**: simplify exportKeyingMaterial (Tobias Nießen) [#31922](https://github.com/nodejs/node/pull/31922)
+* [[`4dc59b91a7`](https://github.com/nodejs/node/commit/4dc59b91a7)] - **dgram**: make UDPWrap more reusable (Anna Henningsen) [#31871](https://github.com/nodejs/node/pull/31871)
+* [[`4ed720e940`](https://github.com/nodejs/node/commit/4ed720e940)] - **doc**: visibility of Worker threads cli options (Harshitha KP) [#31380](https://github.com/nodejs/node/pull/31380)
+* [[`2518213a1b`](https://github.com/nodejs/node/commit/2518213a1b)] - **doc**: improve doc/markdown file organization coherence (ConorDavenport) [#31792](https://github.com/nodejs/node/pull/31792)
+* [[`ba3f7ff94d`](https://github.com/nodejs/node/commit/ba3f7ff94d)] - **doc**: update stream.pipeline() signature (vsemozhetbyt) [#31789](https://github.com/nodejs/node/pull/31789)
+* [[`3c8daa3aa0`](https://github.com/nodejs/node/commit/3c8daa3aa0)] - **events**: convert errorMonitor to a normal property (Gerhard Stoebich) [#31848](https://github.com/nodejs/node/pull/31848)
+* [[`6b44df2415`](https://github.com/nodejs/node/commit/6b44df2415)] - **perf,src**: add HistogramBase and internal/histogram.js (James M Snell) [#31988](https://github.com/nodejs/node/pull/31988)
+* [[`6a9cea9ed2`](https://github.com/nodejs/node/commit/6a9cea9ed2)] - **src**: pass resource object along with InternalMakeCallback (Anna Henningsen) [#32063](https://github.com/nodejs/node/pull/32063)
+* [[`70f046010c`](https://github.com/nodejs/node/commit/70f046010c)] - **src**: start the .text section with an asm symbol (Gabriel Schulhof) [#31981](https://github.com/nodejs/node/pull/31981)
+* [[`755da035ce`](https://github.com/nodejs/node/commit/755da035ce)] - **src**: add node\_crypto\_common and refactor (James M Snell) [#32016](https://github.com/nodejs/node/pull/32016)
+* [[`4d5318c164`](https://github.com/nodejs/node/commit/4d5318c164)] - **src**: improve handling of internal field counting (James M Snell) [#31960](https://github.com/nodejs/node/pull/31960)
+* [[`1539928ed9`](https://github.com/nodejs/node/commit/1539928ed9)] - **test**: add GC test for disabled AsyncLocalStorage (Andrey Pechkurov) [#31995](https://github.com/nodejs/node/pull/31995)
+* [[`be90817558`](https://github.com/nodejs/node/commit/be90817558)] - **test**: remove common.port from test-tls-securepair-client (Rich Trott) 
[#32024](https://github.com/nodejs/node/pull/32024) + + +## 2020-03-04, Version 13.10.0 (Current), @codebytere + +### Notable Changes + +* **async_hooks** + * introduce async-context API (vdeturckheim) [#26540](https://github.com/nodejs/node/pull/26540) +* **stream** + * support passing generator functions into pipeline() (Robert Nagy) [#31223](https://github.com/nodejs/node/pull/31223) +* **tls** + * expose SSL\_export\_keying\_material (simon) [#31814](https://github.com/nodejs/node/pull/31814) +* **vm** + * implement vm.measureMemory() for per-context memory measurement (Joyee Cheung) [#31824](https://github.com/nodejs/node/pull/31824) + +### Commits + +* [[`f71fc9044a`](https://github.com/nodejs/node/commit/f71fc9044a)] - **async_hooks**: add store arg in AsyncLocalStorage (Andrey Pechkurov) [#31930](https://github.com/nodejs/node/pull/31930) +* [[`6af9e7e0c3`](https://github.com/nodejs/node/commit/6af9e7e0c3)] - **async_hooks**: executionAsyncResource matches in hooks (Gerhard Stoebich) [#31821](https://github.com/nodejs/node/pull/31821) +* [[`877ab97286`](https://github.com/nodejs/node/commit/877ab97286)] - **(SEMVER-MINOR)** **async_hooks**: introduce async-context API (vdeturckheim) [#26540](https://github.com/nodejs/node/pull/26540) +* [[`9a41ced0d1`](https://github.com/nodejs/node/commit/9a41ced0d1)] - **build**: only lint markdown files that have changed (POSIX-only) (Rich Trott) [#31923](https://github.com/nodejs/node/pull/31923) +* [[`ca4407105e`](https://github.com/nodejs/node/commit/ca4407105e)] - **build**: add missing comma in node.gyp (cjihrig) [#31959](https://github.com/nodejs/node/pull/31959) +* [[`4dffd0437d`](https://github.com/nodejs/node/commit/4dffd0437d)] - **cli**: --perf-prof only works on Linux (Shelley Vohr) [#31892](https://github.com/nodejs/node/pull/31892) +* [[`4d05508aa8`](https://github.com/nodejs/node/commit/4d05508aa8)] - **crypto**: turn impossible DH errors into assertions (Tobias Nießen) [#31934](https://github.com/nodejs/node/pull/31934) +* [[`d0e94fc77e`](https://github.com/nodejs/node/commit/d0e94fc77e)] - **crypto**: fix ieee-p1363 for createVerify (Tobias Nießen) [#31876](https://github.com/nodejs/node/pull/31876) +* [[`fbaab7d854`](https://github.com/nodejs/node/commit/fbaab7d854)] - **deps**: openssl: cherry-pick 4dcb150ea30f (Adam Majer) [#32002](https://github.com/nodejs/node/pull/32002) +* [[`e6125cd53b`](https://github.com/nodejs/node/commit/e6125cd53b)] - **deps**: V8: backport f7771e5b0cc4 (Matheus Marchini) [#31957](https://github.com/nodejs/node/pull/31957) +* [[`c27f0d10c4`](https://github.com/nodejs/node/commit/c27f0d10c4)] - **deps**: update zlib to upstream d7f3ca9 (Sam Roberts) [#31800](https://github.com/nodejs/node/pull/31800) +* [[`b30a6981d3`](https://github.com/nodejs/node/commit/b30a6981d3)] - **deps**: move zlib maintenance info to guides (Sam Roberts) [#31800](https://github.com/nodejs/node/pull/31800) +* [[`cd30dbb0d6`](https://github.com/nodejs/node/commit/cd30dbb0d6)] - **doc**: revise --zero-fill-buffers text in buffer.md (Rich Trott) [#32019](https://github.com/nodejs/node/pull/32019) +* [[`166579f84b`](https://github.com/nodejs/node/commit/166579f84b)] - **doc**: add link to sem-ver info (unknown) [#31985](https://github.com/nodejs/node/pull/31985) +* [[`e3258fd148`](https://github.com/nodejs/node/commit/e3258fd148)] - **doc**: update zlib doc (James M Snell) [#31665](https://github.com/nodejs/node/pull/31665) +* [[`8516602ba0`](https://github.com/nodejs/node/commit/8516602ba0)] - **doc**: clarify http2.connect 
authority details (James M Snell) [#31828](https://github.com/nodejs/node/pull/31828) +* [[`c5acf0a13b`](https://github.com/nodejs/node/commit/c5acf0a13b)] - **doc**: updated YAML version representation in readline.md (Rich Trott) [#31924](https://github.com/nodejs/node/pull/31924) +* [[`4c6343fdea`](https://github.com/nodejs/node/commit/4c6343fdea)] - **doc**: describe how to update zlib (Sam Roberts) [#31800](https://github.com/nodejs/node/pull/31800) +* [[`a46839279f`](https://github.com/nodejs/node/commit/a46839279f)] - **doc**: update releases guide re pushing tags (Myles Borins) [#31855](https://github.com/nodejs/node/pull/31855) +* [[`15cc9b0126`](https://github.com/nodejs/node/commit/15cc9b0126)] - **doc**: update assert.rejects() docs with a validation function example (Eric Eastwood) [#31271](https://github.com/nodejs/node/pull/31271) +* [[`2046652b4e`](https://github.com/nodejs/node/commit/2046652b4e)] - **doc**: fix anchor for ERR\_TLS\_INVALID\_CONTEXT (Tobias Nießen) [#31915](https://github.com/nodejs/node/pull/31915) +* [[`091b4bfe2d`](https://github.com/nodejs/node/commit/091b4bfe2d)] - **doc**: add note about ssh key to releases (Shelley Vohr) [#31856](https://github.com/nodejs/node/pull/31856) +* [[`3438937a37`](https://github.com/nodejs/node/commit/3438937a37)] - **doc**: fix notable changes for v13.9.0 (Shelley Vohr) [#31857](https://github.com/nodejs/node/pull/31857) +* [[`672f76d6bd`](https://github.com/nodejs/node/commit/672f76d6bd)] - **doc**: reword possessive form of Node.js in adding-new-napi-api.md (Rich Trott) [#31748](https://github.com/nodejs/node/pull/31748) +* [[`3eaf37767e`](https://github.com/nodejs/node/commit/3eaf37767e)] - **doc**: reword possessive form of Node.js in http.md (Rich Trott) [#31748](https://github.com/nodejs/node/pull/31748) +* [[`cb210e6b16`](https://github.com/nodejs/node/commit/cb210e6b16)] - **doc**: reword possessive form of Node.js in process.md (Rich Trott) [#31748](https://github.com/nodejs/node/pull/31748) +* [[`3969af43b4`](https://github.com/nodejs/node/commit/3969af43b4)] - **doc**: reword possessive form of Node.js in debugger.md (Rich Trott) [#31748](https://github.com/nodejs/node/pull/31748) +* [[`f9526057b3`](https://github.com/nodejs/node/commit/f9526057b3)] - **doc**: move gireeshpunathil to TSC emeritus (Gireesh Punathil) [#31770](https://github.com/nodejs/node/pull/31770) +* [[`b07175853f`](https://github.com/nodejs/node/commit/b07175853f)] - **doc**: pronouns for @Fishrock123 (Jeremiah Senkpiel) [#31725](https://github.com/nodejs/node/pull/31725) +* [[`7f4d6ee8ea`](https://github.com/nodejs/node/commit/7f4d6ee8ea)] - **doc**: move @Fishrock123 to TSC Emeriti (Jeremiah Senkpiel) [#31725](https://github.com/nodejs/node/pull/31725) +* [[`b177bba555`](https://github.com/nodejs/node/commit/b177bba555)] - **doc**: move @Fishrock123 to a previous releaser (Jeremiah Senkpiel) [#31725](https://github.com/nodejs/node/pull/31725) +* [[`9e4aad705f`](https://github.com/nodejs/node/commit/9e4aad705f)] - **doc**: fix typos in doc/api/https.md (Jeff) [#31793](https://github.com/nodejs/node/pull/31793) +* [[`eb2dce8342`](https://github.com/nodejs/node/commit/eb2dce8342)] - **doc**: claim ABI version 82 for Electron 10 (Samuel Attard) [#31778](https://github.com/nodejs/node/pull/31778) +* [[`db291aaf06`](https://github.com/nodejs/node/commit/db291aaf06)] - **doc**: guide - using valgrind to debug memory leaks (Michael Dawson) [#31501](https://github.com/nodejs/node/pull/31501) +* 
[[`aa16d80c05`](https://github.com/nodejs/node/commit/aa16d80c05)] - **doc,crypto**: re-document oaepLabel option (Ben Noordhuis) [#31825](https://github.com/nodejs/node/pull/31825) +* [[`9079bb42ea`](https://github.com/nodejs/node/commit/9079bb42ea)] - **http2**: make compat finished match http/1 (Robert Nagy) [#24347](https://github.com/nodejs/node/pull/24347) +* [[`3bd8feac0c`](https://github.com/nodejs/node/commit/3bd8feac0c)] - **meta**: move aqrln to emeritus (Rich Trott) [#31997](https://github.com/nodejs/node/pull/31997) +* [[`c801045fcd`](https://github.com/nodejs/node/commit/c801045fcd)] - **meta**: move jbergstroem to emeritus (Rich Trott) [#31996](https://github.com/nodejs/node/pull/31996) +* [[`ded3890bec`](https://github.com/nodejs/node/commit/ded3890bec)] - **meta**: move maclover7 to Emeritus (Rich Trott) [#31994](https://github.com/nodejs/node/pull/31994) +* [[`91ce69a554`](https://github.com/nodejs/node/commit/91ce69a554)] - **meta**: move Glen Keane to Collaborator Emeritus (Rich Trott) [#31993](https://github.com/nodejs/node/pull/31993) +* [[`b74c40eda6`](https://github.com/nodejs/node/commit/b74c40eda6)] - **meta**: move not-an-aardvark to emeritus (Rich Trott) [#31928](https://github.com/nodejs/node/pull/31928) +* [[`61a0d8b6cd`](https://github.com/nodejs/node/commit/61a0d8b6cd)] - **meta**: move julianduque to emeritus (Rich Trott) [#31863](https://github.com/nodejs/node/pull/31863) +* [[`94a471a422`](https://github.com/nodejs/node/commit/94a471a422)] - **meta**: move eljefedelrodeodeljefe to emeritus (Rich Trott) [#31735](https://github.com/nodejs/node/pull/31735) +* [[`9e3e6763fa`](https://github.com/nodejs/node/commit/9e3e6763fa)] - **module**: port source map sort logic from chromium (bcoe) [#31927](https://github.com/nodejs/node/pull/31927) +* [[`b9f3bfe6c8`](https://github.com/nodejs/node/commit/b9f3bfe6c8)] - **module**: disable conditional exports, self resolve warnings (Guy Bedford) [#31845](https://github.com/nodejs/node/pull/31845) +* [[`bbb6cc733c`](https://github.com/nodejs/node/commit/bbb6cc733c)] - **module**: package "exports" error refinements (Guy Bedford) [#31625](https://github.com/nodejs/node/pull/31625) +* [[`6adbfac9b0`](https://github.com/nodejs/node/commit/6adbfac9b0)] - **repl**: eager-evaluate input in parens (Shelley Vohr) [#31943](https://github.com/nodejs/node/pull/31943) +* [[`6a35b0d102`](https://github.com/nodejs/node/commit/6a35b0d102)] - **src**: don't run bootstrapper in CreateEnvironment (Shelley Vohr) [#31910](https://github.com/nodejs/node/pull/31910) +* [[`3497370d66`](https://github.com/nodejs/node/commit/3497370d66)] - **src**: move InternalCallbackScope to StartExecution (Shelley Vohr) [#31944](https://github.com/nodejs/node/pull/31944) +* [[`f62967c827`](https://github.com/nodejs/node/commit/f62967c827)] - **src**: enable `StreamPipe` for generic `StreamBase`s (Anna Henningsen) [#31869](https://github.com/nodejs/node/pull/31869) +* [[`776f379124`](https://github.com/nodejs/node/commit/776f379124)] - **src**: include large pages source unconditionally (Gabriel Schulhof) [#31904](https://github.com/nodejs/node/pull/31904) +* [[`9f68e14052`](https://github.com/nodejs/node/commit/9f68e14052)] - **src**: elevate v8 namespaces (Harshitha KP) [#31901](https://github.com/nodejs/node/pull/31901) +* [[`8fa6373e62`](https://github.com/nodejs/node/commit/8fa6373e62)] - **src**: allow unique\_ptrs with custom deleter in memory tracker (Anna Henningsen) [#31870](https://github.com/nodejs/node/pull/31870) +* 
[[`88ccb444e3`](https://github.com/nodejs/node/commit/88ccb444e3)] - **src**: move BaseObject subclass dtors/ctors out of node\_crypto.h (Anna Henningsen) [#31872](https://github.com/nodejs/node/pull/31872) +* [[`98d262e5f3`](https://github.com/nodejs/node/commit/98d262e5f3)] - **src**: inform callback scopes about exceptions in HTTP parser (Anna Henningsen) [#31801](https://github.com/nodejs/node/pull/31801) +* [[`57302f866e`](https://github.com/nodejs/node/commit/57302f866e)] - **src**: prefer 3-argument Array::New() (Anna Henningsen) [#31775](https://github.com/nodejs/node/pull/31775) +* [[`8a2b62e4cd`](https://github.com/nodejs/node/commit/8a2b62e4cd)] - **stream**: ensure pipeline always destroys streams (Robert Nagy) [#31940](https://github.com/nodejs/node/pull/31940) +* [[`313ecaabe5`](https://github.com/nodejs/node/commit/313ecaabe5)] - **stream**: fix broken pipeline error propagation (Robert Nagy) [#31835](https://github.com/nodejs/node/pull/31835) +* [[`8ad64b8e53`](https://github.com/nodejs/node/commit/8ad64b8e53)] - **(SEMVER-MINOR)** **stream**: support passing generator functions into pipeline() (Robert Nagy) [#31223](https://github.com/nodejs/node/pull/31223) +* [[`d0a00711f8`](https://github.com/nodejs/node/commit/d0a00711f8)] - **stream**: invoke buffered write callbacks on error (Robert Nagy) [#30596](https://github.com/nodejs/node/pull/30596) +* [[`1bca7b6c70`](https://github.com/nodejs/node/commit/1bca7b6c70)] - **test**: move test-inspector-module to parallel (Rich Trott) [#32025](https://github.com/nodejs/node/pull/32025) +* [[`932563473c`](https://github.com/nodejs/node/commit/932563473c)] - **test**: improve disable AsyncLocalStorage test (Andrey Pechkurov) [#31998](https://github.com/nodejs/node/pull/31998) +* [[`49864d161e`](https://github.com/nodejs/node/commit/49864d161e)] - **test**: fix flaky test-dns-any.js (Rich Trott) [#32017](https://github.com/nodejs/node/pull/32017) +* [[`38494746a6`](https://github.com/nodejs/node/commit/38494746a6)] - **test**: fix flaky test-gc-net-timeout (Robert Nagy) [#31918](https://github.com/nodejs/node/pull/31918) +* [[`b6d33f671a`](https://github.com/nodejs/node/commit/b6d33f671a)] - **test**: change test to not be sensitive to buffer send size (Rusty Conover) [#31499](https://github.com/nodejs/node/pull/31499) +* [[`cef5502055`](https://github.com/nodejs/node/commit/cef5502055)] - **test**: remove sequential/test-https-keep-alive-large-write.js (Rusty Conover) [#31499](https://github.com/nodejs/node/pull/31499) +* [[`f1e76488a7`](https://github.com/nodejs/node/commit/f1e76488a7)] - **test**: validate common property usage (Denys Otrishko) [#31933](https://github.com/nodejs/node/pull/31933) +* [[`ab8f060159`](https://github.com/nodejs/node/commit/ab8f060159)] - **test**: fix usage of invalid common properties (Denys Otrishko) [#31933](https://github.com/nodejs/node/pull/31933) +* [[`49c959d636`](https://github.com/nodejs/node/commit/49c959d636)] - **test**: increase timeout in vm-timeout-escape-queuemicrotask (Denys Otrishko) [#31966](https://github.com/nodejs/node/pull/31966) +* [[`04eda02d87`](https://github.com/nodejs/node/commit/04eda02d87)] - **test**: add documentation for common.enoughTestCpu (Rich Trott) [#31931](https://github.com/nodejs/node/pull/31931) +* [[`918c2b67cc`](https://github.com/nodejs/node/commit/918c2b67cc)] - **test**: fix typo in common/index.js (Rich Trott) [#31931](https://github.com/nodejs/node/pull/31931) +* [[`f89fb2751b`](https://github.com/nodejs/node/commit/f89fb2751b)] - **test**: mark empty 
udp tests flaky on OS X (Sam Roberts) [#31936](https://github.com/nodejs/node/pull/31936) +* [[`e08fef1fda`](https://github.com/nodejs/node/commit/e08fef1fda)] - **test**: add secp224k1 check in crypto-dh-stateless (Daniel Bevenius) [#31715](https://github.com/nodejs/node/pull/31715) +* [[`4fe9e043ef`](https://github.com/nodejs/node/commit/4fe9e043ef)] - **test**: remove common.PORT from assorted pummel tests (Rich Trott) [#31897](https://github.com/nodejs/node/pull/31897) +* [[`7d5776e119`](https://github.com/nodejs/node/commit/7d5776e119)] - **test**: remove flaky designation for test-net-connect-options-port (Rich Trott) [#31841](https://github.com/nodejs/node/pull/31841) +* [[`1933efa62f`](https://github.com/nodejs/node/commit/1933efa62f)] - **test**: remove common.PORT from test-net-write-callbacks.js (Rich Trott) [#31839](https://github.com/nodejs/node/pull/31839) +* [[`87e9014764`](https://github.com/nodejs/node/commit/87e9014764)] - **test**: remove common.PORT from test-net-pause (Rich Trott) [#31749](https://github.com/nodejs/node/pull/31749) +* [[`3fbd5ab265`](https://github.com/nodejs/node/commit/3fbd5ab265)] - **test**: remove common.PORT from test-tls-server-large-request (Rich Trott) [#31749](https://github.com/nodejs/node/pull/31749) +* [[`e76ac1d2c9`](https://github.com/nodejs/node/commit/e76ac1d2c9)] - **test**: remove common.PORT from test-net-throttle (Rich Trott) [#31749](https://github.com/nodejs/node/pull/31749) +* [[`724bf3105b`](https://github.com/nodejs/node/commit/724bf3105b)] - **test**: remove common.PORT from test-net-timeout (Rich Trott) [#31749](https://github.com/nodejs/node/pull/31749) +* [[`60c71dcad2`](https://github.com/nodejs/node/commit/60c71dcad2)] - **test**: add known issue test for sync writable callback (James M Snell) [#31756](https://github.com/nodejs/node/pull/31756) +* [[`2c0b249098`](https://github.com/nodejs/node/commit/2c0b249098)] - **tls**: reduce memory copying and number of BIO buffer allocations (Rusty Conover) [#31499](https://github.com/nodejs/node/pull/31499) +* [[`acb3aff674`](https://github.com/nodejs/node/commit/acb3aff674)] - **(SEMVER-MINOR)** **tls**: expose SSL\_export\_keying\_material (simon) [#31814](https://github.com/nodejs/node/pull/31814) +* [[`f293dcf6de`](https://github.com/nodejs/node/commit/f293dcf6de)] - **tools**: add NODE\_TEST\_NO\_INTERNET to the doc builder (Joyee Cheung) [#31849](https://github.com/nodejs/node/pull/31849) +* [[`79b1f04b15`](https://github.com/nodejs/node/commit/79b1f04b15)] - **tools**: sync gyp code base with node-gyp repo (Michaël Zasso) [#30563](https://github.com/nodejs/node/pull/30563) +* [[`f858f2366c`](https://github.com/nodejs/node/commit/f858f2366c)] - **tools**: update lint-md task to lint for possessives of Node.js (Rich Trott) [#31862](https://github.com/nodejs/node/pull/31862) +* [[`ae3929e958`](https://github.com/nodejs/node/commit/ae3929e958)] - **(SEMVER-MINOR)** **vm**: implement vm.measureMemory() for per-context memory measurement (Joyee Cheung) [#31824](https://github.com/nodejs/node/pull/31824) +* [[`a86cb0e480`](https://github.com/nodejs/node/commit/a86cb0e480)] - **vm**: lazily initialize primordials for vm contexts (Joyee Cheung) [#31738](https://github.com/nodejs/node/pull/31738) +* [[`f2389eba99`](https://github.com/nodejs/node/commit/f2389eba99)] - **worker**: emit runtime error on loop creation failure (Harshitha KP) [#31621](https://github.com/nodejs/node/pull/31621) +* [[`f87ac90849`](https://github.com/nodejs/node/commit/f87ac90849)] - **worker**: unroll file 
extension regexp (Anna Henningsen) [#31779](https://github.com/nodejs/node/pull/31779) + + +## 2020-02-18, Version 13.9.0 (Current), @codebytere + +### Notable changes + +* **async_hooks** + * add executionAsyncResource (Matteo Collina) [#30959](https://github.com/nodejs/node/pull/30959) +* **crypto** + * add crypto.diffieHellman (Tobias Nießen) [#31178](https://github.com/nodejs/node/pull/31178) + * add DH support to generateKeyPair (Tobias Nießen) [#31178](https://github.com/nodejs/node/pull/31178) + * simplify DH groups (Tobias Nießen) [#31178](https://github.com/nodejs/node/pull/31178) + * add key type 'dh' (Tobias Nießen) [#31178](https://github.com/nodejs/node/pull/31178) +* **test** + * skip keygen tests on arm systems (Tobias Nießen) [#31178](https://github.com/nodejs/node/pull/31178) +* **perf_hooks** + * add property flags to GCPerformanceEntry (Kirill Fomichev) [#29547](https://github.com/nodejs/node/pull/29547) +* **process** + * report ArrayBuffer memory in `memoryUsage()` (Anna Henningsen) [#31550](https://github.com/nodejs/node/pull/31550) +* **readline** + * make tab size configurable (Ruben Bridgewater) [#31318](https://github.com/nodejs/node/pull/31318) +* **report** + * add support for Workers (Anna Henningsen) [#31386](https://github.com/nodejs/node/pull/31386) +* **worker** + * add ability to take heap snapshot from parent thread (Anna Henningsen) [#31569](https://github.com/nodejs/node/pull/31569) +* **added new collaborators** + * add ronag to collaborators (Robert Nagy) [#31498](https://github.com/nodejs/node/pull/31498) + +### Commits + +* [[`2db7593838`](https://github.com/nodejs/node/commit/2db7593838)] - **assert**: align character indicators properly (Ruben Bridgewater) [#31429](https://github.com/nodejs/node/pull/31429) +* [[`a840e9d639`](https://github.com/nodejs/node/commit/a840e9d639)] - **async_hooks**: ensure event after been emitted on runInAsyncScope (legendecas) [#31784](https://github.com/nodejs/node/pull/31784) +* [[`6be51296e4`](https://github.com/nodejs/node/commit/6be51296e4)] - **(SEMVER-MINOR)** **async_hooks**: add executionAsyncResource (Matteo Collina) [#30959](https://github.com/nodejs/node/pull/30959) +* [[`2de085fe93`](https://github.com/nodejs/node/commit/2de085fe93)] - **benchmark**: use let instead of var (Daniele Belardi) [#31592](https://github.com/nodejs/node/pull/31592) +* [[`e37f5100e5`](https://github.com/nodejs/node/commit/e37f5100e5)] - **benchmark**: swap var for let in benchmarks (Alex Ramirez) [#28958](https://github.com/nodejs/node/pull/28958) +* [[`819fb76ba5`](https://github.com/nodejs/node/commit/819fb76ba5)] - ***Revert*** "**benchmark**: refactor helper into a class" (Anna Henningsen) [#31722](https://github.com/nodejs/node/pull/31722) +* [[`8974fa794c`](https://github.com/nodejs/node/commit/8974fa794c)] - ***Revert*** "**benchmark**: add `test` and `all` options and improve errors" (Anna Henningsen) [#31722](https://github.com/nodejs/node/pull/31722) +* [[`30f55cebb6`](https://github.com/nodejs/node/commit/30f55cebb6)] - ***Revert*** "**benchmark**: remove special test entries" (Anna Henningsen) [#31722](https://github.com/nodejs/node/pull/31722) +* [[`1484f5ab6e`](https://github.com/nodejs/node/commit/1484f5ab6e)] - **benchmark**: remove special test entries (Ruben Bridgewater) [#31396](https://github.com/nodejs/node/pull/31396) +* [[`ca343caee3`](https://github.com/nodejs/node/commit/ca343caee3)] - **benchmark**: add `test` and `all` options and improve errors (Ruben Bridgewater) 
[#31396](https://github.com/nodejs/node/pull/31396) +* [[`9f2c742626`](https://github.com/nodejs/node/commit/9f2c742626)] - **benchmark**: refactor helper into a class (Ruben Bridgewater) [#31396](https://github.com/nodejs/node/pull/31396) +* [[`161db608ae`](https://github.com/nodejs/node/commit/161db608ae)] - **benchmark**: check for and fix multiple end() (Brian White) [#31624](https://github.com/nodejs/node/pull/31624) +* [[`6fe8eda3ca`](https://github.com/nodejs/node/commit/6fe8eda3ca)] - **benchmark**: clean up config resolution in multiple benchmarks (Denys Otrishko) [#31581](https://github.com/nodejs/node/pull/31581) +* [[`ebdcafafeb`](https://github.com/nodejs/node/commit/ebdcafafeb)] - **benchmark**: add MessagePort benchmark (Anna Henningsen) [#31568](https://github.com/nodejs/node/pull/31568) +* [[`eb3c6e9127`](https://github.com/nodejs/node/commit/eb3c6e9127)] - **benchmark**: use let and const instead of var (Daniele Belardi) [#31518](https://github.com/nodejs/node/pull/31518) +* [[`b29badad81`](https://github.com/nodejs/node/commit/b29badad81)] - **benchmark**: fix getStringWidth() benchmark (Rich Trott) [#31476](https://github.com/nodejs/node/pull/31476) +* [[`519134ddb0`](https://github.com/nodejs/node/commit/519134ddb0)] - **buffer**: improve from() performance (Brian White) [#31615](https://github.com/nodejs/node/pull/31615) +* [[`769154de07`](https://github.com/nodejs/node/commit/769154de07)] - **buffer**: improve concat() performance (Brian White) [#31522](https://github.com/nodejs/node/pull/31522) +* [[`9d45393e95`](https://github.com/nodejs/node/commit/9d45393e95)] - **buffer**: improve fill(number) performance (Brian White) [#31489](https://github.com/nodejs/node/pull/31489) +* [[`60a69770f5`](https://github.com/nodejs/node/commit/60a69770f5)] - **build**: add configure option to debug only Node.js part of the binary (Anna Henningsen) [#31644](https://github.com/nodejs/node/pull/31644) +* [[`10f9abe81d`](https://github.com/nodejs/node/commit/10f9abe81d)] - **build**: ignore all the "Debug","Release" folders (ConorDavenport) [#31565](https://github.com/nodejs/node/pull/31565) +* [[`03eade01d7`](https://github.com/nodejs/node/commit/03eade01d7)] - **build**: enable loading internal modules from disk (Gus Caplan) [#31321](https://github.com/nodejs/node/pull/31321) +* [[`a2b7006847`](https://github.com/nodejs/node/commit/a2b7006847)] - **build**: build docs in GitHub Actions CI workflow (Richard Lau) [#31504](https://github.com/nodejs/node/pull/31504) +* [[`2e216aebcb`](https://github.com/nodejs/node/commit/2e216aebcb)] - **build**: do not use setup-node in build workflows (Richard Lau) [#31349](https://github.com/nodejs/node/pull/31349) +* [[`825d089763`](https://github.com/nodejs/node/commit/825d089763)] - **crypto**: fix performance regression (Robert Nagy) [#31742](https://github.com/nodejs/node/pull/31742) +* [[`3c6545f0b4`](https://github.com/nodejs/node/commit/3c6545f0b4)] - **crypto**: improve randomBytes() performance (Brian White) [#31519](https://github.com/nodejs/node/pull/31519) +* [[`f84b34d42c`](https://github.com/nodejs/node/commit/f84b34d42c)] - **crypto**: improve errors in DiffieHellmanGroup (Tobias Nießen) [#31445](https://github.com/nodejs/node/pull/31445) +* [[`4591202e66`](https://github.com/nodejs/node/commit/4591202e66)] - **crypto**: assign and use ERR\_CRYPTO\_UNKNOWN\_CIPHER (Tobias Nießen) [#31437](https://github.com/nodejs/node/pull/31437) +* [[`bf46c304dd`](https://github.com/nodejs/node/commit/bf46c304dd)] - **(SEMVER-MINOR)** **crypto**: 
add crypto.diffieHellman (Tobias Nießen) [#31178](https://github.com/nodejs/node/pull/31178) +* [[`0d3e095941`](https://github.com/nodejs/node/commit/0d3e095941)] - **(SEMVER-MINOR)** **crypto**: add DH support to generateKeyPair (Tobias Nießen) [#31178](https://github.com/nodejs/node/pull/31178) +* [[`15bd2c9f0c`](https://github.com/nodejs/node/commit/15bd2c9f0c)] - **(SEMVER-MINOR)** **crypto**: simplify DH groups (Tobias Nießen) [#31178](https://github.com/nodejs/node/pull/31178) +* [[`572322fddf`](https://github.com/nodejs/node/commit/572322fddf)] - **(SEMVER-MINOR)** **crypto**: add key type 'dh' (Tobias Nießen) [#31178](https://github.com/nodejs/node/pull/31178) +* [[`0ac124b6b9`](https://github.com/nodejs/node/commit/0ac124b6b9)] - **deps**: upgrade npm to 6.13.7 (Michael Perrotte) [#31558](https://github.com/nodejs/node/pull/31558) +* [[`bf7097c77d`](https://github.com/nodejs/node/commit/bf7097c77d)] - **deps**: switch to chromium's zlib implementation (Brian White) [#31201](https://github.com/nodejs/node/pull/31201) +* [[`2eeaa5ce40`](https://github.com/nodejs/node/commit/2eeaa5ce40)] - **deps**: uvwasi: cherry-pick 7b5b6f9 (cjihrig) [#31495](https://github.com/nodejs/node/pull/31495) +* [[`464f4afa66`](https://github.com/nodejs/node/commit/464f4afa66)] - **deps**: upgrade to libuv 1.34.2 (cjihrig) [#31477](https://github.com/nodejs/node/pull/31477) +* [[`9811ebe0c5`](https://github.com/nodejs/node/commit/9811ebe0c5)] - **deps**: uvwasi: cherry-pick eea4508 (cjihrig) [#31432](https://github.com/nodejs/node/pull/31432) +* [[`2fe0ed3a2e`](https://github.com/nodejs/node/commit/2fe0ed3a2e)] - **deps**: uvwasi: cherry-pick c3bef8e (cjihrig) [#31432](https://github.com/nodejs/node/pull/31432) +* [[`09566be899`](https://github.com/nodejs/node/commit/09566be899)] - **deps**: uvwasi: cherry-pick ea73af5 (cjihrig) [#31432](https://github.com/nodejs/node/pull/31432) +* [[`04f2799ed2`](https://github.com/nodejs/node/commit/04f2799ed2)] - **deps**: update to uvwasi 0.0.5 (cjihrig) [#31432](https://github.com/nodejs/node/pull/31432) +* [[`7c4f1ed030`](https://github.com/nodejs/node/commit/7c4f1ed030)] - **deps**: uvwasi: cherry-pick 941bedf (cjihrig) [#31363](https://github.com/nodejs/node/pull/31363) +* [[`00e38a749a`](https://github.com/nodejs/node/commit/00e38a749a)] - **deps**: port uvwasi@676ba9a to gyp (cjihrig) [#31363](https://github.com/nodejs/node/pull/31363) +* [[`5bd3f6c258`](https://github.com/nodejs/node/commit/5bd3f6c258)] - **deps,test**: update to uvwasi 0.0.4 (cjihrig) [#31363](https://github.com/nodejs/node/pull/31363) +* [[`2cd8461e56`](https://github.com/nodejs/node/commit/2cd8461e56)] - **doc**: add glossary.md (gengjiawen) [#27517](https://github.com/nodejs/node/pull/27517) +* [[`c4613c6b8b`](https://github.com/nodejs/node/commit/c4613c6b8b)] - **doc**: add prerequisites information for Arch (Ujjwal Sharma) [#31669](https://github.com/nodejs/node/pull/31669) +* [[`b35f83e69b`](https://github.com/nodejs/node/commit/b35f83e69b)] - **doc**: fix typo on fs docs (Juan José Arboleda) [#31620](https://github.com/nodejs/node/pull/31620) +* [[`2ff812ca84`](https://github.com/nodejs/node/commit/2ff812ca84)] - **doc**: update contact email for @ryzokuken (Ujjwal Sharma) [#31670](https://github.com/nodejs/node/pull/31670) +* [[`2c83946757`](https://github.com/nodejs/node/commit/2c83946757)] - **doc**: fix default server timeout description for https (Andrey Pechkurov) [#31692](https://github.com/nodejs/node/pull/31692) +* 
[[`b56a21fdad`](https://github.com/nodejs/node/commit/b56a21fdad)] - **doc**: add directions to mark a release line as lts (Danielle Adams) [#31724](https://github.com/nodejs/node/pull/31724) +* [[`5ae40cd2b2`](https://github.com/nodejs/node/commit/5ae40cd2b2)] - **doc**: expand C++ README with information about exception handling (Anna Henningsen) [#31720](https://github.com/nodejs/node/pull/31720) +* [[`94a0ec1b99`](https://github.com/nodejs/node/commit/94a0ec1b99)] - **doc**: update foundation name in onboarding (Tobias Nießen) [#31719](https://github.com/nodejs/node/pull/31719) +* [[`fda97fa772`](https://github.com/nodejs/node/commit/fda97fa772)] - **doc**: reword possessive form of Node.js in zlib.md (Rich Trott) [#31713](https://github.com/nodejs/node/pull/31713) +* [[`eea58cd3d5`](https://github.com/nodejs/node/commit/eea58cd3d5)] - **doc**: reword possessive form of Node.js in modules.md (Rich Trott) [#31713](https://github.com/nodejs/node/pull/31713) +* [[`d0238190a1`](https://github.com/nodejs/node/commit/d0238190a1)] - **doc**: reword possessive form of Node.js in repl.md (Rich Trott) [#31713](https://github.com/nodejs/node/pull/31713) +* [[`55a25b3bbe`](https://github.com/nodejs/node/commit/55a25b3bbe)] - **doc**: reword section title in addons.md (Rich Trott) [#31713](https://github.com/nodejs/node/pull/31713) +* [[`ba9fae058a`](https://github.com/nodejs/node/commit/ba9fae058a)] - **doc**: revise deepEqual() legacy assertion mode text (Rich Trott) [#31704](https://github.com/nodejs/node/pull/31704) +* [[`f6d78f959f`](https://github.com/nodejs/node/commit/f6d78f959f)] - **doc**: improve strict assertion mode color text (Rich Trott) [#31703](https://github.com/nodejs/node/pull/31703) +* [[`22cf3e3d4e`](https://github.com/nodejs/node/commit/22cf3e3d4e)] - **doc**: consolidate introductory text (Rich Trott) [#31667](https://github.com/nodejs/node/pull/31667) +* [[`1e2327d9e6`](https://github.com/nodejs/node/commit/1e2327d9e6)] - **doc**: simplify async\_hooks overview (Rich Trott) [#31660](https://github.com/nodejs/node/pull/31660) +* [[`77ec381ea2`](https://github.com/nodejs/node/commit/77ec381ea2)] - **doc**: clarify Worker exit/message event ordering (Anna Henningsen) [#31642](https://github.com/nodejs/node/pull/31642) +* [[`4b0085c7e3`](https://github.com/nodejs/node/commit/4b0085c7e3)] - **doc**: update TSC name in "Release Process" (Tobias Nießen) [#31652](https://github.com/nodejs/node/pull/31652) +* [[`2e6c737281`](https://github.com/nodejs/node/commit/2e6c737281)] - **doc**: remove .github/ISSUE\_TEMPLATE.md in favor of the template folder (Joyee Cheung) [#31656](https://github.com/nodejs/node/pull/31656) +* [[`b61b85ccf9`](https://github.com/nodejs/node/commit/b61b85ccf9)] - **doc**: add note in BUILDING.md about running `make distclean` (Swagat Konchada) [#31542](https://github.com/nodejs/node/pull/31542) +* [[`2991e7c0e3`](https://github.com/nodejs/node/commit/2991e7c0e3)] - **doc**: correct getting an ArrayBuffer's length (tsabolov) [#31632](https://github.com/nodejs/node/pull/31632) +* [[`e27f24987e`](https://github.com/nodejs/node/commit/e27f24987e)] - **doc**: ask more questions in the bug report template (Joyee Cheung) [#31611](https://github.com/nodejs/node/pull/31611) +* [[`b50a6cc54d`](https://github.com/nodejs/node/commit/b50a6cc54d)] - **doc**: add example to fs.promises.readdir (Conor ONeill) [#31552](https://github.com/nodejs/node/pull/31552) +* [[`1dbe765b0b`](https://github.com/nodejs/node/commit/1dbe765b0b)] - **doc**: add AsyncResource + Worker pool 
example (Anna Henningsen) [#31601](https://github.com/nodejs/node/pull/31601) +* [[`f40264980e`](https://github.com/nodejs/node/commit/f40264980e)] - **doc**: fix numbering (Steffen) [#31575](https://github.com/nodejs/node/pull/31575) +* [[`3ba0a22c57`](https://github.com/nodejs/node/commit/3ba0a22c57)] - **doc**: clarify socket.setNoDelay() explanation (Rusty Conover) [#31541](https://github.com/nodejs/node/pull/31541) +* [[`faec87b7f1`](https://github.com/nodejs/node/commit/faec87b7f1)] - **doc**: list largepage values in --help (cjihrig) [#31537](https://github.com/nodejs/node/pull/31537) +* [[`2638110cce`](https://github.com/nodejs/node/commit/2638110cce)] - **doc**: clarify require() OS independence (Denys Otrishko) [#31571](https://github.com/nodejs/node/pull/31571) +* [[`7fe9d5ebd4`](https://github.com/nodejs/node/commit/7fe9d5ebd4)] - **doc**: add protocol option in http2.connect() (Michael Lumish) [#31560](https://github.com/nodejs/node/pull/31560) +* [[`6626c4de3c`](https://github.com/nodejs/node/commit/6626c4de3c)] - **doc**: clarify that `v8.serialize()` is not deterministic (Anna Henningsen) [#31548](https://github.com/nodejs/node/pull/31548) +* [[`cde4b51a92`](https://github.com/nodejs/node/commit/cde4b51a92)] - **doc**: update job reference in COLLABORATOR\_GUIDE.md (Richard Lau) [#31557](https://github.com/nodejs/node/pull/31557) +* [[`4cac2cccd6`](https://github.com/nodejs/node/commit/4cac2cccd6)] - **doc**: simultaneous blog and email of sec announce (Sam Roberts) [#31483](https://github.com/nodejs/node/pull/31483) +* [[`e2b3e4e0e3`](https://github.com/nodejs/node/commit/e2b3e4e0e3)] - **doc**: update collaborator guide citgm instructions (Robert Nagy) [#31549](https://github.com/nodejs/node/pull/31549) +* [[`43186e0046`](https://github.com/nodejs/node/commit/43186e0046)] - **doc**: change error message testing policy (Tobias Nießen) [#31421](https://github.com/nodejs/node/pull/31421) +* [[`a52df55b9a`](https://github.com/nodejs/node/commit/a52df55b9a)] - **doc**: remove redundant properties from headers (XhmikosR) [#31492](https://github.com/nodejs/node/pull/31492) +* [[`04d783ae71`](https://github.com/nodejs/node/commit/04d783ae71)] - **doc**: update maintaining-V8.md (kenzo-spaulding) [#31503](https://github.com/nodejs/node/pull/31503) +* [[`f75fe9ab71`](https://github.com/nodejs/node/commit/f75fe9ab71)] - **doc**: enable visual code indication in headers (Rich Trott) [#31493](https://github.com/nodejs/node/pull/31493) +* [[`8f25e51e4e`](https://github.com/nodejs/node/commit/8f25e51e4e)] - **doc**: clean up and streamline vm.md examples (Denys Otrishko) [#31474](https://github.com/nodejs/node/pull/31474) +* [[`729b96137e`](https://github.com/nodejs/node/commit/729b96137e)] - **doc**: further fix async iterator example (Robert Nagy) [#31367](https://github.com/nodejs/node/pull/31367) +* [[`15b24b71ce`](https://github.com/nodejs/node/commit/15b24b71ce)] - **doc**: add ronag to collaborators (Robert Nagy) [#31498](https://github.com/nodejs/node/pull/31498) +* [[`e9462b4d44`](https://github.com/nodejs/node/commit/e9462b4d44)] - **doc**: fix code display in header glitch (Rich Trott) [#31460](https://github.com/nodejs/node/pull/31460) +* [[`b1c745877b`](https://github.com/nodejs/node/commit/b1c745877b)] - **doc**: fix syntax in N-API documentation (Tobias Nießen) [#31466](https://github.com/nodejs/node/pull/31466) +* [[`67d8967f98`](https://github.com/nodejs/node/commit/67d8967f98)] - **doc**: add explanatory to path.resolve description (Yakov Litvin) 
[#31430](https://github.com/nodejs/node/pull/31430) +* [[`1099524452`](https://github.com/nodejs/node/commit/1099524452)] - **doc**: document process.std\*.fd (Harshitha KP) [#31395](https://github.com/nodejs/node/pull/31395) +* [[`843c5c6f46`](https://github.com/nodejs/node/commit/843c5c6f46)] - **doc**: fix several child\_process doc typos (cjihrig) [#31393](https://github.com/nodejs/node/pull/31393) +* [[`d77099856a`](https://github.com/nodejs/node/commit/d77099856a)] - **doc**: fix a broken link in fs.md (himself65) [#31373](https://github.com/nodejs/node/pull/31373) +* [[`1e08d3c2f1`](https://github.com/nodejs/node/commit/1e08d3c2f1)] - **doc**: correct added version for --abort-on-uncaught-exception (Anna Henningsen) [#31360](https://github.com/nodejs/node/pull/31360) +* [[`6055134db6`](https://github.com/nodejs/node/commit/6055134db6)] - **doc**: explain `hex` encoding in Buffer API (Harshitha KP) [#31352](https://github.com/nodejs/node/pull/31352) +* [[`bd54abe3f7`](https://github.com/nodejs/node/commit/bd54abe3f7)] - **doc**: explain \_writev() API (Harshitha KP) [#31356](https://github.com/nodejs/node/pull/31356) +* [[`91f5e9b0f7`](https://github.com/nodejs/node/commit/91f5e9b0f7)] - **doc**: document missing properties in child\_process (Harshitha KP) [#31342](https://github.com/nodejs/node/pull/31342) +* [[`6874deef28`](https://github.com/nodejs/node/commit/6874deef28)] - **doc,assert**: rename "mode" to "assertion mode" (Rich Trott) [#31635](https://github.com/nodejs/node/pull/31635) +* [[`788ea36ce0`](https://github.com/nodejs/node/commit/788ea36ce0)] - **doc,net**: reword Unix domain path paragraph in net.md (Rich Trott) [#31684](https://github.com/nodejs/node/pull/31684) +* [[`e3e40a12b0`](https://github.com/nodejs/node/commit/e3e40a12b0)] - **doc,util**: revise util.md introductory paragraph (Rich Trott) [#31685](https://github.com/nodejs/node/pull/31685) +* [[`e46cfaf146`](https://github.com/nodejs/node/commit/e46cfaf146)] - **errors**: make use of "cannot" consistent (Tobias Nießen) [#31420](https://github.com/nodejs/node/pull/31420) +* [[`f6392e9fde`](https://github.com/nodejs/node/commit/f6392e9fde)] - **esm**: import.meta.resolve with nodejs: builtins (Guy Bedford) [#31032](https://github.com/nodejs/node/pull/31032) +* [[`21fc81821f`](https://github.com/nodejs/node/commit/21fc81821f)] - **fs**: set path when mkdir recursive called on file (bcoe) [#31607](https://github.com/nodejs/node/pull/31607) +* [[`8669ecc8a2`](https://github.com/nodejs/node/commit/8669ecc8a2)] - **fs**: bail on permission error in recursive directory creation (bcoe) [#31505](https://github.com/nodejs/node/pull/31505) +* [[`2c2b3ba39c`](https://github.com/nodejs/node/commit/2c2b3ba39c)] - **fs**: do not emit 'close' twice if emitClose enabled (Robert Nagy) [#31383](https://github.com/nodejs/node/pull/31383) +* [[`32ac1be372`](https://github.com/nodejs/node/commit/32ac1be372)] - **fs**: unset FileHandle fd after close (Anna Henningsen) [#31389](https://github.com/nodejs/node/pull/31389) +* [[`9ecae58643`](https://github.com/nodejs/node/commit/9ecae58643)] - **lib**: delete dead code in SourceMap (Justin Ridgewell) [#31512](https://github.com/nodejs/node/pull/31512) +* [[`7ecf842429`](https://github.com/nodejs/node/commit/7ecf842429)] - **lib,src**: switch Buffer::kMaxLength to size\_t (Ben Noordhuis) [#31406](https://github.com/nodejs/node/pull/31406) +* [[`15c8d9ead1`](https://github.com/nodejs/node/commit/15c8d9ead1)] - **meta**: move princejwesley to emeritus (Rich Trott) 
[#31730](https://github.com/nodejs/node/pull/31730) +* [[`f5ae510e03`](https://github.com/nodejs/node/commit/f5ae510e03)] - **meta**: move vkurchatkin to emeritus (Rich Trott) [#31729](https://github.com/nodejs/node/pull/31729) +* [[`cd520ddfef`](https://github.com/nodejs/node/commit/cd520ddfef)] - **meta**: move calvinmetcalf to emeritus (Rich Trott) [#31736](https://github.com/nodejs/node/pull/31736) +* [[`832255df89`](https://github.com/nodejs/node/commit/832255df89)] - **meta**: fix collaborator list errors in README.md (James M Snell) [#31655](https://github.com/nodejs/node/pull/31655) +* [[`aa266628ba`](https://github.com/nodejs/node/commit/aa266628ba)] - **module**: drop support for extensionless main entry points in esm (Geoffrey Booth) [#31415](https://github.com/nodejs/node/pull/31415) +* [[`ca81af7d73`](https://github.com/nodejs/node/commit/ca81af7d73)] - **module**: correct docs about when extensionless files are supported (Geoffrey Booth) [#31415](https://github.com/nodejs/node/pull/31415) +* [[`6797656d86`](https://github.com/nodejs/node/commit/6797656d86)] - **module**: revert #31021 (Geoffrey Booth) [#31415](https://github.com/nodejs/node/pull/31415) +* [[`ae2141effc`](https://github.com/nodejs/node/commit/ae2141effc)] - **n-api**: free instance data as reference (Gabriel Schulhof) [#31638](https://github.com/nodejs/node/pull/31638) +* [[`c8215699ab`](https://github.com/nodejs/node/commit/c8215699ab)] - **n-api**: rename 'promise' parameter to 'value' (Tobias Nießen) [#31544](https://github.com/nodejs/node/pull/31544) +* [[`5982726ef9`](https://github.com/nodejs/node/commit/5982726ef9)] - **net**: track state of setNoDelay() and prevent unnecessary system calls (Rusty Conover) [#31543](https://github.com/nodejs/node/pull/31543) +* [[`e7fea14c7b`](https://github.com/nodejs/node/commit/e7fea14c7b)] - **(SEMVER-MINOR)** **perf_hooks**: add property flags to GCPerformanceEntry (Kirill Fomichev) [#29547](https://github.com/nodejs/node/pull/29547) +* [[`672315651d`](https://github.com/nodejs/node/commit/672315651d)] - **(SEMVER-MINOR)** **process**: report ArrayBuffer memory in `memoryUsage()` (Anna Henningsen) [#31550](https://github.com/nodejs/node/pull/31550) +* [[`cd754337f8`](https://github.com/nodejs/node/commit/cd754337f8)] - **process**: fix two overflow cases in SourceMap VLQ decoding (Justin Ridgewell) [#31490](https://github.com/nodejs/node/pull/31490) +* [[`98f3028c30`](https://github.com/nodejs/node/commit/98f3028c30)] - **readline**: remove intermediate variable (cjihrig) [#31676](https://github.com/nodejs/node/pull/31676) +* [[`148dfde1d4`](https://github.com/nodejs/node/commit/148dfde1d4)] - **(SEMVER-MINOR)** **readline**: make tab size configurable (Ruben Bridgewater) [#31318](https://github.com/nodejs/node/pull/31318) +* [[`1bcf2f9423`](https://github.com/nodejs/node/commit/1bcf2f9423)] - **report**: add support for Workers (Anna Henningsen) [#31386](https://github.com/nodejs/node/pull/31386) +* [[`7c2d33f38f`](https://github.com/nodejs/node/commit/7c2d33f38f)] - **src**: use hex not decimal in IsArrayIndex (Shelley Vohr) [#31758](https://github.com/nodejs/node/pull/31758) +* [[`a095ef0d52`](https://github.com/nodejs/node/commit/a095ef0d52)] - **src**: keep main-thread Isolate attached to platform during Dispose (Anna Henningsen) [#31795](https://github.com/nodejs/node/pull/31795) +* [[`1dec9d196f`](https://github.com/nodejs/node/commit/1dec9d196f)] - **src**: wrap HostPort in ExclusiveAccess (Ben Noordhuis) [#31717](https://github.com/nodejs/node/pull/31717) 
+* [[`e23023d685`](https://github.com/nodejs/node/commit/e23023d685)] - **src**: add ExclusiveAccess class (Ben Noordhuis) [#31717](https://github.com/nodejs/node/pull/31717) +* [[`54caf76210`](https://github.com/nodejs/node/commit/54caf76210)] - **src**: allow to reuse env options handling (Denys Otrishko) [#31711](https://github.com/nodejs/node/pull/31711) +* [[`6ad8ca5ecf`](https://github.com/nodejs/node/commit/6ad8ca5ecf)] - **src**: do not unnecessarily re-assign uv handle data (Anna Henningsen) [#31696](https://github.com/nodejs/node/pull/31696) +* [[`2837788849`](https://github.com/nodejs/node/commit/2837788849)] - **src**: fix compile warnings in node\_url.cc (Anna Henningsen) [#31689](https://github.com/nodejs/node/pull/31689) +* [[`1d34ab5e43`](https://github.com/nodejs/node/commit/1d34ab5e43)] - **src**: modernized unique\_ptr construction (Yuhanun Citgez) [#31654](https://github.com/nodejs/node/pull/31654) +* [[`0e44902b85`](https://github.com/nodejs/node/commit/0e44902b85)] - **src**: remove dead code in InternalMakeCallback (Gerhard Stoebich) [#31622](https://github.com/nodejs/node/pull/31622) +* [[`348c7871b6`](https://github.com/nodejs/node/commit/348c7871b6)] - **src**: remove fixed-size GetHumanReadableProcessName (Ben Noordhuis) [#31633](https://github.com/nodejs/node/pull/31633) +* [[`8964077935`](https://github.com/nodejs/node/commit/8964077935)] - **src**: fix OOB reads in process.title getter (Ben Noordhuis) [#31633](https://github.com/nodejs/node/pull/31633) +* [[`af612bcc21`](https://github.com/nodejs/node/commit/af612bcc21)] - **src**: various minor improvements to node\_url (James M Snell) [#31651](https://github.com/nodejs/node/pull/31651) +* [[`f0ffa4cb80`](https://github.com/nodejs/node/commit/f0ffa4cb80)] - **src**: fix inspecting `MessagePort` from `init` async hook (Anna Henningsen) [#31600](https://github.com/nodejs/node/pull/31600) +* [[`425662e2d6`](https://github.com/nodejs/node/commit/425662e2d6)] - **src**: remove unused `Worker::child\_port\_` member (Anna Henningsen) [#31599](https://github.com/nodejs/node/pull/31599) +* [[`43e2c2e643`](https://github.com/nodejs/node/commit/43e2c2e643)] - **src**: change Fill() to use ParseArrayIndex() (ConorDavenport) [#31591](https://github.com/nodejs/node/pull/31591) +* [[`42b835412d`](https://github.com/nodejs/node/commit/42b835412d)] - **src**: remove duplicate field env in CryptoJob class (ConorDavenport) [#31554](https://github.com/nodejs/node/pull/31554) +* [[`9fd1e717e6`](https://github.com/nodejs/node/commit/9fd1e717e6)] - **src**: fix console debug output on Windows (Denys Otrishko) [#31580](https://github.com/nodejs/node/pull/31580) +* [[`277980d288`](https://github.com/nodejs/node/commit/277980d288)] - **src**: use \_\_executable\_start for linux hugepages (Ben Noordhuis) [#31547](https://github.com/nodejs/node/pull/31547) +* [[`6d5c3cd7ac`](https://github.com/nodejs/node/commit/6d5c3cd7ac)] - **src**: remove preview for heap dump utilities (Anna Henningsen) [#31570](https://github.com/nodejs/node/pull/31570) +* [[`c167ae0a87`](https://github.com/nodejs/node/commit/c167ae0a87)] - **src**: fix minor typo in base\_object.h (Daniel Bevenius) [#31535](https://github.com/nodejs/node/pull/31535) +* [[`f04576ede0`](https://github.com/nodejs/node/commit/f04576ede0)] - **src**: fix debug crash handling null strings (Rusty Conover) [#31523](https://github.com/nodejs/node/pull/31523) +* [[`ef4d081660`](https://github.com/nodejs/node/commit/ef4d081660)] - **src**: simplify native immediate queue running (Anna 
Henningsen) [#31502](https://github.com/nodejs/node/pull/31502) +* [[`bc0c1420f0`](https://github.com/nodejs/node/commit/bc0c1420f0)] - **src**: define noreturn attribute for windows (Alexander Smarus) [#31467](https://github.com/nodejs/node/pull/31467) +* [[`9e9dbd44fe`](https://github.com/nodejs/node/commit/9e9dbd44fe)] - **src**: reduce code duplication in BootstrapNode (Denys Otrishko) [#31465](https://github.com/nodejs/node/pull/31465) +* [[`76aad0e5e1`](https://github.com/nodejs/node/commit/76aad0e5e1)] - **src**: use custom fprintf alike to write errors to stderr (Anna Henningsen) [#31446](https://github.com/nodejs/node/pull/31446) +* [[`a685827a55`](https://github.com/nodejs/node/commit/a685827a55)] - **src**: add C++-style sprintf utility (Anna Henningsen) [#31446](https://github.com/nodejs/node/pull/31446) +* [[`049a1727d4`](https://github.com/nodejs/node/commit/049a1727d4)] - **src**: harden running native `SetImmediate()`s slightly (Anna Henningsen) [#31468](https://github.com/nodejs/node/pull/31468) +* [[`f56de5a3b4`](https://github.com/nodejs/node/commit/f56de5a3b4)] - **src**: move MemoryInfo() for worker code to .cc files (Anna Henningsen) [#31386](https://github.com/nodejs/node/pull/31386) +* [[`0cacc1facf`](https://github.com/nodejs/node/commit/0cacc1facf)] - **src**: add interrupts to Environments/Workers (Anna Henningsen) [#31386](https://github.com/nodejs/node/pull/31386) +* [[`f8c45b277f`](https://github.com/nodejs/node/commit/f8c45b277f)] - **src**: remove AsyncRequest (Anna Henningsen) [#31386](https://github.com/nodejs/node/pull/31386) +* [[`600e96ec04`](https://github.com/nodejs/node/commit/600e96ec04)] - **src**: add a threadsafe variant of SetImmediate() (Anna Henningsen) [#31386](https://github.com/nodejs/node/pull/31386) +* [[`74a7cdbe05`](https://github.com/nodejs/node/commit/74a7cdbe05)] - **src**: exclude C++ SetImmediate() from count (Anna Henningsen) [#31386](https://github.com/nodejs/node/pull/31386) +* [[`53e566bc50`](https://github.com/nodejs/node/commit/53e566bc50)] - **src**: better encapsulate native immediate list (Anna Henningsen) [#31386](https://github.com/nodejs/node/pull/31386) +* [[`b8face28e7`](https://github.com/nodejs/node/commit/b8face28e7)] - **src**: reduce large pages code duplication (Gabriel Schulhof) [#31385](https://github.com/nodejs/node/pull/31385) +* [[`83dd65a469`](https://github.com/nodejs/node/commit/83dd65a469)] - **src**: fix ignore GCC -Wcast-function-type for older compilers (Denys Otrishko) [#31524](https://github.com/nodejs/node/pull/31524) +* [[`13c6965703`](https://github.com/nodejs/node/commit/13c6965703)] - **src**: ignore GCC -Wcast-function-type for v8.h (Daniel Bevenius) [#31475](https://github.com/nodejs/node/pull/31475) +* [[`3dd4089b9a`](https://github.com/nodejs/node/commit/3dd4089b9a)] - **(SEMVER-MINOR)** **src,lib**: make ^C print a JS stack trace (legendecas) [#29207](https://github.com/nodejs/node/pull/29207) +* [[`6d0b2267ce`](https://github.com/nodejs/node/commit/6d0b2267ce)] - **stream**: fix finished w/ 'close' before 'finish' (Robert Nagy) [#31534](https://github.com/nodejs/node/pull/31534) +* [[`80e75ab389`](https://github.com/nodejs/node/commit/80e75ab389)] - **stream**: add regression test for async iteration completion (Matteo Collina) [#31508](https://github.com/nodejs/node/pull/31508) +* [[`538582b43d`](https://github.com/nodejs/node/commit/538582b43d)] - ***Revert*** "**stream**: fix async iterator destroyed error propagation" (Matteo Collina) 
[#31508](https://github.com/nodejs/node/pull/31508) +* [[`f255053033`](https://github.com/nodejs/node/commit/f255053033)] - **stream**: fix finished writable/readable state (Robert Nagy) [#31527](https://github.com/nodejs/node/pull/31527) +* [[`3046648580`](https://github.com/nodejs/node/commit/3046648580)] - **stream**: implement throw for async iterator (Robert Nagy) [#31316](https://github.com/nodejs/node/pull/31316) +* [[`5a95fa4aeb`](https://github.com/nodejs/node/commit/5a95fa4aeb)] - **stream**: normalize async iterator stream destroy (Robert Nagy) [#31316](https://github.com/nodejs/node/pull/31316) +* [[`20d0a0e9a7`](https://github.com/nodejs/node/commit/20d0a0e9a7)] - **stream**: add async iterator support for v1 streams (Robert Nagy) [#31316](https://github.com/nodejs/node/pull/31316) +* [[`0654e6790d`](https://github.com/nodejs/node/commit/0654e6790d)] - **test**: mark test-fs-stat-bigint flaky on FreeBSD (Rich Trott) [#31728](https://github.com/nodejs/node/pull/31728) +* [[`6dbe6bde56`](https://github.com/nodejs/node/commit/6dbe6bde56)] - **test**: fix flaky parallel/test-repl-history-navigation test (Ruben Bridgewater) [#31708](https://github.com/nodejs/node/pull/31708) +* [[`1dae7dc6bc`](https://github.com/nodejs/node/commit/1dae7dc6bc)] - **test**: improve test-fs-stat-bigint (Rich Trott) [#31726](https://github.com/nodejs/node/pull/31726) +* [[`fa9b59276d`](https://github.com/nodejs/node/commit/fa9b59276d)] - **test**: remove flaky designation for test-fs-stat-bigint (Rich Trott) [#30437](https://github.com/nodejs/node/pull/30437) +* [[`d36ba2b555`](https://github.com/nodejs/node/commit/d36ba2b555)] - **test**: fix flaky test-fs-stat-bigint (Duncan Healy) [#30437](https://github.com/nodejs/node/pull/30437) +* [[`5b3c4b3e7d`](https://github.com/nodejs/node/commit/5b3c4b3e7d)] - ***Revert*** "**test**: refactor all benchmark tests to use the new test option" (Anna Henningsen) [#31722](https://github.com/nodejs/node/pull/31722) +* [[`2c0f3028c9`](https://github.com/nodejs/node/commit/2c0f3028c9)] - **test**: add debugging output to test-net-listen-after-destroy-stdin (Rich Trott) [#31698](https://github.com/nodejs/node/pull/31698) +* [[`2224211609`](https://github.com/nodejs/node/commit/2224211609)] - **test**: improve assertion message in test-dns-any (Rich Trott) [#31697](https://github.com/nodejs/node/pull/31697) +* [[`b0e37b7180`](https://github.com/nodejs/node/commit/b0e37b7180)] - **test**: fix flaky test-trace-sigint-on-idle (Anna Henningsen) [#31645](https://github.com/nodejs/node/pull/31645) +* [[`58f17c0e6b`](https://github.com/nodejs/node/commit/58f17c0e6b)] - **test**: stricter assert color test (Ruben Bridgewater) [#31429](https://github.com/nodejs/node/pull/31429) +* [[`89dcf733c6`](https://github.com/nodejs/node/commit/89dcf733c6)] - **test**: improve logged errors (Ruben Bridgewater) [#31425](https://github.com/nodejs/node/pull/31425) +* [[`4878c7a197`](https://github.com/nodejs/node/commit/4878c7a197)] - **test**: refactor all benchmark tests to use the new test option (Ruben Bridgewater) [#31396](https://github.com/nodejs/node/pull/31396) +* [[`3bcc2da887`](https://github.com/nodejs/node/commit/3bcc2da887)] - **test**: fix test-benchmark-http (Rich Trott) [#31686](https://github.com/nodejs/node/pull/31686) +* [[`6139d4ea3b`](https://github.com/nodejs/node/commit/6139d4ea3b)] - **test**: fix flaky test-inspector-connect-main-thread (Anna Henningsen) [#31637](https://github.com/nodejs/node/pull/31637) +* 
[[`13c256d31d`](https://github.com/nodejs/node/commit/13c256d31d)] - **test**: add test-dns-promises-lookupService (Rich Trott) [#31640](https://github.com/nodejs/node/pull/31640) +* [[`23fefba84c`](https://github.com/nodejs/node/commit/23fefba84c)] - **test**: fix flaky test-http2-stream-destroy-event-order (Anna Henningsen) [#31610](https://github.com/nodejs/node/pull/31610) +* [[`435b9c977a`](https://github.com/nodejs/node/commit/435b9c977a)] - **test**: abstract common assertions in readline-interface test (Ruben Bridgewater) [#31423](https://github.com/nodejs/node/pull/31423) +* [[`d2a12d3af8`](https://github.com/nodejs/node/commit/d2a12d3af8)] - **test**: refactor test-readline-interface.js (Ruben Bridgewater) [#31423](https://github.com/nodejs/node/pull/31423) +* [[`7c3cc94b9f`](https://github.com/nodejs/node/commit/7c3cc94b9f)] - **test**: unset NODE\_OPTIONS for cctest (Anna Henningsen) [#31594](https://github.com/nodejs/node/pull/31594) +* [[`62d0c6029d`](https://github.com/nodejs/node/commit/62d0c6029d)] - **test**: simplify test-https-simple.js (Sam Roberts) [#31584](https://github.com/nodejs/node/pull/31584) +* [[`49be50051c`](https://github.com/nodejs/node/commit/49be50051c)] - **test**: show child stderr output in largepages test (Ben Noordhuis) [#31612](https://github.com/nodejs/node/pull/31612) +* [[`c3247fedd9`](https://github.com/nodejs/node/commit/c3247fedd9)] - **test**: mark additional tests as flaky on Windows (Anna Henningsen) [#31606](https://github.com/nodejs/node/pull/31606) +* [[`3fdec1c790`](https://github.com/nodejs/node/commit/3fdec1c790)] - **test**: fix flaky test-memory-usage (Anna Henningsen) [#31602](https://github.com/nodejs/node/pull/31602) +* [[`23da559ab2`](https://github.com/nodejs/node/commit/23da559ab2)] - **test**: verify threadId in reports (Dylan Coakley) [#31556](https://github.com/nodejs/node/pull/31556) +* [[`5a12cd636b`](https://github.com/nodejs/node/commit/5a12cd636b)] - **test**: remove --experimental-worker flag comment (Harshitha KP) [#31563](https://github.com/nodejs/node/pull/31563) +* [[`07525c317e`](https://github.com/nodejs/node/commit/07525c317e)] - **test**: make test-http2-buffersize more correct (Anna Henningsen) [#31502](https://github.com/nodejs/node/pull/31502) +* [[`c4a2f94a11`](https://github.com/nodejs/node/commit/c4a2f94a11)] - **test**: cover property n-api null cases (Gabriel Schulhof) [#31488](https://github.com/nodejs/node/pull/31488) +* [[`f2dc694805`](https://github.com/nodejs/node/commit/f2dc694805)] - **test**: fix test-heapdump-worker (Anna Henningsen) [#31494](https://github.com/nodejs/node/pull/31494) +* [[`b25ea9b1dc`](https://github.com/nodejs/node/commit/b25ea9b1dc)] - **test**: add tests for main() argument handling (cjihrig) [#31426](https://github.com/nodejs/node/pull/31426) +* [[`38ea53629b`](https://github.com/nodejs/node/commit/38ea53629b)] - **test**: add wasi test for freopen() (cjihrig) [#31432](https://github.com/nodejs/node/pull/31432) +* [[`c2792aad44`](https://github.com/nodejs/node/commit/c2792aad44)] - **test**: remove bluebird remnants from test fixture (Rich Trott) [#31435](https://github.com/nodejs/node/pull/31435) +* [[`583d1d9f55`](https://github.com/nodejs/node/commit/583d1d9f55)] - **test**: improve wasi stat test (cjihrig) [#31413](https://github.com/nodejs/node/pull/31413) +* [[`676b84a803`](https://github.com/nodejs/node/commit/676b84a803)] - **(SEMVER-MINOR)** **test**: skip keygen tests on arm systems (Tobias Nießen) [#31178](https://github.com/nodejs/node/pull/31178) +* 
[[`099c921f40`](https://github.com/nodejs/node/commit/099c921f40)] - **test**: add wasi test for symlink() and readlink() (cjihrig) [#31403](https://github.com/nodejs/node/pull/31403) +* [[`6256d0ae92`](https://github.com/nodejs/node/commit/6256d0ae92)] - **test**: update postmortem test with v12 constants (Matheus Marchini) [#31391](https://github.com/nodejs/node/pull/31391) +* [[`0bafb5c8c8`](https://github.com/nodejs/node/commit/0bafb5c8c8)] - **test**: export public symbols in addons tests (Ben Noordhuis) [#28717](https://github.com/nodejs/node/pull/28717) +* [[`6833f62e9d`](https://github.com/nodejs/node/commit/6833f62e9d)] - **test**: add promises metadata to postmortem test (Matheus Marchini) [#31357](https://github.com/nodejs/node/pull/31357) +* [[`41524282b5`](https://github.com/nodejs/node/commit/41524282b5)] - **test,benchmark**: fix test-benchmark-zlib (Rich Trott) [#31538](https://github.com/nodejs/node/pull/31538) +* [[`c34872e464`](https://github.com/nodejs/node/commit/c34872e464)] - **test,dns**: add coverage for dns exception (Rich Trott) [#31678](https://github.com/nodejs/node/pull/31678) +* [[`03aac4e65d`](https://github.com/nodejs/node/commit/03aac4e65d)] - **tls**: simplify errors using ThrowCryptoError (Tobias Nießen) [#31436](https://github.com/nodejs/node/pull/31436) +* [[`95d509e974`](https://github.com/nodejs/node/commit/95d509e974)] - **tools**: update Markdown linter to be cross-platform (Derek Lewis) [#31239](https://github.com/nodejs/node/pull/31239) +* [[`328b8a6444`](https://github.com/nodejs/node/commit/328b8a6444)] - **tools**: unify make-v8.sh for ppc64le and s390x (Richard Lau) [#31628](https://github.com/nodejs/node/pull/31628) +* [[`39c86bbe4c`](https://github.com/nodejs/node/commit/39c86bbe4c)] - **tools**: replace deprecated iteritems() for items() (Giovanny Andres Gongora Granada (Gioyik)) [#31528](https://github.com/nodejs/node/pull/31528) +* [[`be55f3ec4f`](https://github.com/nodejs/node/commit/be55f3ec4f)] - **tty**: do not end in an infinite warning recursion (Ruben Bridgewater) [#31429](https://github.com/nodejs/node/pull/31429) +* [[`a0c1ceddbc`](https://github.com/nodejs/node/commit/a0c1ceddbc)] - **util**: throw if unreachable TypedArray checking code is reached (Rich Trott) [#31737](https://github.com/nodejs/node/pull/31737) +* [[`7b9d6d08f4`](https://github.com/nodejs/node/commit/7b9d6d08f4)] - **util**: add coverage for util.inspect.colors alias setter (Rich Trott) [#31743](https://github.com/nodejs/node/pull/31743) +* [[`9f9edc2c78`](https://github.com/nodejs/node/commit/9f9edc2c78)] - **util**: throw if unreachable code is reached (Rich Trott) [#31712](https://github.com/nodejs/node/pull/31712) +* [[`5e1bee817c`](https://github.com/nodejs/node/commit/5e1bee817c)] - **util**: fix inspection of typed arrays with unusual length (Ruben Bridgewater) [#31458](https://github.com/nodejs/node/pull/31458) +* [[`3da4d5174c`](https://github.com/nodejs/node/commit/3da4d5174c)] - **util**: improve unicode support (Ruben Bridgewater) [#31319](https://github.com/nodejs/node/pull/31319) +* [[`822f2ac640`](https://github.com/nodejs/node/commit/822f2ac640)] - **worker**: add support for .cjs extension (Antoine du HAMEL) [#31662](https://github.com/nodejs/node/pull/31662) +* [[`cd99dc7368`](https://github.com/nodejs/node/commit/cd99dc7368)] - **worker**: properly handle env and NODE\_OPTIONS in workers (Denys Otrishko) [#31711](https://github.com/nodejs/node/pull/31711) +* [[`1592c474da`](https://github.com/nodejs/node/commit/1592c474da)] - **worker**: 
reset `Isolate` stack limit after entering `Locker` (Anna Henningsen) [#31593](https://github.com/nodejs/node/pull/31593) +* [[`3e5803f91b`](https://github.com/nodejs/node/commit/3e5803f91b)] - **worker**: improve MessagePort performance (Anna Henningsen) [#31605](https://github.com/nodejs/node/pull/31605) +* [[`8d3ffbeb55`](https://github.com/nodejs/node/commit/8d3ffbeb55)] - **(SEMVER-MINOR)** **worker**: add ability to take heap snapshot from parent thread (Anna Henningsen) [#31569](https://github.com/nodejs/node/pull/31569) +* [[`6fdef457c6`](https://github.com/nodejs/node/commit/6fdef457c6)] - **worker**: remove redundant closing of child port (aaccttrr) [#31555](https://github.com/nodejs/node/pull/31555) +* [[`5656ec9f71`](https://github.com/nodejs/node/commit/5656ec9f71)] - **worker**: move JoinThread() back into exit callback (Anna Henningsen) [#31468](https://github.com/nodejs/node/pull/31468) + ## 2020-02-06, Version 13.8.0 (Current), @BethGriggs diff --git a/doc/guides/adding-new-napi-api.md b/doc/guides/adding-new-napi-api.md index dc8d9dda233..825b4877783 100644 --- a/doc/guides/adding-new-napi-api.md +++ b/doc/guides/adding-new-napi-api.md @@ -1,6 +1,6 @@ # Contributing a new API to N-API -N-API is Node.js's next generation ABI-stable API for native modules. +N-API is the next-generation ABI-stable API for native modules. While improving the API surface is encouraged and welcomed, the following are a set of principles and guidelines to keep in mind while adding a new N-API API. diff --git a/doc/guides/backporting-to-release-lines.md b/doc/guides/backporting-to-release-lines.md index 4a4657d0815..55fbcb7e5bc 100644 --- a/doc/guides/backporting-to-release-lines.md +++ b/doc/guides/backporting-to-release-lines.md @@ -75,7 +75,7 @@ replace that with the staging branch for the targeted release line. 9. Open a pull request: 1. Be sure to target the `v10.x-staging` branch in the pull request. 1. Include the backport target in the pull request title in the following - format — `[v10.x backport] `. + format: `[v10.x backport] `. Example: `[v10.x backport] process: improve performance of nextTick` 1. Check the checkbox labeled "Allow edits from maintainers". 1. In the description add a reference to the original PR. diff --git a/COLLABORATOR_GUIDE.md b/doc/guides/collaborator-guide.md similarity index 100% rename from COLLABORATOR_GUIDE.md rename to doc/guides/collaborator-guide.md diff --git a/doc/guides/contributing/coc.md b/doc/guides/contributing/code-of-conduct.md similarity index 100% rename from doc/guides/contributing/coc.md rename to doc/guides/contributing/code-of-conduct.md diff --git a/doc/guides/contributing/issues.md b/doc/guides/contributing/issues.md index 054bbd7b277..31a47c1cd33 100644 --- a/doc/guides/contributing/issues.md +++ b/doc/guides/contributing/issues.md @@ -89,8 +89,8 @@ around it. Some contributors may have differing opinions about the issue, including whether the behavior being seen is a bug or a feature. This discussion is part of the process and should be kept focused, helpful, and professional. -Short, clipped responses—that provide neither additional context nor supporting -detail—are not helpful or professional. To many, such responses are simply +Short, clipped responses that provide neither additional context nor supporting +detail are not helpful or professional. To many, such responses are simply annoying and unfriendly. 
Contributors are encouraged to help one another make forward progress as much diff --git a/doc/guides/contributing/pull-requests.md b/doc/guides/contributing/pull-requests.md index 7f4ab4e83e0..39b84bc34f1 100644 --- a/doc/guides/contributing/pull-requests.md +++ b/doc/guides/contributing/pull-requests.md @@ -115,13 +115,13 @@ If you are modifying code, please be sure to run `make lint` from time to time to ensure that the changes follow the Node.js code style guide. Any documentation you write (including code comments and API documentation) -should follow the [Style Guide](../../STYLE_GUIDE.md). Code samples included -in the API docs will also be checked when running `make lint` (or +should follow the [Style Guide](../doc-style-guide.md). Code samples +included in the API docs will also be checked when running `make lint` (or `vcbuild.bat lint` on Windows). If you are adding to or deprecating an API, use `REPLACEME` for the version number in the documentation YAML. For contributing C++ code, you may want to look at the -[C++ Style Guide](../../../CPP_STYLE_GUIDE.md), as well as the +[C++ Style Guide](../../cpp-style-guide.md), as well as the [README of `src/`](../../../src/README.md) for an overview of Node.js C++ internals. diff --git a/CPP_STYLE_GUIDE.md b/doc/guides/cpp-style-guide.md similarity index 100% rename from CPP_STYLE_GUIDE.md rename to doc/guides/cpp-style-guide.md diff --git a/doc/STYLE_GUIDE.md b/doc/guides/doc-style-guide.md similarity index 89% rename from doc/STYLE_GUIDE.md rename to doc/guides/doc-style-guide.md index f0221991b79..7ee2cac4ec3 100644 --- a/doc/STYLE_GUIDE.md +++ b/doc/guides/doc-style-guide.md @@ -25,12 +25,10 @@ * Outside of the wrapping element if the wrapping element contains only a fragment of a clause. * Documents must start with a level-one heading. -* Prefer affixing links to inlining links — prefer `[a link][]` to - `[a link](http://example.com)`. +* Prefer affixing links (`[a link][]`) to inlining links + (`[a link](http://example.com)`). * When documenting APIs, update the YAML comment associated with the API as appropriate. This is especially true when introducing or deprecating an API. -* Use [Em dashes][] ("—" or `Option+Shift+"-"` on macOS) surrounded by spaces, - as per [The New York Times Manual of Style and Usage][]. * For code blocks: * Use language-aware fences. ("```js") * Code need not be complete. Treat code blocks as an illustration or aid to @@ -67,9 +65,7 @@ See also API documentation structure overview in [doctools README][]. -[Em dashes]: https://en.wikipedia.org/wiki/Dash#Em_dash [Javascript type]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Grammar_and_types#Data_structures_and_types [serial commas]: https://en.wikipedia.org/wiki/Serial_comma -[The New York Times Manual of Style and Usage]: https://en.wikipedia.org/wiki/The_New_York_Times_Manual_of_Style_and_Usage [plugin]: https://editorconfig.org/#download [doctools README]: ../tools/doc/README.md diff --git a/doc/guides/internal/readme.md b/doc/guides/internal-api.md similarity index 100% rename from doc/guides/internal/readme.md rename to doc/guides/internal-api.md diff --git a/doc/guides/maintaining-icu.md b/doc/guides/maintaining-icu.md index 4add40b7fa2..85f13c91298 100644 --- a/doc/guides/maintaining-icu.md +++ b/doc/guides/maintaining-icu.md @@ -218,7 +218,7 @@ following the steps above in the prior section of this document ought to be repeatable without concern for overriding a patch. 2.
**Verifiability.** Given the number of files modified in an ICU PR, -a floating patch could easily be missed — or dropped altogether next time +a floating patch could easily be missed or dropped altogether next time something is landed. 3. **Compatibility.** There are a number of ways that ICU can be loaded into diff --git a/doc/guides/updating-root-certs.md b/doc/guides/maintaining-root-certs.md similarity index 99% rename from doc/guides/updating-root-certs.md rename to doc/guides/maintaining-root-certs.md index 41c83e5898b..d26bdad943a 100644 --- a/doc/guides/updating-root-certs.md +++ b/doc/guides/maintaining-root-certs.md @@ -1,4 +1,4 @@ -# Updating the Root Certificates +# Maintaining the Root Certificates Node.js contains a compiled-in set of root certificates used as trust anchors for TLS certificate validation. diff --git a/doc/guides/maintaining-zlib.md b/doc/guides/maintaining-zlib.md new file mode 100644 index 00000000000..c293fdf5d40 --- /dev/null +++ b/doc/guides/maintaining-zlib.md @@ -0,0 +1,34 @@ +# Maintaining zlib + +This copy of zlib comes from the Chromium team's zlib fork, which incorporates +performance improvements not currently available in standard zlib. + +## Updating zlib + +Update zlib: +```shell +git clone https://chromium.googlesource.com/chromium/src/third_party/zlib +cp deps/zlib/zlib.gyp deps/zlib/win32/zlib.def deps +rm -rf deps/zlib zlib/.git +mv zlib deps/ +mv deps/zlib.gyp deps/zlib/ +mkdir deps/zlib/win32 +mv deps/zlib.def deps/zlib/win32 +sed -i -- 's_^#include "chromeconf.h"_//#include "chromeconf.h"_' deps/zlib/zconf.h +``` + +Check that Node.js still builds and passes its tests. + +It may be necessary to update deps/zlib/zlib.gyp if any significant changes have +occurred upstream. + +## Committing zlib + +Add zlib: `git add --all deps/zlib` + +Commit the changes with a message like +```text +deps: update zlib to upstream d7f3ca9 + +Updated as described in doc/guides/maintaining-zlib.md. +``` diff --git a/doc/offboarding.md b/doc/guides/offboarding.md similarity index 100% rename from doc/offboarding.md rename to doc/guides/offboarding.md diff --git a/doc/onboarding-extras.md b/doc/guides/onboarding-extras.md similarity index 100% rename from doc/onboarding-extras.md rename to doc/guides/onboarding-extras.md diff --git a/doc/releases.md b/doc/guides/releases.md similarity index 97% rename from doc/releases.md rename to doc/guides/releases.md index b20bf3662e3..37018e1b3fd 100644 --- a/doc/releases.md +++ b/doc/guides/releases.md @@ -498,17 +498,6 @@ $ git secure-tag -sm "YYYY-MM-DD Node.js vx.y.z ( -``` - -*Note*: Please do not push the tag unless you are ready to complete the -remainder of the release steps. ### 12. Set Up For the Next Release On the release proposal branch, edit `src/node_version.h` again and: @@ -547,14 +536,49 @@ cherry-pick the "Working on vx.y.z" commit to `master`. Run `make lint` before pushing to `master`, to make sure the Changelog formatting passes the lint rules on `master`. -### 13. Promote and Sign the Release Builds +### 13. Push the release tag + +Push the tag to the repo before you promote the builds. If you haven't pushed +your tag first, build promotion won't work properly. Push the tag using the +following command: + +```console +$ git push + +``` + +*Note*: Please do not push the tag unless you are ready to complete the +remainder of the release steps. + +### 14.
Promote and Sign the Release Builds **The same individual who signed the release tag must be the one to promote the builds as the `SHASUMS256.txt` file needs to be signed with the same GPG key!** -Use `tools/release.sh` to promote and sign the build. When run, it will perform -the following actions: +Use `tools/release.sh` to promote and sign the build. Before doing this, you'll +need to ensure you've loaded the correct ssh key, or you'll see the following: + +```sh +# Checking for releases ... +Enter passphrase for key '/Users//.ssh/id_rsa': +dist@direct.nodejs.org's password: +``` + +The key can be loaded either with `ssh-add`: + +```sh +# Substitute node_id_rsa with whatever you've named the key +$ ssh-add ~/.ssh/node_id_rsa +``` + +or at runtime with: + +```sh +# Substitute node_id_rsa with whatever you've named the key +$ ./tools/release.sh -i ~/.ssh/node_id_rsa +``` + +`tools/release.sh` will perform the following actions when run: **a.** Select a GPG key from your private keys. It will use a command similar to `gpg --list-secret-keys` to list your keys. If you don't have any keys, it @@ -598,7 +622,7 @@ be prompted to re-sign `SHASUMS256.txt`. **It is possible to only sign a release by running `./tools/release.sh -s vX.Y.Z`.** -### 14. Check the Release +### 15. Check the Release Your release should be available at `https://nodejs.org/dist/vx.y.z/` and . Check that the appropriate files are in @@ -607,7 +631,7 @@ have the right internal version strings. Check that the API docs are available at . Check that the release catalog files are correct at and . -### 15. Create a Blog Post +### 16. Create a Blog Post There is an automatic build that is kicked off when you promote new builds, so within a few minutes nodejs.org will be listing your new version as the latest @@ -640,7 +664,7 @@ This script will use the promoted builds and changelog to generate the post. Run * Changes to `master` on the [nodejs.org repository][] will trigger a new build of nodejs.org so your changes should appear a few minutes after pushing. -### 16. Create the release on GitHub +### 17. Create the release on GitHub * Go to the [New release page](https://github.com/nodejs/node/releases/new). * Select the tag version you pushed earlier. @@ -648,11 +672,11 @@ This script will use the promoted builds and changelog to generate the post. Run * For the description, copy the rest of the changelog entry. * Click on the "Publish release" button. -### 17. Cleanup +### 18. Cleanup Close your release proposal PR and delete the proposal branch. -### 18. Announce +### 19. Announce The nodejs.org website will automatically rebuild and include the new version. To announce the build on Twitter through the official @nodejs account, email @@ -669,7 +693,7 @@ announcements. Ping the IRC ops and the other [Partner Communities][] liaisons. -### 19. Celebrate +### 20.
Celebrate _In whatever form you do this..._ diff --git a/doc/guides/security_release_process.md b/doc/guides/security-release-process.md similarity index 100% rename from doc/guides/security_release_process.md rename to doc/guides/security-release-process.md diff --git a/benchmark/writing-and-running-benchmarks.md b/doc/guides/writing-and-running-benchmarks.md similarity index 99% rename from benchmark/writing-and-running-benchmarks.md rename to doc/guides/writing-and-running-benchmarks.md index 1db72d22de5..0075023ce80 100644 --- a/benchmark/writing-and-running-benchmarks.md +++ b/doc/guides/writing-and-running-benchmarks.md @@ -519,7 +519,8 @@ const common = require('../common.js'); const bench = common.createBenchmark(main, { kb: [64, 128, 256, 1024], - connections: [100, 500] + connections: [100, 500], + duration: 5 }); function main(conf) { @@ -546,8 +547,8 @@ Supported options keys are: * `path` - defaults to `/` * `connections` - number of concurrent connections to use, defaults to 100 * `duration` - duration of the benchmark in seconds, defaults to 10 -* `benchmarker` - benchmarker to use, defaults to -`common.default_http_benchmarker` +* `benchmarker` - benchmarker to use, defaults to the first available http + benchmarker [autocannon]: https://github.com/mcollina/autocannon [wrk]: https://github.com/wg/wrk diff --git a/doc/node.1 b/doc/node.1 index f251eab8dff..71325cc1cde 100644 --- a/doc/node.1 +++ b/doc/node.1 @@ -119,6 +119,11 @@ Enable experimental ES modules support for import.meta.resolve(). .It Fl -experimental-json-modules Enable experimental JSON interop support for the ES Module loader. . +.It Fl -experimental-loader Ns = Ns Ar module +Specify the +.Ar module +to use as a custom module loader. +. .It Fl -experimental-modules Enable latest experimental modules features. . @@ -220,17 +225,21 @@ Default is V8 Inspector integration allows attaching Chrome DevTools and IDEs to Node.js instances for debugging and profiling. It uses the Chrome DevTools Protocol. . -.It Fl -experimental-loader Ns = Ns Ar module -Specify the -.Ar module -to use as a custom module loader. -. .It Fl -insecure-http-parser Use an insecure HTTP parser that accepts invalid HTTP headers. This may allow interoperability with non-conformant HTTP implementations. It may also allow request smuggling and other HTTP attacks that rely on invalid headers being accepted. Avoid using this option. . +.It Fl -jitless +Disable runtime allocation of executable memory. This may be required on +some platforms for security reasons. It can also reduce attack surface on +other platforms, but the performance impact may be severe. +. +.Pp +This flag is inherited from V8 and is subject to change upstream. It may +disappear in a non-semver-major release. +. .It Fl -max-http-header-size Ns = Ns Ar size Specify the maximum size of HTTP headers in bytes. Defaults to 8KB. .
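As a quick illustration of the `--max-http-header-size` flag documented above: the configured limit is exposed at runtime as `http.maxHeaderSize`, so a short script can confirm the value in effect. A minimal sketch (the file name `check-header-size.js` is an assumption for the example):

```js
'use strict';
// Run with: node --max-http-header-size=16384 check-header-size.js
const http = require('http');

// http.maxHeaderSize reflects the limit passed on the command line
// (8192 bytes, i.e. 8KB, when the flag is omitted).
console.log(`HTTP header limit: ${http.maxHeaderSize} bytes`);
```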
diff --git a/lib/_http_agent.js b/lib/_http_agent.js index 5e3298b594d..a527e84865c 100644 --- a/lib/_http_agent.js +++ b/lib/_http_agent.js @@ -120,6 +120,12 @@ function Agent(options) { socket[async_id_symbol] = -1; socket._httpMessage = null; this.removeSocket(socket, options); + + const agentTimeout = this.options.timeout || 0; + if (socket.timeout !== agentTimeout) { + socket.setTimeout(agentTimeout); + } + freeSockets.push(socket); } else { // Implementation doesn't want to keep socket alive @@ -202,12 +208,21 @@ Agent.prototype.addRequest = function addRequest(req, options, port/* legacy */, this.sockets[name] = []; } - const freeLen = this.freeSockets[name] ? this.freeSockets[name].length : 0; + const freeSockets = this.freeSockets[name]; + let socket; + if (freeSockets) { + while (freeSockets.length && freeSockets[0].destroyed) { + freeSockets.shift(); + } + socket = freeSockets.shift(); + if (!freeSockets.length) + delete this.freeSockets[name]; + } + + const freeLen = freeSockets ? freeSockets.length : 0; const sockLen = freeLen + this.sockets[name].length; - if (freeLen) { - // We have a free socket, so use that. - const socket = this.freeSockets[name].shift(); + if (socket) { // Guard against an uninitialized or user supplied Socket. const handle = socket._handle; if (handle && typeof handle.asyncReset === 'function') { @@ -216,10 +231,6 @@ Agent.prototype.addRequest = function addRequest(req, options, port/* legacy */, socket[async_id_symbol] = handle.getAsyncId(); } - // don't leak - if (!this.freeSockets[name].length) - delete this.freeSockets[name]; - this.reuseSocket(socket, req); setRequestSocket(this, req, socket); this.sockets[name].push(socket); @@ -319,6 +330,20 @@ function installListeners(agent, s, options) { } s.on('close', onClose); + function onTimeout() { + debug('CLIENT socket onTimeout'); + + // Destroy if in free list. + // TODO(ronag): Always destroy, even if not in free list. 
+ const sockets = agent.freeSockets; + for (const name of ObjectKeys(sockets)) { + if (sockets[name].includes(s)) { + return s.destroy(); + } + } + } + s.on('timeout', onTimeout); + function onRemove() { // We need this function for cases like HTTP 'upgrade' // (defined by WebSockets) where we need to remove a socket from the @@ -327,6 +352,7 @@ function installListeners(agent, s, options) { agent.removeSocket(s, options); s.removeListener('close', onClose); s.removeListener('free', onFree); + s.removeListener('timeout', onTimeout); s.removeListener('agentRemove', onRemove); } s.on('agentRemove', onRemove); @@ -409,14 +435,6 @@ function setRequestSocket(agent, req, socket) { return; } socket.setTimeout(req.timeout); - // Reset timeout after response end - req.once('response', (res) => { - res.once('end', () => { - if (socket.timeout !== agentTimeout) { - socket.setTimeout(agentTimeout); - } - }); - }); } function emitErrorNT(emitter, err) { diff --git a/lib/_http_client.js b/lib/_http_client.js index fef4b635a00..c447b695c8b 100644 --- a/lib/_http_client.js +++ b/lib/_http_client.js @@ -29,6 +29,7 @@ const { ObjectAssign, ObjectKeys, ObjectSetPrototypeOf, + Symbol } = primordials; const net = require('net'); @@ -65,6 +66,7 @@ const { } = require('internal/dtrace'); const INVALID_PATH_REGEX = /[^\u0021-\u00ff]/; +const kError = Symbol('kError'); function validateHost(host, name) { if (host !== null && host !== undefined && typeof host !== 'string') { @@ -337,10 +339,19 @@ ClientRequest.prototype._implicitHeader = function _implicitHeader() { }; ClientRequest.prototype.abort = function abort() { - if (!this.aborted) { - process.nextTick(emitAbortNT, this); + if (this.aborted) { + return; } this.aborted = true; + process.nextTick(emitAbortNT, this); + this.destroy(); +}; + +ClientRequest.prototype.destroy = function destroy(err) { + if (this.destroyed) { + return; + } + this.destroyed = true; // If we're aborting, we don't care about any more response data. if (this.res) { @@ -350,11 +361,29 @@ ClientRequest.prototype.abort = function abort() { // In the event that we don't have a socket, we will pop out of // the request queue through handling in onSocket. if (this.socket) { - // in-progress - this.socket.destroy(); + _destroy(this, this.socket, err); + } else if (err) { + this[kError] = err; } }; +function _destroy(req, socket, err) { + // TODO (ronag): Check if socket was used at all (e.g. headersSent) and + // re-use it in that case. `req.socket` just checks whether the socket was + // assigned to the request and *might* have been used. + if (!req.agent || req.socket) { + socket.destroy(err); + } else { + socket.emit('free'); + if (!req.aborted && !err) { + err = connResetException('socket hang up'); + } + if (err) { + req.emit('error', err); + } + req.emit('close'); + } +} function emitAbortNT(req) { req.emit('abort'); @@ -750,14 +779,8 @@ ClientRequest.prototype.onSocket = function onSocket(socket) { }; function onSocketNT(req, socket) { - if (req.aborted) { - // If we were aborted while waiting for a socket, skip the whole thing. 
- if (!req.agent) { - socket.destroy(); - } else { - req.emit('close'); - socket.emit('free'); - } + if (req.destroyed) { + _destroy(req, socket, req[kError]); } else { tickOnSocket(req, socket); } diff --git a/lib/_http_outgoing.js b/lib/_http_outgoing.js index c1f277da3ff..a7bd6af6007 100644 --- a/lib/_http_outgoing.js +++ b/lib/_http_outgoing.js @@ -93,6 +93,7 @@ function OutgoingMessage() { this.outputSize = 0; this.writable = true; + this.destroyed = false; this._last = false; this.chunkedEncoding = false; @@ -277,6 +278,11 @@ OutgoingMessage.prototype.setTimeout = function setTimeout(msecs, callback) { // any messages, before ever calling this. In that case, just skip // it, since something else is destroying this connection anyway. OutgoingMessage.prototype.destroy = function destroy(error) { + if (this.destroyed) { + return; + } + this.destroyed = true; + if (this.socket) { this.socket.destroy(error); } else { @@ -619,7 +625,7 @@ OutgoingMessage.prototype.removeHeader = function removeHeader(name) { OutgoingMessage.prototype._implicitHeader = function _implicitHeader() { - this.emit('error', new ERR_METHOD_NOT_IMPLEMENTED('_implicitHeader()')); + throw new ERR_METHOD_NOT_IMPLEMENTED('_implicitHeader()'); }; ObjectDefineProperty(OutgoingMessage.prototype, 'headersSent', { @@ -641,17 +647,20 @@ OutgoingMessage.prototype.write = function write(chunk, encoding, callback) { return ret; }; +function writeAfterEnd(msg, callback) { + const err = new ERR_STREAM_WRITE_AFTER_END(); + const triggerAsyncId = msg.socket ? msg.socket[async_id_symbol] : undefined; + defaultTriggerAsyncIdScope(triggerAsyncId, + process.nextTick, + writeAfterEndNT, + msg, + err, + callback); +} + function write_(msg, chunk, encoding, callback, fromEnd) { if (msg.finished) { - const err = new ERR_STREAM_WRITE_AFTER_END(); - const triggerAsyncId = msg.socket ? 
msg.socket[async_id_symbol] : undefined; - defaultTriggerAsyncIdScope(triggerAsyncId, - process.nextTick, - writeAfterEndNT, - msg, - err, - callback); - + writeAfterEnd(msg, callback); return true; } @@ -748,17 +757,6 @@ OutgoingMessage.prototype.end = function end(chunk, encoding, callback) { encoding = null; } - if (this.finished) { - if (typeof callback === 'function') { - if (!this.writableFinished) { - this.on('finish', callback); - } else { - callback(new ERR_STREAM_ALREADY_FINISHED('end')); - } - } - return this; - } - if (this.socket) { this.socket.cork(); } @@ -767,6 +765,12 @@ OutgoingMessage.prototype.end = function end(chunk, encoding, callback) { if (typeof chunk !== 'string' && !(chunk instanceof Buffer)) { throw new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk); } + + if (this.finished) { + writeAfterEnd(this, callback); + return this; + } + if (!this._header) { if (typeof chunk === 'string') this._contentLength = Buffer.byteLength(chunk, encoding); @@ -774,6 +778,15 @@ OutgoingMessage.prototype.end = function end(chunk, encoding, callback) { this._contentLength = chunk.length; } write_(this, chunk, encoding, null, true); + } else if (this.finished) { + if (typeof callback === 'function') { + if (!this.writableFinished) { + this.on('finish', callback); + } else { + callback(new ERR_STREAM_ALREADY_FINISHED('end')); + } + } + return this; } else if (!this._header) { this._contentLength = 0; this._implicitHeader(); @@ -893,10 +906,6 @@ OutgoingMessage.prototype.flushHeaders = function flushHeaders() { this._send(''); }; -OutgoingMessage.prototype.flush = internalUtil.deprecate(function() { - this.flushHeaders(); -}, 'OutgoingMessage.flush is deprecated. Use flushHeaders instead.', 'DEP0001'); - OutgoingMessage.prototype.pipe = function pipe() { // OutgoingMessage should be write-only. Piping from it is disabled. this.emit('error', new ERR_STREAM_CANNOT_PIPE()); diff --git a/lib/_stream_readable.js b/lib/_stream_readable.js index 30c808efd5c..1993d29db64 100644 --- a/lib/_stream_readable.js +++ b/lib/_stream_readable.js @@ -628,7 +628,7 @@ function maybeReadMore_(stream, state) { // for virtual (non-string, non-buffer) streams, "length" is somewhat // arbitrary, and perhaps not very meaningful. Readable.prototype._read = function(n) { - errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()')); + throw new ERR_METHOD_NOT_IMPLEMENTED('_read()'); }; Readable.prototype.pipe = function(dest, pipeOpts) { diff --git a/lib/_stream_transform.js b/lib/_stream_transform.js index cb4aae2e6d1..5928afc2581 100644 --- a/lib/_stream_transform.js +++ b/lib/_stream_transform.js @@ -163,7 +163,7 @@ Transform.prototype.push = function(chunk, encoding) { // an error, then that'll put the hurt on the whole operation. If you // never call cb(), then you'll never get another chunk. 
Transform.prototype._transform = function(chunk, encoding, cb) { - cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()')); + throw new ERR_METHOD_NOT_IMPLEMENTED('_transform()'); }; Transform.prototype._write = function(chunk, encoding, cb) { diff --git a/lib/_stream_writable.js b/lib/_stream_writable.js index 2180b091b7b..8bc916a3a95 100644 --- a/lib/_stream_writable.js +++ b/lib/_stream_writable.js @@ -273,13 +273,8 @@ Writable.prototype.write = function(chunk, encoding, cb) { cb = nop; } - let err; - if (state.ending) { - err = new ERR_STREAM_WRITE_AFTER_END(); - } else if (state.destroyed) { - err = new ERR_STREAM_DESTROYED('write'); - } else if (chunk === null) { - err = new ERR_STREAM_NULL_VALUES(); + if (chunk === null) { + throw new ERR_STREAM_NULL_VALUES(); } else if (!state.objectMode) { if (typeof chunk === 'string') { if (state.decodeStrings !== false) { @@ -292,11 +287,18 @@ Writable.prototype.write = function(chunk, encoding, cb) { chunk = Stream._uint8ArrayToBuffer(chunk); encoding = 'buffer'; } else { - err = new ERR_INVALID_ARG_TYPE( + throw new ERR_INVALID_ARG_TYPE( 'chunk', ['string', 'Buffer', 'Uint8Array'], chunk); } } + let err; + if (state.ending) { + err = new ERR_STREAM_WRITE_AFTER_END(); + } else if (state.destroyed) { + err = new ERR_STREAM_DESTROYED('write'); + } + if (err) { process.nextTick(cb, err); errorOrDestroy(this, err, true); @@ -560,7 +562,7 @@ Writable.prototype._write = function(chunk, encoding, cb) { if (this._writev) { this._writev([{ chunk, encoding }], cb); } else { - process.nextTick(cb, new ERR_METHOD_NOT_IMPLEMENTED('_write()')); + throw new ERR_METHOD_NOT_IMPLEMENTED('_write()'); } }; @@ -687,7 +689,6 @@ function endWritable(stream, state, cb) { onFinished(stream, state, cb); } state.ended = true; - stream.writable = false; } function onCorkedFinish(corkReq, state, err) { @@ -741,7 +742,7 @@ ObjectDefineProperties(Writable.prototype, { get() { const w = this._writableState; if (!w) return false; - if (w.writable !== undefined) return w.writable; + if (w.writable !== undefined) return w.writable && !w.ended; return Boolean(!w.destroyed && !w.errored && !w.ending); }, set(val) { diff --git a/lib/_tls_wrap.js b/lib/_tls_wrap.js index aeb1cfe7af9..561bb1f2d55 100644 --- a/lib/_tls_wrap.js +++ b/lib/_tls_wrap.js @@ -66,11 +66,16 @@ const { ERR_TLS_RENEGOTIATION_DISABLED, ERR_TLS_REQUIRED_SERVER_NAME, ERR_TLS_SESSION_ATTACK, - ERR_TLS_SNI_FROM_SERVER + ERR_TLS_SNI_FROM_SERVER, + ERR_TLS_INVALID_STATE } = codes; const { onpskexchange: kOnPskExchange } = internalBinding('symbols'); const { getOptionValue } = require('internal/options'); -const { validateString, validateBuffer } = require('internal/validators'); +const { + validateString, + validateBuffer, + validateUint32 +} = require('internal/validators'); const traceTls = getOptionValue('--trace-tls'); const tlsKeylog = getOptionValue('--tls-keylog'); const { appendFile } = require('fs'); @@ -860,6 +865,18 @@ TLSSocket.prototype.renegotiate = function(options, callback) { return true; }; +TLSSocket.prototype.exportKeyingMaterial = function(length, label, context) { + validateUint32(length, 'length', true); + validateString(label, 'label'); + if (context !== undefined) + validateBuffer(context, 'context'); + + if (!this._secureEstablished) + throw new ERR_TLS_INVALID_STATE(); + + return this._handle.exportKeyingMaterial(length, label, context); +}; + TLSSocket.prototype.setMaxSendFragment = function setMaxSendFragment(size) { return this._handle.setMaxSendFragment(size) === 1; }; @@ -1611,7 +1628,7 @@ 
exports.connect = function connect(...args) { tlssock._start(); tlssock.on('secure', onConnectSecure); - tlssock.once('end', onConnectEnd); + tlssock.prependListener('end', onConnectEnd); return tlssock; }; diff --git a/lib/async_hooks.js b/lib/async_hooks.js index 3ebc9af473d..d676f6dfcb4 100644 --- a/lib/async_hooks.js +++ b/lib/async_hooks.js @@ -209,11 +209,105 @@ class AsyncResource { } } +const storageList = []; +const storageHook = createHook({ + init(asyncId, type, triggerAsyncId, resource) { + const currentResource = executionAsyncResource(); + // Value of currentResource is always a non null object + for (let i = 0; i < storageList.length; ++i) { + storageList[i]._propagate(resource, currentResource); + } + } +}); + +class AsyncLocalStorage { + constructor() { + this.kResourceStore = Symbol('kResourceStore'); + this.enabled = false; + } + + disable() { + if (this.enabled) { + this.enabled = false; + // If this.enabled, the instance must be in storageList + storageList.splice(storageList.indexOf(this), 1); + if (storageList.length === 0) { + storageHook.disable(); + } + } + } + + // Propagate the context from a parent resource to a child one + _propagate(resource, triggerResource) { + const store = triggerResource[this.kResourceStore]; + if (this.enabled) { + resource[this.kResourceStore] = store; + } + } + + enterWith(store) { + if (!this.enabled) { + this.enabled = true; + storageList.push(this); + storageHook.enable(); + } + const resource = executionAsyncResource(); + resource[this.kResourceStore] = store; + } + + runSyncAndReturn(store, callback, ...args) { + const resource = executionAsyncResource(); + const outerStore = resource[this.kResourceStore]; + this.enterWith(store); + try { + return callback(...args); + } finally { + resource[this.kResourceStore] = outerStore; + } + } + + exitSyncAndReturn(callback, ...args) { + if (!this.enabled) { + return callback(...args); + } + this.enabled = false; + try { + return callback(...args); + } finally { + this.enabled = true; + } + } + + getStore() { + const resource = executionAsyncResource(); + if (this.enabled) { + return resource[this.kResourceStore]; + } + } + + run(store, callback, ...args) { + const resource = executionAsyncResource(); + const outerStore = resource[this.kResourceStore]; + this.enterWith(store); + process.nextTick(callback, ...args); + resource[this.kResourceStore] = outerStore; + } + + exit(callback, ...args) { + if (!this.enabled) { + return process.nextTick(callback, ...args); + } + this.enabled = false; + process.nextTick(callback, ...args); + this.enabled = true; + } +} // Placing all exports down here because the exported classes won't export // otherwise. 
module.exports = { // Public API + AsyncLocalStorage, createHook, executionAsyncId, triggerAsyncId, diff --git a/lib/dgram.js b/lib/dgram.js index 26d1e1d11a0..ddac50ade19 100644 --- a/lib/dgram.js +++ b/lib/dgram.js @@ -35,17 +35,12 @@ const { newHandle, } = require('internal/dgram'); const { guessHandleType } = internalBinding('util'); -const { - isLegalPort, -} = require('internal/net'); const { ERR_INVALID_ARG_TYPE, ERR_MISSING_ARGS, ERR_SOCKET_ALREADY_BOUND, ERR_SOCKET_BAD_BUFFER_SIZE, - ERR_SOCKET_BAD_PORT, ERR_SOCKET_BUFFER_SIZE, - ERR_SOCKET_CANNOT_SEND, ERR_SOCKET_DGRAM_IS_CONNECTED, ERR_SOCKET_DGRAM_NOT_CONNECTED, ERR_SOCKET_DGRAM_NOT_RUNNING, @@ -54,7 +49,8 @@ const { const { isInt32, validateString, - validateNumber + validateNumber, + validatePort, } = require('internal/validators'); const { Buffer } = require('buffer'); const { deprecate } = require('internal/util'); @@ -232,7 +228,9 @@ Socket.prototype.bind = function(port_, address_ /* , callback */) { this.on('listening', onListening); } - if (port instanceof UDP) { + if (port !== null && + typeof port === 'object' && + typeof port.recvStart === 'function') { replaceHandle(this, port); startListening(this); return this; @@ -350,21 +348,8 @@ Socket.prototype.bind = function(port_, address_ /* , callback */) { return this; }; - -function validatePort(port) { - const legal = isLegalPort(port); - if (legal) - port = port | 0; - - if (!legal || port === 0) - throw new ERR_SOCKET_BAD_PORT(port); - - return port; -} - - Socket.prototype.connect = function(port, address, callback) { - port = validatePort(port); + port = validatePort(port, 'Port', { allowZero: false }); if (typeof address === 'function') { callback = address; address = ''; @@ -506,7 +491,7 @@ function enqueue(self, toEnqueue) { // event handler that flushes the send queue after binding is done. if (state.queue === undefined) { state.queue = []; - self.once('error', onListenError); + self.once(EventEmitter.errorMonitor, onListenError); self.once('listening', onListenSuccess); } state.queue.push(toEnqueue); @@ -514,7 +499,7 @@ function enqueue(self, toEnqueue) { function onListenSuccess() { - this.removeListener('error', onListenError); + this.removeListener(EventEmitter.errorMonitor, onListenError); clearQueue.call(this); } @@ -522,7 +507,6 @@ function onListenSuccess() { function onListenError(err) { this.removeListener('listening', onListenSuccess); this[kStateSymbol].queue = undefined; - this.emit('error', new ERR_SOCKET_CANNOT_SEND()); } @@ -610,7 +594,7 @@ Socket.prototype.send = function(buffer, } if (!connected) - port = validatePort(port); + port = validatePort(port, 'Port', { allowZero: false }); // Normalize callback so it's either a function or undefined but not anything // else. 
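A minimal usage sketch for the AsyncLocalStorage API added to lib/async_hooks.js above (not part of the patch; the `requestId` store value is purely illustrative):

'use strict';
// Sketch only: exercises the AsyncLocalStorage class added above.
const { AsyncLocalStorage } = require('async_hooks');
const als = new AsyncLocalStorage();

// runSyncAndReturn() invokes the callback synchronously with the given
// store active and restores the outer store afterwards; the storage hook
// then propagates the store to async resources created inside it.
als.runSyncAndReturn({ requestId: 1 }, () => {
  setTimeout(() => {
    // The store set above is still visible here via init-hook propagation.
    console.log(als.getStore()); // { requestId: 1 }
  }, 10);
});

// Outside the callback the outer store (undefined) has been restored.
console.log(als.getStore()); // undefined

Note that in this revision run() and exit() defer their callbacks with process.nextTick(), so runSyncAndReturn()/exitSyncAndReturn() are the synchronous entry points.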
diff --git a/lib/dns.js b/lib/dns.js index 8a6c7456bab..e33dd2620e1 100644 --- a/lib/dns.js +++ b/lib/dns.js @@ -29,7 +29,7 @@ const { const cares = internalBinding('cares_wrap'); const { toASCII } = require('internal/idna'); -const { isIP, isLegalPort } = require('internal/net'); +const { isIP } = require('internal/net'); const { customPromisifyArgs } = require('internal/util'); const errors = require('internal/errors'); const { @@ -45,9 +45,11 @@ const { ERR_INVALID_CALLBACK, ERR_INVALID_OPT_VALUE, ERR_MISSING_ARGS, - ERR_SOCKET_BAD_PORT } = errors.codes; -const { validateString } = require('internal/validators'); +const { + validatePort, + validateString, +} = require('internal/validators'); const { GetAddrInfoReqWrap, @@ -175,8 +177,7 @@ function lookupService(address, port, callback) { if (isIP(address) === 0) throw new ERR_INVALID_OPT_VALUE('address', address); - if (!isLegalPort(port)) - throw new ERR_SOCKET_BAD_PORT(port); + validatePort(port); if (typeof callback !== 'function') throw new ERR_INVALID_CALLBACK(callback); diff --git a/lib/events.js b/lib/events.js index 76b376e7891..b138979e138 100644 --- a/lib/events.js +++ b/lib/events.js @@ -31,7 +31,6 @@ const { ObjectDefineProperty, ObjectGetPrototypeOf, ObjectSetPrototypeOf, - ObjectKeys, Promise, PromiseReject, PromiseResolve, @@ -90,12 +89,7 @@ ObjectDefineProperty(EventEmitter, 'captureRejections', { enumerable: true }); -ObjectDefineProperty(EventEmitter, 'errorMonitor', { - value: kErrorMonitor, - writable: false, - configurable: true, - enumerable: true -}); +EventEmitter.errorMonitor = kErrorMonitor; // The default for captureRejections is false ObjectDefineProperty(EventEmitter.prototype, kCapture, { @@ -531,7 +525,7 @@ EventEmitter.prototype.removeAllListeners = // Emit removeListener for all listeners on all events if (arguments.length === 0) { - for (const key of ObjectKeys(events)) { + for (const key of ReflectOwnKeys(events)) { if (key === 'removeListener') continue; this.removeAllListeners(key); } diff --git a/lib/fs.js b/lib/fs.js index a85b8ab29eb..52b7bdd810e 100644 --- a/lib/fs.js +++ b/lib/fs.js @@ -109,9 +109,10 @@ const { parseFileMode, validateBuffer, validateInteger, - validateInt32, - validateUint32 + validateInt32 } = require('internal/validators'); +// 2 ** 32 - 1 +const kMaxUserId = 4294967295; let truncateWarn = true; let fs; @@ -1153,8 +1154,8 @@ function chmodSync(path, mode) { function lchown(path, uid, gid, callback) { callback = makeCallback(callback); path = getValidatedPath(path); - validateUint32(uid, 'uid'); - validateUint32(gid, 'gid'); + validateInteger(uid, 'uid', -1, kMaxUserId); + validateInteger(gid, 'gid', -1, kMaxUserId); const req = new FSReqCallback(); req.oncomplete = callback; binding.lchown(pathModule.toNamespacedPath(path), uid, gid, req); @@ -1162,8 +1163,8 @@ function lchown(path, uid, gid, callback) { function lchownSync(path, uid, gid) { path = getValidatedPath(path); - validateUint32(uid, 'uid'); - validateUint32(gid, 'gid'); + validateInteger(uid, 'uid', -1, kMaxUserId); + validateInteger(gid, 'gid', -1, kMaxUserId); const ctx = { path }; binding.lchown(pathModule.toNamespacedPath(path), uid, gid, undefined, ctx); handleErrorFromBinding(ctx); @@ -1171,8 +1172,8 @@ function lchownSync(path, uid, gid) { function fchown(fd, uid, gid, callback) { validateInt32(fd, 'fd', 0); - validateUint32(uid, 'uid'); - validateUint32(gid, 'gid'); + validateInteger(uid, 'uid', -1, kMaxUserId); + validateInteger(gid, 'gid', -1, kMaxUserId); const req = new FSReqCallback(); req.oncomplete = 
makeCallback(callback); @@ -1181,8 +1182,8 @@ function fchown(fd, uid, gid, callback) { function fchownSync(fd, uid, gid) { validateInt32(fd, 'fd', 0); - validateUint32(uid, 'uid'); - validateUint32(gid, 'gid'); + validateInteger(uid, 'uid', -1, kMaxUserId); + validateInteger(gid, 'gid', -1, kMaxUserId); const ctx = {}; binding.fchown(fd, uid, gid, undefined, ctx); @@ -1192,8 +1193,8 @@ function fchownSync(fd, uid, gid) { function chown(path, uid, gid, callback) { callback = makeCallback(callback); path = getValidatedPath(path); - validateUint32(uid, 'uid'); - validateUint32(gid, 'gid'); + validateInteger(uid, 'uid', -1, kMaxUserId); + validateInteger(gid, 'gid', -1, kMaxUserId); const req = new FSReqCallback(); req.oncomplete = callback; @@ -1202,8 +1203,8 @@ function chown(path, uid, gid, callback) { function chownSync(path, uid, gid) { path = getValidatedPath(path); - validateUint32(uid, 'uid'); - validateUint32(gid, 'gid'); + validateInteger(uid, 'uid', -1, kMaxUserId); + validateInteger(gid, 'gid', -1, kMaxUserId); const ctx = { path }; binding.chown(pathModule.toNamespacedPath(path), uid, gid, undefined, ctx); handleErrorFromBinding(ctx); @@ -1248,9 +1249,9 @@ function futimesSync(fd, atime, mtime) { handleErrorFromBinding(ctx); } -function writeAll(fd, isUserFd, buffer, offset, length, position, callback) { +function writeAll(fd, isUserFd, buffer, offset, length, callback) { // write(fd, buffer, offset, length, position, callback) - fs.write(fd, buffer, offset, length, position, (writeErr, written) => { + fs.write(fd, buffer, offset, length, null, (writeErr, written) => { if (writeErr) { if (isUserFd) { callback(writeErr); @@ -1268,10 +1269,7 @@ function writeAll(fd, isUserFd, buffer, offset, length, position, callback) { } else { offset += written; length -= written; - if (position !== null) { - position += written; - } - writeAll(fd, isUserFd, buffer, offset, length, position, callback); + writeAll(fd, isUserFd, buffer, offset, length, callback); } }); } @@ -1288,7 +1286,7 @@ function writeFile(path, data, options, callback) { if (isFd(path)) { const isUserFd = true; - writeAll(path, isUserFd, data, 0, data.byteLength, null, callback); + writeAll(path, isUserFd, data, 0, data.byteLength, callback); return; } @@ -1297,8 +1295,7 @@ function writeFile(path, data, options, callback) { callback(openErr); } else { const isUserFd = false; - const position = /a/.test(flag) ? null : 0; - writeAll(fd, isUserFd, data, 0, data.byteLength, position, callback); + writeAll(fd, isUserFd, data, 0, data.byteLength, callback); } }); } @@ -1318,15 +1315,11 @@ function writeFileSync(path, data, options) { let offset = 0; let length = data.byteLength; - let position = (/a/.test(flag) || isUserFd) ? 
null : 0; try { while (length > 0) { - const written = fs.writeSync(fd, data, offset, length, position); + const written = fs.writeSync(fd, data, offset, length); offset += written; length -= written; - if (position !== null) { - position += written; - } } } finally { if (!isUserFd) fs.closeSync(fd); diff --git a/lib/fs/promises.js b/lib/fs/promises.js new file mode 100644 index 00000000000..1fa3a185dea --- /dev/null +++ b/lib/fs/promises.js @@ -0,0 +1,3 @@ +'use strict'; + +module.exports = require('internal/fs/promises').exports; diff --git a/lib/internal/cluster/master.js b/lib/internal/cluster/master.js index 9bdb0181d3d..46c77900f42 100644 --- a/lib/internal/cluster/master.js +++ b/lib/internal/cluster/master.js @@ -14,13 +14,12 @@ const RoundRobinHandle = require('internal/cluster/round_robin_handle'); const SharedHandle = require('internal/cluster/shared_handle'); const Worker = require('internal/cluster/worker'); const { internal, sendHelper } = require('internal/cluster/utils'); -const { ERR_SOCKET_BAD_PORT } = require('internal/errors').codes; const cluster = new EventEmitter(); const intercom = new EventEmitter(); const SCHED_NONE = 1; const SCHED_RR = 2; -const { isLegalPort } = require('internal/net'); const [ minPort, maxPort ] = [ 1024, 65535 ]; +const { validatePort } = require('internal/validators'); module.exports = cluster; @@ -118,9 +117,7 @@ function createWorkerProcess(id, env) { else inspectPort = cluster.settings.inspectPort; - if (!isLegalPort(inspectPort)) { - throw new ERR_SOCKET_BAD_PORT(inspectPort); - } + validatePort(inspectPort); } else { inspectPort = process.debugPort + debugPortOffset; if (inspectPort > maxPort) diff --git a/lib/internal/crypto/cipher.js b/lib/internal/crypto/cipher.js index add56eae680..80b0c0e9dab 100644 --- a/lib/internal/crypto/cipher.js +++ b/lib/internal/crypto/cipher.js @@ -151,13 +151,13 @@ Cipher.prototype.update = function update(data, inputEncoding, outputEncoding) { inputEncoding = inputEncoding || encoding; outputEncoding = outputEncoding || encoding; - if (typeof data !== 'string' && !isArrayBufferView(data)) { + if (typeof data === 'string') { + validateEncoding(data, inputEncoding); + } else if (!isArrayBufferView(data)) { throw new ERR_INVALID_ARG_TYPE( 'data', ['string', 'Buffer', 'TypedArray', 'DataView'], data); } - validateEncoding(data, inputEncoding); - const ret = this[kHandle].update(data, inputEncoding); if (outputEncoding && outputEncoding !== 'buffer') { diff --git a/lib/internal/crypto/hash.js b/lib/internal/crypto/hash.js index dca0ba767f6..1cf0188da2f 100644 --- a/lib/internal/crypto/hash.js +++ b/lib/internal/crypto/hash.js @@ -78,17 +78,13 @@ Hash.prototype.update = function update(data, encoding) { if (state[kFinalized]) throw new ERR_CRYPTO_HASH_FINALIZED(); - if (typeof data !== 'string' && !isArrayBufferView(data)) { - throw new ERR_INVALID_ARG_TYPE('data', - ['string', - 'Buffer', - 'TypedArray', - 'DataView'], - data); + if (typeof data === 'string') { + validateEncoding(data, encoding); + } else if (!isArrayBufferView(data)) { + throw new ERR_INVALID_ARG_TYPE( + 'data', ['string', 'Buffer', 'TypedArray', 'DataView'], data); } - validateEncoding(data, encoding); - if (!this[kHandle].update(data, encoding)) throw new ERR_CRYPTO_HASH_UPDATE_FAILED(); return this; diff --git a/lib/internal/dns/promises.js b/lib/internal/dns/promises.js index ae007fd3193..6ade8854964 100644 --- a/lib/internal/dns/promises.js +++ b/lib/internal/dns/promises.js @@ -14,7 +14,7 @@ const { } = require('internal/dns/utils'); 
const { codes, dnsException } = require('internal/errors'); const { toASCII } = require('internal/idna'); -const { isIP, isLegalPort } = require('internal/net'); +const { isIP } = require('internal/net'); const { getaddrinfo, getnameinfo, @@ -27,10 +27,11 @@ const { ERR_INVALID_ARG_TYPE, ERR_INVALID_OPT_VALUE, ERR_MISSING_ARGS, - ERR_SOCKET_BAD_PORT } = codes; -const { validateString } = require('internal/validators'); - +const { + validatePort, + validateString +} = require('internal/validators'); function onlookup(err, addresses) { if (err) { @@ -162,8 +163,7 @@ function lookupService(address, port) { if (isIP(address) === 0) throw new ERR_INVALID_OPT_VALUE('address', address); - if (!isLegalPort(port)) - throw new ERR_SOCKET_BAD_PORT(port); + validatePort(port); return createLookupServicePromise(address, +port); } diff --git a/lib/internal/errors.js b/lib/internal/errors.js index 392a297070d..9f9a0a66f28 100644 --- a/lib/internal/errors.js +++ b/lib/internal/errors.js @@ -13,16 +13,20 @@ const { ArrayIsArray, Error, + JSONStringify, Map, MathAbs, NumberIsInteger, ObjectDefineProperty, ObjectKeys, + StringPrototypeSlice, Symbol, SymbolFor, WeakMap, } = primordials; +const sep = process.platform === 'win32' ? '\\' : '/'; + const messages = new Map(); const codes = {}; @@ -754,6 +758,7 @@ E('ERR_CHILD_PROCESS_STDIO_MAXBUFFER', '%s maxBuffer length exceeded', RangeError); E('ERR_CONSOLE_WRITABLE_STREAM', 'Console expects a writable stream instance for %s', TypeError); +E('ERR_CONTEXT_NOT_INITIALIZED', 'context used is not initialized', Error); E('ERR_CPU_USAGE', 'Unable to obtain cpu usage %s', Error); E('ERR_CRYPTO_CUSTOM_ENGINE_NOT_SUPPORTED', 'Custom engines not supported by this OpenSSL', Error); @@ -1073,6 +1078,11 @@ E('ERR_INVALID_FILE_URL_PATH', 'File URL path %s', TypeError); E('ERR_INVALID_HANDLE_TYPE', 'This handle type cannot be sent', TypeError); E('ERR_INVALID_HTTP_TOKEN', '%s must be a valid HTTP token ["%s"]', TypeError); E('ERR_INVALID_IP_ADDRESS', 'Invalid IP address: %s', TypeError); +E('ERR_INVALID_MODULE_SPECIFIER', (pkgPath, subpath) => { + assert(subpath !== '.'); + return `Package subpath '${subpath}' is not a valid module request for the ` + + `"exports" resolution of ${pkgPath}${sep}package.json`; +}, TypeError); E('ERR_INVALID_OPT_VALUE', (name, value) => `The value "${String(value)}" is invalid for option "${name}"`, TypeError, @@ -1080,7 +1090,17 @@ E('ERR_INVALID_OPT_VALUE', (name, value) => E('ERR_INVALID_OPT_VALUE_ENCODING', 'The value "%s" is invalid for option "encoding"', TypeError); E('ERR_INVALID_PACKAGE_CONFIG', - 'Invalid package config for \'%s\', %s', Error); + `Invalid package config %s${sep}package.json, %s`, Error); +E('ERR_INVALID_PACKAGE_TARGET', (pkgPath, key, subpath, target) => { + if (key === '.') { + return `Invalid "exports" main target ${JSONStringify(target)} defined ` + + `in the package config ${pkgPath}${sep}package.json`; + } else { + return `Invalid "exports" target ${JSONStringify(target)} defined for '${ + StringPrototypeSlice(key, 0, -subpath.length || key.length)}' in the ` + + `package config ${pkgPath}${sep}package.json`; + } +}, Error); E('ERR_INVALID_PERFORMANCE_MARK', 'The "%s" performance mark has not been set', Error); E('ERR_INVALID_PROTOCOL', @@ -1225,6 +1245,14 @@ E('ERR_OUT_OF_RANGE', msg += ` It must be ${range}. 
Received ${received}`; return msg; }, RangeError); +E('ERR_PACKAGE_PATH_NOT_EXPORTED', (pkgPath, subpath) => { + if (subpath === '.') { + return `No "exports" main resolved in ${pkgPath}${sep}package.json`; + } else { + return `Package subpath '${subpath}' is not defined by "exports" in ${ + pkgPath}${sep}package.json`; + } +}, Error); E('ERR_REQUIRE_ESM', (filename, parentPath = null, packageJsonPath = null) => { let msg = `Must use import to load ES Module: ${filename}`; @@ -1254,13 +1282,12 @@ E('ERR_SOCKET_ALREADY_BOUND', 'Socket is already bound', Error); E('ERR_SOCKET_BAD_BUFFER_SIZE', 'Buffer size must be a positive integer', TypeError); E('ERR_SOCKET_BAD_PORT', - 'Port should be >= 0 and < 65536. Received %s.', RangeError); + '%s should be >= 0 and < 65536. Received %s.', RangeError); E('ERR_SOCKET_BAD_TYPE', 'Bad socket type specified. Valid types are: udp4, udp6', TypeError); E('ERR_SOCKET_BUFFER_SIZE', 'Could not get or set buffer size', SystemError); -E('ERR_SOCKET_CANNOT_SEND', 'Unable to send data', Error); E('ERR_SOCKET_CLOSED', 'Socket is closed', Error); E('ERR_SOCKET_DGRAM_IS_CONNECTED', 'Already connected', Error); E('ERR_SOCKET_DGRAM_NOT_CONNECTED', 'Not connected', Error); @@ -1291,6 +1318,8 @@ E('ERR_TLS_CERT_ALTNAME_INVALID', function(reason, host, cert) { E('ERR_TLS_DH_PARAM_SIZE', 'DH parameter size %s is less than 2048', Error); E('ERR_TLS_HANDSHAKE_TIMEOUT', 'TLS handshake timeout', Error); E('ERR_TLS_INVALID_CONTEXT', '%s must be a SecureContext', TypeError), +E('ERR_TLS_INVALID_STATE', 'TLS socket connection must be securely established', + Error), E('ERR_TLS_INVALID_PROTOCOL_VERSION', '%j is not a valid %s TLS protocol version', TypeError); E('ERR_TLS_PROTOCOL_VERSION_CONFLICT', @@ -1358,12 +1387,13 @@ E('ERR_VM_MODULE_NOT_MODULE', 'Provided module is not an instance of Module', Error); E('ERR_VM_MODULE_STATUS', 'Module status %s', Error); E('ERR_WASI_ALREADY_STARTED', 'WASI instance has already started', Error); +E('ERR_WORKER_INIT_FAILED', 'Worker initialization failure: %s', Error); E('ERR_WORKER_INVALID_EXEC_ARGV', (errors, msg = 'invalid execArgv flags') => `Initiated Worker with ${msg}: ${errors.join(', ')}`, Error); E('ERR_WORKER_NOT_RUNNING', 'Worker instance not running', Error); -E('ERR_WORKER_OUT_OF_MEMORY', 'Worker terminated due to reaching memory limit', - Error); +E('ERR_WORKER_OUT_OF_MEMORY', + 'Worker terminated due to reaching memory limit: %s', Error); E('ERR_WORKER_PATH', 'The worker script filename must be an absolute path or a relative ' + 'path starting with \'./\' or \'../\'. Received "%s"', diff --git a/lib/internal/fs/streams.js b/lib/internal/fs/streams.js index c121273861d..a76a8f6895c 100644 --- a/lib/internal/fs/streams.js +++ b/lib/internal/fs/streams.js @@ -291,7 +291,8 @@ function WriteStream(path, options) { options.decodeStrings = true; if (options.autoDestroy === undefined) { - options.autoDestroy = false; + options.autoDestroy = options.autoClose === undefined ? + true : (options.autoClose || false); } this[kFs] = options.fs || fs; @@ -337,7 +338,7 @@ function WriteStream(path, options) { this.mode = options.mode === undefined ? 0o666 : options.mode; this.start = options.start; - this.autoClose = options.autoClose === undefined ? 
true : !!options.autoClose; + this.autoClose = options.autoDestroy; this.pos = undefined; this.bytesWritten = 0; this.closed = false; @@ -365,10 +366,6 @@ WriteStream.prototype._final = function(callback) { }); } - if (this.autoClose) { - this.destroy(); - } - callback(); }; @@ -419,9 +416,6 @@ WriteStream.prototype._write = function(data, encoding, cb) { } if (er) { - if (this.autoClose) { - this.destroy(); - } return cb(er); } this.bytesWritten += bytes; @@ -464,7 +458,7 @@ WriteStream.prototype._writev = function(data, cb) { if (er) { if (this.autoClose) { - this.destroy(); + this.destroy(er); } return cb(er); } diff --git a/lib/internal/histogram.js b/lib/internal/histogram.js new file mode 100644 index 00000000000..6deb8314a41 --- /dev/null +++ b/lib/internal/histogram.js @@ -0,0 +1,94 @@ +'use strict'; + +const { + customInspectSymbol: kInspect, +} = require('internal/util'); + +const { format } = require('util'); +const { Map, Symbol } = primordials; + +const { + ERR_INVALID_ARG_TYPE, + ERR_INVALID_ARG_VALUE, +} = require('internal/errors').codes; + +const kDestroy = Symbol('kDestroy'); +const kHandle = Symbol('kHandle'); + +// Histograms are created internally by Node.js and used to +// record various metrics. This Histogram class provides a +// generally read-only view of the internal histogram. +class Histogram { + #handle = undefined; + #map = new Map(); + + constructor(internal) { + this.#handle = internal; + } + + [kInspect]() { + const obj = { + min: this.min, + max: this.max, + mean: this.mean, + exceeds: this.exceeds, + stddev: this.stddev, + percentiles: this.percentiles, + }; + return `Histogram ${format(obj)}`; + } + + get min() { + return this.#handle ? this.#handle.min() : undefined; + } + + get max() { + return this.#handle ? this.#handle.max() : undefined; + } + + get mean() { + return this.#handle ? this.#handle.mean() : undefined; + } + + get exceeds() { + return this.#handle ? this.#handle.exceeds() : undefined; + } + + get stddev() { + return this.#handle ? this.#handle.stddev() : undefined; + } + + percentile(percentile) { + if (typeof percentile !== 'number') + throw new ERR_INVALID_ARG_TYPE('percentile', 'number', percentile); + + if (percentile <= 0 || percentile > 100) + throw new ERR_INVALID_ARG_VALUE.RangeError('percentile', percentile); + + return this.#handle ? 
this.#handle.percentile(percentile) : undefined; + } + + get percentiles() { + this.#map.clear(); + if (this.#handle) + this.#handle.percentiles(this.#map); + return this.#map; + } + + reset() { + if (this.#handle) + this.#handle.reset(); + } + + [kDestroy]() { + this.#handle = undefined; + } + + get [kHandle]() { return this.#handle; } +} + +module.exports = { + Histogram, + kDestroy, + kHandle, +}; diff --git a/lib/internal/http2/compat.js b/lib/internal/http2/compat.js index 25a80684621..3abe9ba2ac6 100644 --- a/lib/internal/http2/compat.js +++ b/lib/internal/http2/compat.js @@ -471,10 +471,8 @@ class Http2ServerResponse extends Stream { } get finished() { - const stream = this[kStream]; - return stream.destroyed || - stream._writableState.ended || - this[kState].closed; + const state = this[kState]; + return state.ending; } get socket() { @@ -700,12 +698,11 @@ class Http2ServerResponse extends Stream { if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); - const isFinished = this.finished; state.headRequest = stream.headRequest; state.ending = true; if (typeof cb === 'function') { - if (isFinished) + if (stream.writableEnded) this.once('finish', cb); else stream.once('finish', cb); @@ -714,7 +711,7 @@ class Http2ServerResponse extends Stream { if (!stream.headersSent) this.writeHead(this[kState].statusCode); - if (isFinished) + if (this[kState].closed || stream.destroyed) onStreamCloseResponse.call(stream); else stream.end(); diff --git a/lib/internal/modules/cjs/loader.js b/lib/internal/modules/cjs/loader.js index a5ea81b55f1..a73101aa1fe 100644 --- a/lib/internal/modules/cjs/loader.js +++ b/lib/internal/modules/cjs/loader.js @@ -51,7 +51,7 @@ const { rekeySourceMap } = require('internal/source_map/source_map_cache'); const { pathToFileURL, fileURLToPath, URL } = require('internal/url'); -const { deprecate, emitExperimentalWarning } = require('internal/util'); +const { deprecate } = require('internal/util'); const vm = require('vm'); const assert = require('internal/assert'); const fs = require('fs'); @@ -86,6 +86,9 @@ const { ERR_INVALID_ARG_VALUE, ERR_INVALID_OPT_VALUE, ERR_INVALID_PACKAGE_CONFIG, + ERR_INVALID_PACKAGE_TARGET, + ERR_INVALID_MODULE_SPECIFIER, + ERR_PACKAGE_PATH_NOT_EXPORTED, ERR_REQUIRE_ESM } = require('internal/errors').codes; const { validateString } = require('internal/validators'); @@ -502,13 +505,9 @@ function applyExports(basePath, expansion) { if (ObjectPrototypeHasOwnProperty(pkgExports, mappingKey)) { const mapping = pkgExports[mappingKey]; return resolveExportsTarget(pathToFileURL(basePath + '/'), mapping, '', - basePath, mappingKey); + mappingKey); } - // Fallback to CJS main lookup when no main export is defined - if (mappingKey === '.') - return basePath; - let dirMatch = ''; for (const candidateKey of ObjectKeys(pkgExports)) { if (candidateKey[candidateKey.length - 1] !== '/') continue; @@ -522,18 +521,11 @@ function applyExports(basePath, expansion) { const mapping = pkgExports[dirMatch]; const subpath = StringPrototypeSlice(mappingKey, dirMatch.length); return resolveExportsTarget(pathToFileURL(basePath + '/'), mapping, - subpath, basePath, mappingKey); + subpath, mappingKey); } } - // Fallback to CJS main lookup when no main export is defined - if (mappingKey === '.') - return basePath; - // eslint-disable-next-line no-restricted-syntax - const e = new Error(`Package exports for '${basePath}' do not define ` + - `a '${mappingKey}' subpath`); - e.code = 'MODULE_NOT_FOUND'; - throw e; + throw new 
ERR_PACKAGE_PATH_NOT_EXPORTED(basePath, mappingKey); } // This only applies to requests of a specific form: @@ -568,39 +560,53 @@ function isArrayIndex(p) { return n >= 0 && n < (2 ** 32) - 1; } -function resolveExportsTarget(pkgPath, target, subpath, basePath, mappingKey) { +function resolveExportsTarget(baseUrl, target, subpath, mappingKey) { if (typeof target === 'string') { - if (target.startsWith('./') && - (subpath.length === 0 || target.endsWith('/'))) { - const resolvedTarget = new URL(target, pkgPath); - const pkgPathPath = pkgPath.pathname; - const resolvedTargetPath = resolvedTarget.pathname; - if (StringPrototypeStartsWith(resolvedTargetPath, pkgPathPath) && + let resolvedTarget, resolvedTargetPath; + const pkgPathPath = baseUrl.pathname; + if (StringPrototypeStartsWith(target, './')) { + resolvedTarget = new URL(target, baseUrl); + resolvedTargetPath = resolvedTarget.pathname; + if (!StringPrototypeStartsWith(resolvedTargetPath, pkgPathPath) || StringPrototypeIndexOf(resolvedTargetPath, '/node_modules/', - pkgPathPath.length - 1) === -1) { - const resolved = new URL(subpath, resolvedTarget); - const resolvedPath = resolved.pathname; - if (StringPrototypeStartsWith(resolvedPath, resolvedTargetPath) && - StringPrototypeIndexOf(resolvedPath, '/node_modules/', - pkgPathPath.length - 1) === -1) { - return fileURLToPath(resolved); - } - } + pkgPathPath.length - 1) !== -1) + resolvedTarget = undefined; } + if (subpath.length > 0 && target[target.length - 1] !== '/') + resolvedTarget = undefined; + if (resolvedTarget === undefined) + throw new ERR_INVALID_PACKAGE_TARGET(StringPrototypeSlice(baseUrl.pathname + , 0, -1), mappingKey, subpath, target); + const resolved = new URL(subpath, resolvedTarget); + const resolvedPath = resolved.pathname; + if (StringPrototypeStartsWith(resolvedPath, resolvedTargetPath) && + StringPrototypeIndexOf(resolvedPath, '/node_modules/', + pkgPathPath.length - 1) === -1) { + return fileURLToPath(resolved); + } + throw new ERR_INVALID_MODULE_SPECIFIER(StringPrototypeSlice(baseUrl.pathname + , 0, -1), mappingKey); } else if (ArrayIsArray(target)) { + if (target.length === 0) + throw new ERR_INVALID_PACKAGE_TARGET(StringPrototypeSlice(baseUrl.pathname + , 0, -1), mappingKey, subpath, target); for (const targetValue of target) { - if (ArrayIsArray(targetValue)) continue; try { - return resolveExportsTarget(pkgPath, targetValue, subpath, basePath, - mappingKey); + return resolveExportsTarget(baseUrl, targetValue, subpath, mappingKey); } catch (e) { - if (e.code !== 'MODULE_NOT_FOUND') throw e; + if (e.code !== 'ERR_PACKAGE_PATH_NOT_EXPORTED' && + e.code !== 'ERR_INVALID_PACKAGE_TARGET') + throw e; } } + // Throw last fallback error + resolveExportsTarget(baseUrl, target[target.length - 1], subpath, + mappingKey); + assert(false); } else if (typeof target === 'object' && target !== null) { const keys = ObjectKeys(target); if (keys.some(isArrayIndex)) { - throw new ERR_INVALID_PACKAGE_CONFIG(basePath, '"exports" cannot ' + + throw new ERR_INVALID_PACKAGE_CONFIG(baseUrl, '"exports" cannot ' + 'contain numeric property keys.'); } for (const p of keys) { @@ -608,35 +614,26 @@ function resolveExportsTarget(pkgPath, target, subpath, basePath, mappingKey) { case 'node': case 'require': try { - emitExperimentalWarning('Conditional exports'); - const result = resolveExportsTarget(pkgPath, target[p], subpath, - basePath, mappingKey); - return result; + return resolveExportsTarget(baseUrl, target[p], subpath, + mappingKey); } catch (e) { - if (e.code !== 'MODULE_NOT_FOUND') 
throw e; + if (e.code !== 'ERR_PACKAGE_PATH_NOT_EXPORTED') throw e; } break; case 'default': try { - return resolveExportsTarget(pkgPath, target.default, subpath, - basePath, mappingKey); + return resolveExportsTarget(baseUrl, target.default, subpath, + mappingKey); } catch (e) { - if (e.code !== 'MODULE_NOT_FOUND') throw e; + if (e.code !== 'ERR_PACKAGE_PATH_NOT_EXPORTED') throw e; } } } + throw new ERR_PACKAGE_PATH_NOT_EXPORTED( + StringPrototypeSlice(baseUrl.pathname, 0, -1), mappingKey + subpath); } - let e; - if (mappingKey !== '.') { - // eslint-disable-next-line no-restricted-syntax - e = new Error(`Package exports for '${basePath}' do not define a ` + - `valid '${mappingKey}' target${subpath ? ' for ' + subpath : ''}`); - } else { - // eslint-disable-next-line no-restricted-syntax - e = new Error(`No valid exports main found for '${basePath}'`); - } - e.code = 'MODULE_NOT_FOUND'; - throw e; + throw new ERR_INVALID_PACKAGE_TARGET( + StringPrototypeSlice(baseUrl.pathname, 0, -1), mappingKey, subpath, target); } Module._findPath = function(request, paths, isMain) { @@ -1010,7 +1007,6 @@ Module._resolveFilename = function(request, parent, isMain, options) { if (parent && parent.filename) { const filename = trySelf(parent.filename, isMain, request); if (filename) { - emitExperimentalWarning('Package name self resolution'); const cacheKey = request + '\x00' + (paths.length === 1 ? paths[0] : paths.join('\x00')); Module._pathCache[cacheKey] = filename; diff --git a/lib/internal/net.js b/lib/internal/net.js index 728c6f587a8..4a9a156aeab 100644 --- a/lib/internal/net.js +++ b/lib/internal/net.js @@ -41,15 +41,6 @@ function isIP(s) { return 0; } -// Check that the port number is not NaN when coerced to a number, -// is an integer and that it falls within the legal range of port numbers. -function isLegalPort(port) { - if ((typeof port !== 'number' && typeof port !== 'string') || - (typeof port === 'string' && port.trim().length === 0)) - return false; - return +port === (+port >>> 0) && port <= 0xFFFF; -} - function makeSyncWrite(fd) { return function(chunk, enc, cb) { if (enc !== 'buffer') @@ -72,7 +63,6 @@ module.exports = { isIP, isIPv4, isIPv6, - isLegalPort, makeSyncWrite, normalizedArgsSymbol: Symbol('normalizedArgs') }; diff --git a/lib/internal/source_map/source_map.js b/lib/internal/source_map/source_map.js index c440dffdf81..acff068be2a 100644 --- a/lib/internal/source_map/source_map.js +++ b/lib/internal/source_map/source_map.js @@ -152,10 +152,12 @@ class SourceMap { * @param {SourceMapV3} mappingPayload */ #parseMappingPayload = () => { - if (this.#payload.sections) + if (this.#payload.sections) { this.#parseSections(this.#payload.sections); - else + } else { this.#parseMap(this.#payload, 0, 0); + } + this.#mappings.sort(compareSourceMapEntry); } /** @@ -321,6 +323,21 @@ function cloneSourceMapV3(payload) { return payload; } +/** + * @param {Array} entry1 source map entry [lineNumber, columnNumber, sourceURL, + * sourceLineNumber, sourceColumnNumber] + * @param {Array} entry2 source map entry. 
+ * @return {number} + */ +function compareSourceMapEntry(entry1, entry2) { + const [lineNumber1, columnNumber1] = entry1; + const [lineNumber2, columnNumber2] = entry2; + if (lineNumber1 !== lineNumber2) { + return lineNumber1 - lineNumber2; + } + return columnNumber1 - columnNumber2; +} + module.exports = { SourceMap }; diff --git a/lib/internal/streams/destroy.js b/lib/internal/streams/destroy.js index 170ddb3d14f..6eb46c7f7c4 100644 --- a/lib/internal/streams/destroy.js +++ b/lib/internal/streams/destroy.js @@ -7,6 +7,15 @@ function destroy(err, cb) { const r = this._readableState; const w = this._writableState; + if ((w && w.destroyed) || (r && r.destroyed)) { + if (typeof cb === 'function') { + // TODO(ronag): Invoke with `'close'`/`'error'`. + cb(); + } + + return this; + } + if (err) { if (w) { w.errored = true; @@ -16,16 +25,6 @@ function destroy(err, cb) { } } - if ((w && w.destroyed) || (r && r.destroyed)) { - if (cb) { - cb(err); - } else if (err) { - process.nextTick(emitErrorNT, this, err); - } - - return this; - } - // We set destroyed to true before firing error callbacks in order // to make it re-entrance safe in case destroy() is called within callbacks @@ -53,13 +52,11 @@ function destroy(err, cb) { r.closed = true; } - if (cb) { - // Invoke callback before scheduling emitClose so that callback - // can schedule before. + if (typeof cb === 'function') { cb(err); - // Don't emit 'error' if passed a callback. - process.nextTick(emitCloseNT, this); - } else if (err) { + } + + if (err) { process.nextTick(emitErrorCloseNT, this, err); } else { process.nextTick(emitCloseNT, this); @@ -138,6 +135,10 @@ function errorOrDestroy(stream, err, sync) { const r = stream._readableState; const w = stream._writableState; + if ((w && w.destroyed) || (r && r.destroyed)) { + return this; + } + if ((r && r.autoDestroy) || (w && w.autoDestroy)) stream.destroy(err); else if (err) { @@ -162,7 +163,6 @@ function isRequest(stream) { // Normalize destroy for legacy. 
function destroyer(stream, err) { - // request.destroy just do .end - .abort is what we want if (isRequest(stream)) return stream.abort(); if (isRequest(stream.req)) return stream.req.abort(); if (typeof stream.destroy === 'function') return stream.destroy(err); diff --git a/lib/internal/streams/end-of-stream.js b/lib/internal/streams/end-of-stream.js index fcbca7d21a5..aad43823f9a 100644 --- a/lib/internal/streams/end-of-stream.js +++ b/lib/internal/streams/end-of-stream.js @@ -34,6 +34,13 @@ function isWritableFinished(stream) { function nop() {} +function isReadableEnded(stream) { + if (stream.readableEnded) return true; + const rState = stream._readableState; + if (!rState || rState.errored) return false; + return rState.endEmitted || (rState.ended && rState.length === 0); +} + function eos(stream, opts, callback) { if (arguments.length === 2) { callback = opts; @@ -49,9 +56,9 @@ function eos(stream, opts, callback) { callback = once(callback); - let readable = opts.readable || + const readable = opts.readable || (opts.readable !== false && isReadable(stream)); - let writable = opts.writable || + const writable = opts.writable || (opts.writable !== false && isWritable(stream)); const wState = stream._writableState; @@ -62,19 +69,17 @@ function eos(stream, opts, callback) { }; let writableFinished = stream.writableFinished || - (rState && rState.finished); + (wState && wState.finished); const onfinish = () => { - writable = false; writableFinished = true; - if (!readable) callback.call(stream); + if (!readable || readableEnded) callback.call(stream); }; let readableEnded = stream.readableEnded || (rState && rState.endEmitted); const onend = () => { - readable = false; readableEnded = true; - if (!writable) callback.call(stream); + if (!writable || writableFinished) callback.call(stream); }; const onerror = (err) => { @@ -82,17 +87,15 @@ function eos(stream, opts, callback) { }; const onclose = () => { - let err; if (readable && !readableEnded) { - if (!rState || !rState.ended) - err = new ERR_STREAM_PREMATURE_CLOSE(); - return callback.call(stream, err); + if (!isReadableEnded(stream)) + return callback.call(stream, new ERR_STREAM_PREMATURE_CLOSE()); } if (writable && !writableFinished) { if (!isWritableFinished(stream)) - err = new ERR_STREAM_PREMATURE_CLOSE(); - return callback.call(stream, err); + return callback.call(stream, new ERR_STREAM_PREMATURE_CLOSE()); } + callback.call(stream); }; const onrequest = () => { diff --git a/lib/internal/streams/pipeline.js b/lib/internal/streams/pipeline.js index 1ead5cdf9f3..f5535bbff05 100644 --- a/lib/internal/streams/pipeline.js +++ b/lib/internal/streams/pipeline.js @@ -25,31 +25,41 @@ let EE; let PassThrough; let createReadableStreamAsyncIterator; -function destroyer(stream, reading, writing, callback) { - callback = once(callback); - - let closed = false; - stream.on('close', () => { - closed = true; +function destroyer(stream, reading, writing, final, callback) { + const _destroy = once((err) => { + const readable = stream.readable || isRequest(stream); + if (err || !final || !readable) { + destroyImpl.destroyer(stream, err); + } + callback(err); }); if (eos === undefined) eos = require('internal/streams/end-of-stream'); eos(stream, { readable: reading, writable: writing }, (err) => { - if (err) return callback(err); - closed = true; - callback(); + const rState = stream._readableState; + if ( + err && + err.code === 'ERR_STREAM_PREMATURE_CLOSE' && + reading && + (rState && rState.ended && !rState.errored && !rState.errorEmitted) + ) { + 
// Some readable streams will emit 'close' before 'end'. However, since + // this is on the readable side 'end' should still be emitted if the + // stream has been ended and no error emitted. This should be allowed in + // favor of backwards compatibility. Since the stream is piped to a + // destination this should not result in any observable difference. + // We don't need to check if this is a writable premature close since + // eos will only fail with premature close on the reading side for + // duplex streams. + stream + .once('end', _destroy) + .once('error', _destroy); + } else { + _destroy(err); + } }); - let destroyed = false; - return (err) => { - if (closed) return; - if (destroyed) return; - destroyed = true; - - destroyImpl.destroyer(stream, err); - - callback(err || new ERR_STREAM_DESTROYED('pipe')); - }; + return (err) => _destroy(err || new ERR_STREAM_DESTROYED('pipe')); } function popCallback(streams) { @@ -61,6 +71,10 @@ function popCallback(streams) { return streams.pop(); } +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} + function isPromise(obj) { return !!(obj && typeof obj.then === 'function'); } @@ -132,9 +146,10 @@ function pipeline(...streams) { } let error; + let value; const destroys = []; - function finish(err, val, final) { + function finish(err, final) { if (!error && err) { error = err; } @@ -146,13 +161,13 @@ function pipeline(...streams) { } if (final) { - callback(error, val); + callback(error, value); } } function wrap(stream, reading, writing, final) { - destroys.push(destroyer(stream, reading, writing, (err) => { - finish(err, null, final); + destroys.push(destroyer(stream, reading, writing, final, (err) => { + finish(err, final); })); } @@ -194,15 +209,19 @@ function pipeline(...streams) { PassThrough = require('_stream_passthrough'); } + // If the last argument to pipeline is not a stream + // we must create a proxy stream so that pipeline(...) + // always returns a stream which can be further + // composed through `.pipe(stream)`. + const pt = new PassThrough(); if (isPromise(ret)) { ret .then((val) => { + value = val; pt.end(val); - finish(null, val, true); - }) - .catch((err) => { - finish(err, null, true); + }, (err) => { + pt.destroy(err); }); } else if (isIterable(ret, true)) { pump(ret, pt, finish); @@ -212,7 +231,7 @@ function pipeline(...streams) { } ret = pt; - wrap(ret, true, false, true); + wrap(ret, false, true, true); } } else if (isStream(stream)) { if (isReadable(ret)) { @@ -229,6 +248,9 @@ function pipeline(...streams) { } } + // TODO(ronag): Consider returning a Duplex proxy if the first argument + // is a writable. Would improve composability. + // See, https://github.com/nodejs/node/issues/32020 return ret; } diff --git a/lib/internal/validators.js b/lib/internal/validators.js index b7c3711d614..46237e54342 100644 --- a/lib/internal/validators.js +++ b/lib/internal/validators.js @@ -10,6 +10,7 @@ const { const { hideStackFrames, codes: { + ERR_SOCKET_BAD_PORT, ERR_INVALID_ARG_TYPE, ERR_INVALID_ARG_VALUE, ERR_OUT_OF_RANGE, @@ -180,6 +181,19 @@ function validateEncoding(data, encoding) { } } +// Check that the port number is not NaN when coerced to a number, +// is an integer and that it falls within the legal range of port numbers. 
+function validatePort(port, name = 'Port', { allowZero = true } = {}) { + if ((typeof port !== 'number' && typeof port !== 'string') || + (typeof port === 'string' && port.trim().length === 0) || + +port !== (+port >>> 0) || + port > 0xFFFF || + (port === 0 && !allowZero)) { + throw new ERR_SOCKET_BAD_PORT(name, port); + } + return port | 0; +} + module.exports = { isInt32, isUint32, @@ -188,11 +202,12 @@ module.exports = { validateBoolean, validateBuffer, validateEncoding, - validateObject, - validateInteger, validateInt32, - validateUint32, - validateString, + validateInteger, validateNumber, - validateSignalName + validateObject, + validatePort, + validateSignalName, + validateString, + validateUint32, }; diff --git a/lib/internal/worker.js b/lib/internal/worker.js index b690ab82deb..7fc45695851 100644 --- a/lib/internal/worker.js +++ b/lib/internal/worker.js @@ -25,6 +25,8 @@ const { ERR_WORKER_UNSUPPORTED_EXTENSION, ERR_WORKER_INVALID_EXEC_ARGV, ERR_INVALID_ARG_TYPE, + // eslint-disable-next-line no-unused-vars + ERR_WORKER_INIT_FAILED, } = errorCodes; const { validateString } = require('internal/validators'); const { getOptionValue } = require('internal/options'); @@ -104,7 +106,7 @@ class Worker extends EventEmitter { filename = path.resolve(filename); const ext = path.extname(filename); - if (!/^\.[cm]?js$/.test(ext)) { + if (ext !== '.js' && ext !== '.mjs' && ext !== '.cjs') { throw new ERR_WORKER_UNSUPPORTED_EXTENSION(ext); } } @@ -136,7 +138,9 @@ class Worker extends EventEmitter { throw new ERR_WORKER_INVALID_EXEC_ARGV( this[kHandle].invalidNodeOptions, 'invalid NODE_OPTIONS env variable'); } - this[kHandle].onexit = (code, customErr) => this[kOnExit](code, customErr); + this[kHandle].onexit = (code, customErr, customErrReason) => { + this[kOnExit](code, customErr, customErrReason); + }; this[kPort] = this[kHandle].messagePort; this[kPort].on('message', (data) => this[kOnMessage](data)); this[kPort].start(); @@ -181,14 +185,15 @@ class Worker extends EventEmitter { this[kHandle].startThread(); } - [kOnExit](code, customErr) { + [kOnExit](code, customErr, customErrReason) { debug(`[${threadId}] hears end event for Worker ${this.threadId}`); drainMessagePort(this[kPublicPort]); drainMessagePort(this[kPort]); this[kDispose](); if (customErr) { - debug(`[${threadId}] failing with custom error ${customErr}`); - this.emit('error', new errorCodes[customErr]()); + debug(`[${threadId}] failing with custom error ${customErr} \ + and with reason ${customErrReason}`); + this.emit('error', new errorCodes[customErr](customErrReason)); } this.emit('exit', code); this.removeAllListeners(); } diff --git a/lib/net.js b/lib/net.js index 9ff6f12b9bc..0d3cc29db64 100644 --- a/lib/net.js +++ b/lib/net.js @@ -41,7 +41,6 @@ const { isIP, isIPv4, isIPv6, - isLegalPort, normalizedArgsSymbol, makeSyncWrite } = require('internal/net'); @@ -92,7 +91,6 @@ const { ERR_INVALID_OPT_VALUE, ERR_SERVER_ALREADY_LISTEN, ERR_SERVER_NOT_RUNNING, - ERR_SOCKET_BAD_PORT, ERR_SOCKET_CLOSED }, errnoException, @@ -100,7 +98,11 @@ const { uvExceptionWithHostPort } = require('internal/errors'); const { isUint8Array } = require('internal/util/types'); -const { validateInt32, validateString } = require('internal/validators'); +const { + validateInt32, + validatePort, + validateString +} = require('internal/validators'); const kLastWriteQueueSize = Symbol('lastWriteQueueSize'); const { DTRACE_NET_SERVER_CONNECTION, @@ -998,9 +1000,7 @@ function lookupAndConnect(self, options) { throw new ERR_INVALID_ARG_TYPE('options.port',
['number', 'string'], port); } - if (!isLegalPort(port)) { - throw new ERR_SOCKET_BAD_PORT(port); - } + validatePort(port); } port |= 0; @@ -1437,9 +1437,7 @@ Server.prototype.listen = function(...args) { // or if options.port is normalized as 0 before let backlog; if (typeof options.port === 'number' || typeof options.port === 'string') { - if (!isLegalPort(options.port)) { - throw new ERR_SOCKET_BAD_PORT(options.port); - } + validatePort(options.port, 'options.port'); backlog = options.backlog || backlogFromArgs; // start TCP server listening on host:port if (options.host) { diff --git a/lib/os.js b/lib/os.js index f986b1cf878..395a372a00f 100644 --- a/lib/os.js +++ b/lib/os.js @@ -45,9 +45,8 @@ const { getHostname: _getHostname, getInterfaceAddresses: _getInterfaceAddresses, getLoadAvg, - getOSRelease: _getOSRelease, - getOSType: _getOSType, getPriority: _getPriority, + getOSInformation: _getOSInformation, getTotalMem, getUserInfo, getUptime, @@ -66,17 +65,25 @@ function getCheckedFunction(fn) { }); } +const [ + type, + version, + release +] = _getOSInformation(); + const getHomeDirectory = getCheckedFunction(_getHomeDirectory); const getHostname = getCheckedFunction(_getHostname); const getInterfaceAddresses = getCheckedFunction(_getInterfaceAddresses); -const getOSRelease = getCheckedFunction(_getOSRelease); -const getOSType = getCheckedFunction(_getOSType); +const getOSRelease = () => release; +const getOSType = () => type; +const getOSVersion = () => version; getFreeMem[SymbolToPrimitive] = () => getFreeMem(); getHostname[SymbolToPrimitive] = () => getHostname(); -getHomeDirectory[SymbolToPrimitive] = () => getHomeDirectory(); -getOSRelease[SymbolToPrimitive] = () => getOSRelease(); +getOSVersion[SymbolToPrimitive] = () => getOSVersion(); getOSType[SymbolToPrimitive] = () => getOSType(); +getOSRelease[SymbolToPrimitive] = () => getOSRelease(); +getHomeDirectory[SymbolToPrimitive] = () => getHomeDirectory(); getTotalMem[SymbolToPrimitive] = () => getTotalMem(); getUptime[SymbolToPrimitive] = () => getUptime(); @@ -281,6 +288,7 @@ module.exports = { type: getOSType, userInfo, uptime: getUptime, + version: getOSVersion }; ObjectDefineProperties(module.exports, { diff --git a/lib/perf_hooks.js b/lib/perf_hooks.js index 267b4577ffe..a1417057282 100644 --- a/lib/perf_hooks.js +++ b/lib/perf_hooks.js @@ -3,7 +3,6 @@ const { ArrayIsArray, Boolean, - Map, NumberIsSafeInteger, ObjectDefineProperties, ObjectDefineProperty, @@ -52,16 +51,18 @@ const kInspect = require('internal/util').customInspectSymbol; const { ERR_INVALID_CALLBACK, - ERR_INVALID_ARG_VALUE, ERR_INVALID_ARG_TYPE, ERR_INVALID_OPT_VALUE, ERR_VALID_PERFORMANCE_ENTRY_TYPE, ERR_INVALID_PERFORMANCE_MARK } = require('internal/errors').codes; +const { + Histogram, + kHandle, +} = require('internal/histogram'); + const { setImmediate } = require('timers'); -const kHandle = Symbol('handle'); -const kMap = Symbol('map'); const kCallback = Symbol('callback'); const kTypes = Symbol('types'); const kEntries = Symbol('entries'); @@ -557,47 +558,9 @@ function sortedInsert(list, entry) { list.splice(location, 0, entry); } -class ELDHistogram { - constructor(handle) { - this[kHandle] = handle; - this[kMap] = new Map(); - } - - reset() { this[kHandle].reset(); } +class ELDHistogram extends Histogram { enable() { return this[kHandle].enable(); } disable() { return this[kHandle].disable(); } - - get exceeds() { return this[kHandle].exceeds(); } - get min() { return this[kHandle].min(); } - get max() { return this[kHandle].max(); } - get mean() { 
return this[kHandle].mean(); } - get stddev() { return this[kHandle].stddev(); } - percentile(percentile) { - if (typeof percentile !== 'number') { - throw new ERR_INVALID_ARG_TYPE('percentile', 'number', percentile); - } - if (percentile <= 0 || percentile > 100) { - throw new ERR_INVALID_ARG_VALUE.RangeError('percentile', - percentile); - } - return this[kHandle].percentile(percentile); - } - get percentiles() { - this[kMap].clear(); - this[kHandle].percentiles(this[kMap]); - return this[kMap]; - } - - [kInspect]() { - return { - min: this.min, - max: this.max, - mean: this.mean, - stddev: this.stddev, - percentiles: this.percentiles, - exceeds: this.exceeds - }; - } } function monitorEventLoopDelay(options = {}) { diff --git a/lib/repl.js b/lib/repl.js index a87e3c01c9b..00820953b55 100644 --- a/lib/repl.js +++ b/lib/repl.js @@ -326,12 +326,12 @@ function REPLServer(prompt, let awaitPromise = false; const input = code; - if (/^\s*{/.test(code) && /}\s*$/.test(code)) { - // It's confusing for `{ a : 1 }` to be interpreted as a block - // statement rather than an object literal. So, we first try - // to wrap it in parentheses, so that it will be interpreted as - // an expression. Note that if the above condition changes, - // lib/internal/repl/utils.js needs to be changed to match. + // It's confusing for `{ a : 1 }` to be interpreted as a block + // statement rather than an object literal. So, we first try + // to wrap it in parentheses, so that it will be interpreted as + // an expression. Note that if the condition below changes, + // lib/internal/repl/utils.js needs to be changed to match. + if (/^\s*{/.test(code) && !/;\s*$/.test(code)) { code = `(${code.trim()})\n`; wrappedCmd = true; } diff --git a/lib/vm.js b/lib/vm.js index 1d0715af528..c2d8908703b 100644 --- a/lib/vm.js +++ b/lib/vm.js @@ -24,16 +24,21 @@ const { ArrayPrototypeForEach, Symbol, + PromiseReject } = primordials; const { ContextifyScript, makeContext, isContext: _isContext, - compileFunction: _compileFunction + constants, + compileFunction: _compileFunction, + measureMemory: _measureMemory, } = internalBinding('contextify'); const { + ERR_CONTEXT_NOT_INITIALIZED, ERR_INVALID_ARG_TYPE, + ERR_INVALID_ARG_VALUE, } = require('internal/errors').codes; const { isArrayBufferView, @@ -47,7 +52,10 @@ const { validateBuffer, validateObject, } = require('internal/validators'); -const { kVmBreakFirstLineSymbol } = require('internal/util'); +const { + kVmBreakFirstLineSymbol, + emitExperimentalWarning, +} = require('internal/util'); const kParsingContext = Symbol('script parsing context'); class Script extends ContextifyScript { @@ -355,6 +363,30 @@ function compileFunction(code, params, options = {}) { return result.function; } +const measureMemoryModes = { + summary: constants.measureMemory.mode.SUMMARY, + detailed: constants.measureMemory.mode.DETAILED, +}; + +function measureMemory(options = {}) { + emitExperimentalWarning('vm.measureMemory'); + validateObject(options, 'options'); + const { mode = 'summary', context } = options; + if (mode !== 'summary' && mode !== 'detailed') { + throw new ERR_INVALID_ARG_VALUE( + 'options.mode', options.mode, + 'must be either \'summary\' or \'detailed\''); + } + if (context !== undefined && + (typeof context !== 'object' || context === null || !_isContext(context))) { + throw new ERR_INVALID_ARG_TYPE('options.context', 'vm.Context', context); + } + const result = _measureMemory(measureMemoryModes[mode], context); + if (result === undefined) { + return PromiseReject(new
ERR_CONTEXT_NOT_INITIALIZED()); + } + return result; +} module.exports = { Script, @@ -365,6 +397,7 @@ module.exports = { runInThisContext, isContext, compileFunction, + measureMemory, }; if (require('internal/options').getOptionValue('--experimental-vm-modules')) { diff --git a/lib/wasi.js b/lib/wasi.js index 8cc1a09f5e7..ac47350666a 100644 --- a/lib/wasi.js +++ b/lib/wasi.js @@ -15,6 +15,7 @@ const { } = require('internal/errors').codes; const { emitExperimentalWarning } = require('internal/util'); const { WASI: _WASI } = internalBinding('wasi'); +const kExitCode = Symbol('exitCode'); const kSetMemory = Symbol('setMemory'); const kStarted = Symbol('started'); @@ -26,7 +27,7 @@ class WASI { if (options === null || typeof options !== 'object') throw new ERR_INVALID_ARG_TYPE('options', 'object', options); - const { env, preopens } = options; + const { env, preopens, returnOnExit = false } = options; let { args = [] } = options; if (ArrayIsArray(args)) @@ -56,16 +57,26 @@ class WASI { throw new ERR_INVALID_ARG_TYPE('options.preopens', 'Object', preopens); } + if (typeof returnOnExit !== 'boolean') { + throw new ERR_INVALID_ARG_TYPE( + 'options.returnOnExit', 'boolean', returnOnExit); + } + const wrap = new _WASI(args, envPairs, preopenArray); for (const prop in wrap) { wrap[prop] = FunctionPrototypeBind(wrap[prop], wrap); } + if (returnOnExit) { + wrap.proc_exit = FunctionPrototypeBind(wasiReturnOnProcExit, this); + } + this[kSetMemory] = wrap._setMemory; delete wrap._setMemory; this.wasiImport = wrap; this[kStarted] = false; + this[kExitCode] = 0; } start(instance) { @@ -93,12 +104,30 @@ class WASI { this[kStarted] = true; this[kSetMemory](memory); - if (exports._start) - exports._start(); - else if (exports.__wasi_unstable_reactor_start) - exports.__wasi_unstable_reactor_start(); + try { + if (exports._start) + exports._start(); + else if (exports.__wasi_unstable_reactor_start) + exports.__wasi_unstable_reactor_start(); + } catch (err) { + if (err !== kExitCode) { + throw err; + } + } + + return this[kExitCode]; } } module.exports = { WASI }; + + +function wasiReturnOnProcExit(rval) { + // If __wasi_proc_exit() does not terminate the process, an assertion is + // triggered in the wasm runtime. Node can sidestep the assertion and return + // an exit code by recording the exit code, and throwing a JavaScript + // exception that WebAssembly cannot catch. 
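vm.measureMemory(), added above, is experimental (emitExperimentalWarning fires on first use) and resolves asynchronously; when the target context was never initialized it instead returns a promise rejected with ERR_CONTEXT_NOT_INITIALIZED. A sketch of both modes (the result's exact shape comes from V8's memory-measurement API and is not shown in this patch):

```js
const vm = require('vm');

// Default: a summary measurement of the main context.
vm.measureMemory({ mode: 'summary' })
  .then((result) => console.log(result));

// Detailed mode, scoped to a specific contextified object.
const context = vm.createContext({});
vm.measureMemory({ mode: 'detailed', context })
  .then((result) => console.log(result))
  .catch((err) => console.error(err.code));
```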
+ this[kExitCode] = rval; + throw kExitCode; +} diff --git a/node.gyp b/node.gyp index 8c7911732b9..170501477a6 100644 --- a/node.gyp +++ b/node.gyp @@ -51,6 +51,7 @@ 'lib/domain.js', 'lib/events.js', 'lib/fs.js', + 'lib/fs/promises.js', 'lib/http.js', 'lib/http2.js', 'lib/_http_agent.js', @@ -140,6 +141,7 @@ 'lib/internal/fs/watchers.js', 'lib/internal/http.js', 'lib/internal/heap_utils.js', + 'lib/internal/histogram.js', 'lib/internal/idna.js', 'lib/internal/inspector_async_hook.js', 'lib/internal/js_stream_socket.js', @@ -249,6 +251,11 @@ 'node_mksnapshot_exec': '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)node_mksnapshot<(EXECUTABLE_SUFFIX)', 'mkcodecache_exec': '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)mkcodecache<(EXECUTABLE_SUFFIX)', 'conditions': [ + ['GENERATOR == "ninja"', { + 'node_text_start_object_path': 'src/large_pages/node_text_start.node_text_start.o' + }, { + 'node_text_start_object_path': 'node_text_start/src/large_pages/node_text_start.o' + }], [ 'node_shared=="true"', { 'node_target_type%': 'shared_library', 'conditions': [ @@ -312,6 +319,19 @@ }, 'targets': [ + { + 'target_name': 'node_text_start', + 'type': 'none', + 'conditions': [ + [ 'OS=="linux" and ' + 'target_arch=="x64"', { + 'type': 'static_library', + 'sources': [ + 'src/large_pages/node_text_start.S' + ] + }], + ] + }, { 'target_name': '<(node_core_target_name)', 'type': 'executable', @@ -497,6 +517,13 @@ 'src/node_snapshot_stub.cc' ], }], + [ 'OS=="linux" and ' + 'target_arch=="x64"', { + 'dependencies': [ 'node_text_start' ], + 'ldflags+': [ + '<(obj_dir)/<(node_text_start_object_path)' + ] + }], ], }, # node_core_target_name { @@ -533,6 +560,7 @@ 'src/fs_event_wrap.cc', 'src/handle_wrap.cc', 'src/heap_utils.cc', + 'src/histogram.cc', 'src/js_native_api.h', 'src/js_native_api_types.h', 'src/js_native_api_v8.cc', @@ -570,6 +598,7 @@ 'src/node_process_methods.cc', 'src/node_process_object.cc', 'src/node_serdes.cc', + 'src/node_sockaddr.cc', 'src/node_stat_watcher.cc', 'src/node_symbols.cc', 'src/node_task_queue.cc', @@ -619,6 +648,8 @@ 'src/histogram.h', 'src/histogram-inl.h', 'src/js_stream.h', + 'src/large_pages/node_large_page.cc', + 'src/large_pages/node_large_page.h', 'src/memory_tracker.h', 'src/memory_tracker-inl.h', 'src/module_wrap.h', @@ -634,6 +665,8 @@ 'src/node_errors.h', 'src/node_file.h', 'src/node_file-inl.h', + 'src/node_http_common.h', + 'src/node_http_common-inl.h', 'src/node_http2.h', 'src/node_http2_state.h', 'src/node_i18n.h', @@ -655,6 +688,8 @@ 'src/node_process.h', 'src/node_revert.h', 'src/node_root_certs.h', + 'src/node_sockaddr.h', + 'src/node_sockaddr-inl.h', 'src/node_stat_watcher.h', 'src/node_union_bytes.h', 'src/node_url.h', @@ -818,9 +853,11 @@ [ 'node_use_openssl=="true"', { 'sources': [ 'src/node_crypto.cc', + 'src/node_crypto_common.cc', 'src/node_crypto_bio.cc', 'src/node_crypto_clienthello.cc', 'src/node_crypto.h', + 'src/node_crypto_common.h', 'src/node_crypto_bio.h', 'src/node_crypto_clienthello.h', 'src/node_crypto_clienthello-inl.h', @@ -850,10 +887,6 @@ 'target_arch=="x64" and ' 'node_target_type=="executable"', { 'defines': [ 'NODE_ENABLE_LARGE_CODE_PAGES=1' ], - 'sources': [ - 'src/large_pages/node_large_page.cc', - 'src/large_pages/node_large_page.h' - ], }], [ 'use_openssl_def==1', { # TODO(bnoordhuis) Make all platforms export the same list of symbols. 
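The returnOnExit option wired up in lib/wasi.js above turns __wasi_proc_exit() from a process-terminating call into one that records the exit code and unwinds with a sentinel throw, which start() catches and converts into a return value. A usage sketch, assuming a compiled app.wasm and the era's --experimental-wasi-unstable-preview1 flag (the import namespace depends on how the module was built):

```js
'use strict';
// Run with: node --experimental-wasi-unstable-preview1 run-wasi.js
const fs = require('fs');
const { WASI } = require('wasi');

const wasi = new WASI({
  args: process.argv.slice(2),
  env: process.env,
  returnOnExit: true,  // capture proc_exit's code instead of exiting Node
});

(async () => {
  const wasm = await WebAssembly.compile(fs.readFileSync('./app.wasm'));
  const instance = await WebAssembly.instantiate(wasm, {
    wasi_snapshot_preview1: wasi.wasiImport,
  });
  const exitCode = wasi.start(instance);  // now returns this[kExitCode]
  console.log(`guest exited with code ${exitCode}`);
})();
```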
@@ -1123,6 +1156,7 @@ 'test/cctest/test_linked_binding.cc', 'test/cctest/test_per_process.cc', 'test/cctest/test_platform.cc', + 'test/cctest/test_sockaddr.cc', 'test/cctest/test_traced_value.cc', 'test/cctest/test_util.cc', 'test/cctest/test_url.cc', @@ -1225,6 +1259,16 @@ ], 'conditions': [ + [ 'node_use_openssl=="true"', { + 'defines': [ + 'HAVE_OPENSSL=1', + ], + }], + ['v8_enable_inspector==1', { + 'defines': [ + 'HAVE_INSPECTOR=1', + ], + }], ['OS=="win"', { 'libraries': [ 'dbghelp.lib', @@ -1269,6 +1313,16 @@ ], 'conditions': [ + [ 'node_use_openssl=="true"', { + 'defines': [ + 'HAVE_OPENSSL=1', + ], + }], + ['v8_enable_inspector==1', { + 'defines': [ + 'HAVE_INSPECTOR=1', + ], + }], ['OS=="win"', { 'libraries': [ 'Dbghelp.lib', diff --git a/doc/onboarding.md b/onboarding.md similarity index 100% rename from doc/onboarding.md rename to onboarding.md diff --git a/src/README.md b/src/README.md index 40790b278ac..2e59c51c3c3 100644 --- a/src/README.md +++ b/src/README.md @@ -903,7 +903,7 @@ static void GetUserInfo(const FunctionCallbackInfo& args) { [`v8.h` in Node.js master]: https://github.com/nodejs/node/blob/master/deps/v8/include/v8.h [`v8.h` in V8 master]: https://github.com/v8/v8/blob/master/include/v8.h [`vm` module]: https://nodejs.org/api/vm.html -[C++ coding style]: ../CPP_STYLE_GUIDE.md +[C++ coding style]: ../doc/guides/cpp-style-guide.md [Callback scopes]: #callback-scopes [JavaScript value handles]: #js-handles [N-API]: https://nodejs.org/api/n-api.html diff --git a/src/aliased_buffer.h b/src/aliased_buffer.h index b083fb68e69..281c8fed581 100644 --- a/src/aliased_buffer.h +++ b/src/aliased_buffer.h @@ -4,7 +4,7 @@ #if defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS #include -#include "util.h" +#include "util-inl.h" #include "v8.h" namespace node { diff --git a/src/api/callback.cc b/src/api/callback.cc index 74a7836391d..f7e7ddedfae 100644 --- a/src/api/callback.cc +++ b/src/api/callback.cc @@ -62,16 +62,16 @@ InternalCallbackScope::InternalCallbackScope(Environment* env, // If you hit this assertion, you forgot to enter the v8::Context first. CHECK_EQ(Environment::GetCurrent(env->isolate()), env); + env->async_hooks()->push_async_context( + async_context_.async_id, async_context_.trigger_async_id, object); + + pushed_ids_ = true; + if (asyncContext.async_id != 0 && !skip_hooks_) { // No need to check a return value because the application will exit if // an exception occurs. 
AsyncWrap::EmitBefore(env, asyncContext.async_id); } - - env->async_hooks()->push_async_context(async_context_.async_id, - async_context_.trigger_async_id, object); - - pushed_ids_ = true; } InternalCallbackScope::~InternalCallbackScope() { @@ -88,15 +88,15 @@ void InternalCallbackScope::Close() { env_->async_hooks()->clear_async_id_stack(); } + if (!failed_ && async_context_.async_id != 0 && !skip_hooks_) { + AsyncWrap::EmitAfter(env_, async_context_.async_id); + } + if (pushed_ids_) env_->async_hooks()->pop_async_context(async_context_.async_id); if (failed_) return; - if (async_context_.async_id != 0 && !skip_hooks_) { - AsyncWrap::EmitAfter(env_, async_context_.async_id); - } - if (env_->async_callback_scope_depth() > 1 || skip_task_queues_) { return; } @@ -139,6 +139,7 @@ void InternalCallbackScope::Close() { } MaybeLocal InternalMakeCallback(Environment* env, + Local resource, Local recv, const Local callback, int argc, @@ -150,7 +151,7 @@ MaybeLocal InternalMakeCallback(Environment* env, CHECK(!argv[i].IsEmpty()); #endif - InternalCallbackScope scope(env, recv, asyncContext); + InternalCallbackScope scope(env, resource, asyncContext); if (scope.Failed()) { return MaybeLocal(); } @@ -224,7 +225,7 @@ MaybeLocal MakeCallback(Isolate* isolate, CHECK_NOT_NULL(env); Context::Scope context_scope(env->context()); MaybeLocal ret = - InternalMakeCallback(env, recv, callback, argc, argv, asyncContext); + InternalMakeCallback(env, recv, recv, callback, argc, argv, asyncContext); if (ret.IsEmpty() && env->async_callback_scope_depth() == 0) { // This is only for legacy compatibility and we may want to look into // removing/adjusting it. diff --git a/src/api/environment.cc b/src/api/environment.cc index 0e1812c369c..456854a318e 100644 --- a/src/api/environment.cc +++ b/src/api/environment.cc @@ -347,23 +347,8 @@ Environment* CreateEnvironment(IsolateData* isolate_data, Environment::kOwnsProcessState | Environment::kOwnsInspector)); env->InitializeLibuv(per_process::v8_is_profiling); - if (env->RunBootstrapping().IsEmpty()) { + if (env->RunBootstrapping().IsEmpty()) return nullptr; - } - - std::vector> parameters = { - env->require_string(), - FIXED_ONE_BYTE_STRING(env->isolate(), "markBootstrapComplete")}; - std::vector> arguments = { - env->native_module_require(), - env->NewFunctionTemplate(MarkBootstrapComplete) - ->GetFunction(env->context()) - .ToLocalChecked()}; - if (ExecuteBootstrapper( - env, "internal/bootstrap/environment", ¶meters, &arguments) - .IsEmpty()) { - return nullptr; - } return env; } @@ -405,7 +390,8 @@ MaybeLocal GetPerContextExports(Local context) { return handle_scope.Escape(existing_value.As()); Local exports = Object::New(isolate); - if (context->Global()->SetPrivate(context, key, exports).IsNothing()) + if (context->Global()->SetPrivate(context, key, exports).IsNothing() || + !InitializePrimordials(context)) return MaybeLocal(); return handle_scope.Escape(exports); } @@ -461,49 +447,50 @@ bool InitializeContextForSnapshot(Local context) { context->SetEmbedderData(ContextEmbedderIndex::kAllowWasmCodeGeneration, True(isolate)); + return InitializePrimordials(context); +} + +bool InitializePrimordials(Local context) { + // Run per-context JS files. 
+ Isolate* isolate = context->GetIsolate(); + Context::Scope context_scope(context); + Local exports; + + Local primordials_string = + FIXED_ONE_BYTE_STRING(isolate, "primordials"); + Local global_string = FIXED_ONE_BYTE_STRING(isolate, "global"); + Local exports_string = FIXED_ONE_BYTE_STRING(isolate, "exports"); + + // Create primordials first and make it available to per-context scripts. + Local primordials = Object::New(isolate); + if (!primordials->SetPrototype(context, Null(isolate)).FromJust() || + !GetPerContextExports(context).ToLocal(&exports) || + !exports->Set(context, primordials_string, primordials).FromJust()) { + return false; + } - { - // Run per-context JS files. - Context::Scope context_scope(context); - Local exports; - - Local primordials_string = - FIXED_ONE_BYTE_STRING(isolate, "primordials"); - Local global_string = FIXED_ONE_BYTE_STRING(isolate, "global"); - Local exports_string = FIXED_ONE_BYTE_STRING(isolate, "exports"); - - // Create primordials first and make it available to per-context scripts. - Local primordials = Object::New(isolate); - if (!primordials->SetPrototype(context, Null(isolate)).FromJust() || - !GetPerContextExports(context).ToLocal(&exports) || - !exports->Set(context, primordials_string, primordials).FromJust()) { + static const char* context_files[] = {"internal/per_context/primordials", + "internal/per_context/domexception", + "internal/per_context/messageport", + nullptr}; + + for (const char** module = context_files; *module != nullptr; module++) { + std::vector> parameters = { + global_string, exports_string, primordials_string}; + Local arguments[] = {context->Global(), exports, primordials}; + MaybeLocal maybe_fn = + native_module::NativeModuleEnv::LookupAndCompile( + context, *module, ¶meters, nullptr); + if (maybe_fn.IsEmpty()) { return false; } - - static const char* context_files[] = {"internal/per_context/primordials", - "internal/per_context/domexception", - "internal/per_context/messageport", - nullptr}; - - for (const char** module = context_files; *module != nullptr; module++) { - std::vector> parameters = { - global_string, exports_string, primordials_string}; - Local arguments[] = {context->Global(), exports, primordials}; - MaybeLocal maybe_fn = - native_module::NativeModuleEnv::LookupAndCompile( - context, *module, ¶meters, nullptr); - if (maybe_fn.IsEmpty()) { - return false; - } - Local fn = maybe_fn.ToLocalChecked(); - MaybeLocal result = - fn->Call(context, Undefined(isolate), - arraysize(arguments), arguments); - // Execution failed during context creation. - // TODO(joyeecheung): deprecate this signature and return a MaybeLocal. - if (result.IsEmpty()) { - return false; - } + Local fn = maybe_fn.ToLocalChecked(); + MaybeLocal result = + fn->Call(context, Undefined(isolate), arraysize(arguments), arguments); + // Execution failed during context creation. + // TODO(joyeecheung): deprecate this signature and return a MaybeLocal. + if (result.IsEmpty()) { + return false; } } diff --git a/src/async_wrap-inl.h b/src/async_wrap-inl.h index e3e48666e4f..03745081f3b 100644 --- a/src/async_wrap-inl.h +++ b/src/async_wrap-inl.h @@ -74,9 +74,8 @@ inline v8::MaybeLocal AsyncWrap::MakeCallback( if (!object()->Get(env()->context(), symbol).ToLocal(&cb_v)) return v8::MaybeLocal(); if (!cb_v->IsFunction()) { - // TODO(addaleax): We should throw an error here to fulfill the - // `MaybeLocal<>` API contract. 
- return v8::MaybeLocal(); + v8::Isolate* isolate = env()->isolate(); + return Undefined(isolate); } return MakeCallback(cb_v.As(), argc, argv); } diff --git a/src/async_wrap.cc b/src/async_wrap.cc index 315d4177c8c..b35cdca08a6 100644 --- a/src/async_wrap.cc +++ b/src/async_wrap.cc @@ -176,6 +176,10 @@ void AsyncWrap::EmitAfter(Environment* env, double async_id) { class PromiseWrap : public AsyncWrap { public: + enum InternalFields { + kIsChainedPromiseField = AsyncWrap::kInternalFieldCount, + kInternalFieldCount + }; PromiseWrap(Environment* env, Local object, bool silent) : AsyncWrap(env, object, PROVIDER_PROMISE, kInvalidAsyncId, silent) { MakeWeak(); @@ -185,9 +189,6 @@ class PromiseWrap : public AsyncWrap { SET_MEMORY_INFO_NAME(PromiseWrap) SET_SELF_SIZE(PromiseWrap) - static constexpr int kIsChainedPromiseField = 1; - static constexpr int kInternalFieldCount = 2; - static PromiseWrap* New(Environment* env, Local promise, PromiseWrap* parent_wrap, @@ -214,15 +215,16 @@ PromiseWrap* PromiseWrap::New(Environment* env, void PromiseWrap::getIsChainedPromise(Local property, const PropertyCallbackInfo& info) { info.GetReturnValue().Set( - info.Holder()->GetInternalField(kIsChainedPromiseField)); + info.Holder()->GetInternalField(PromiseWrap::kIsChainedPromiseField)); } static PromiseWrap* extractPromiseWrap(Local promise) { - Local resource_object_value = promise->GetInternalField(0); - if (resource_object_value->IsObject()) { - return Unwrap(resource_object_value.As()); - } - return nullptr; + // This check is imperfect. If the internal field is set, it should + // be an object. If it's not, we just ignore it. Ideally v8 would + // have had GetInternalField returning a MaybeLocal but this works + // for now. + Local obj = promise->GetInternalField(0); + return obj->IsObject() ? Unwrap(obj.As()) : nullptr; } static void PromiseHook(PromiseHookType type, Local promise, @@ -560,7 +562,7 @@ void AsyncWrap::Initialize(Local target, function_template->SetClassName(class_name); function_template->Inherit(AsyncWrap::GetConstructorTemplate(env)); auto instance_template = function_template->InstanceTemplate(); - instance_template->SetInternalFieldCount(1); + instance_template->SetInternalFieldCount(AsyncWrap::kInternalFieldCount); auto function = function_template->GetFunction(env->context()).ToLocalChecked(); target->Set(env->context(), class_name, function).Check(); @@ -747,7 +749,7 @@ MaybeLocal AsyncWrap::MakeCallback(const Local cb, ProviderType provider = provider_type(); async_context context { get_async_id(), get_trigger_async_id() }; MaybeLocal ret = InternalMakeCallback( - env(), object(), cb, argc, argv, context); + env(), GetResource(), object(), cb, argc, argv, context); // This is a static call with cached values because the `this` object may // no longer be alive at this point. 
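PromiseWrap, reorganized above, is the async resource Node.js attaches to every JavaScript promise while async_hooks are active; the change only derives its internal-field indices from AsyncWrap::kInternalFieldCount instead of hard-coded constants. The wrap stays observable from JavaScript through ordinary hooks:

```js
const async_hooks = require('async_hooks');
const { writeSync } = require('fs');

// writeSync instead of console.log: logging is itself async and would
// re-enter the hooks.
async_hooks.createHook({
  init(asyncId, type, triggerAsyncId) {
    if (type === 'PROMISE')
      writeSync(1, `PROMISE ${asyncId} (trigger ${triggerAsyncId})\n`);
  },
}).enable();

// Emits two PROMISE inits: the resolved promise and the chained one
// (the latter is what the isChainedPromise field above tracks).
Promise.resolve(1).then(() => {});
```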
diff --git a/src/base_object-inl.h b/src/base_object-inl.h index 8b2b30021a8..c9b4e1491fc 100644 --- a/src/base_object-inl.h +++ b/src/base_object-inl.h @@ -43,7 +43,9 @@ BaseObject::BaseObject(Environment* env, v8::Local<v8::Object> object) : persistent_handle_(env->isolate(), object), env_(env) { CHECK_EQ(false, object.IsEmpty()); CHECK_GT(object->InternalFieldCount(), 0); - object->SetAlignedPointerInInternalField(0, static_cast<void*>(this)); + object->SetAlignedPointerInInternalField( + BaseObject::kSlot, + static_cast<void*>(this)); env->AddCleanupHook(DeleteMe, static_cast<void*>(this)); env->modify_base_object_count(1); } @@ -67,7 +69,7 @@ BaseObject::~BaseObject() { { v8::HandleScope handle_scope(env()->isolate()); - object()->SetAlignedPointerInInternalField(0, nullptr); + object()->SetAlignedPointerInInternalField(BaseObject::kSlot, nullptr); } } @@ -100,7 +102,8 @@ Environment* BaseObject::env() const { BaseObject* BaseObject::FromJSObject(v8::Local<v8::Object> obj) { CHECK_GT(obj->InternalFieldCount(), 0); - return static_cast<BaseObject*>(obj->GetAlignedPointerFromInternalField(0)); + return static_cast<BaseObject*>( + obj->GetAlignedPointerFromInternalField(BaseObject::kSlot)); } @@ -148,11 +151,12 @@ BaseObject::MakeLazilyInitializedJSTemplate(Environment* env) { auto constructor = [](const v8::FunctionCallbackInfo<v8::Value>& args) { DCHECK(args.IsConstructCall()); DCHECK_GT(args.This()->InternalFieldCount(), 0); - args.This()->SetAlignedPointerInInternalField(0, nullptr); + args.This()->SetAlignedPointerInInternalField(BaseObject::kSlot, nullptr); }; v8::Local<v8::FunctionTemplate> t = env->NewFunctionTemplate(constructor); - t->InstanceTemplate()->SetInternalFieldCount(1); + t->InstanceTemplate()->SetInternalFieldCount( + BaseObject::kInternalFieldCount); return t; } diff --git a/src/base_object.h b/src/base_object.h index e7ef029995f..2c67445c31f 100644 --- a/src/base_object.h +++ b/src/base_object.h @@ -36,6 +36,8 @@ class BaseObjectPtrImpl; class BaseObject : public MemoryRetainer { public: + enum InternalFields { kSlot, kInternalFieldCount }; + // Associates this object with `object`. It uses the 0th internal field for // that, and in particular aborts if there is no such field. 
inline BaseObject(Environment* env, v8::Local object); diff --git a/src/cares_wrap.cc b/src/cares_wrap.cc index f7a02e469aa..8abf662caa3 100644 --- a/src/cares_wrap.cc +++ b/src/cares_wrap.cc @@ -749,16 +749,12 @@ Local AddrTTLToArray(Environment* env, const T* addrttls, size_t naddrttls) { auto isolate = env->isolate(); - EscapableHandleScope escapable_handle_scope(isolate); - auto context = env->context(); - Local ttls = Array::New(isolate, naddrttls); - for (size_t i = 0; i < naddrttls; i++) { - auto value = Integer::NewFromUnsigned(isolate, addrttls[i].ttl); - ttls->Set(context, i, value).Check(); - } + MaybeStackBuffer, 8> ttls(naddrttls); + for (size_t i = 0; i < naddrttls; i++) + ttls[i] = Integer::NewFromUnsigned(isolate, addrttls[i].ttl); - return escapable_handle_scope.Escape(ttls); + return Array::New(isolate, ttls.out(), naddrttls); } @@ -2039,6 +2035,7 @@ void GetServers(const FunctionCallbackInfo& args) { int r = ares_get_servers_ports(channel->cares_channel(), &servers); CHECK_EQ(r, ARES_SUCCESS); + auto cleanup = OnScopeLeave([&]() { ares_free_data(servers); }); ares_addr_port_node* cur = servers; @@ -2049,17 +2046,18 @@ void GetServers(const FunctionCallbackInfo& args) { int err = uv_inet_ntop(cur->family, caddr, ip, sizeof(ip)); CHECK_EQ(err, 0); - Local ret = Array::New(env->isolate(), 2); - ret->Set(env->context(), 0, OneByteString(env->isolate(), ip)).Check(); - ret->Set(env->context(), - 1, - Integer::New(env->isolate(), cur->udp_port)).Check(); + Local ret[] = { + OneByteString(env->isolate(), ip), + Integer::New(env->isolate(), cur->udp_port) + }; - server_array->Set(env->context(), i, ret).Check(); + if (server_array->Set(env->context(), i, + Array::New(env->isolate(), ret, arraysize(ret))) + .IsNothing()) { + return; + } } - ares_free_data(servers); - args.GetReturnValue().Set(server_array); } @@ -2225,7 +2223,8 @@ void Initialize(Local target, Local channel_wrap = env->NewFunctionTemplate(ChannelWrap::New); - channel_wrap->InstanceTemplate()->SetInternalFieldCount(1); + channel_wrap->InstanceTemplate()->SetInternalFieldCount( + ChannelWrap::kInternalFieldCount); channel_wrap->Inherit(AsyncWrap::GetConstructorTemplate(env)); env->SetProtoMethod(channel_wrap, "queryAny", Query); diff --git a/src/debug_utils-inl.h b/src/debug_utils-inl.h index 2f6137700d8..ae2d2046486 100644 --- a/src/debug_utils-inl.h +++ b/src/debug_utils-inl.h @@ -4,6 +4,7 @@ #if defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS #include "debug_utils.h" +#include "env.h" #include @@ -90,6 +91,93 @@ void COLD_NOINLINE FPrintF(FILE* file, const char* format, Args&&... args) { FWrite(file, SPrintF(format, std::forward(args)...)); } +template +inline void FORCE_INLINE Debug(EnabledDebugList* list, + DebugCategory cat, + const char* format, + Args&&... args) { + if (!UNLIKELY(list->enabled(cat))) return; + FPrintF(stderr, format, std::forward(args)...); +} + +inline void FORCE_INLINE Debug(EnabledDebugList* list, + DebugCategory cat, + const char* message) { + if (!UNLIKELY(list->enabled(cat))) return; + FPrintF(stderr, "%s", message); +} + +template +inline void FORCE_INLINE +Debug(Environment* env, DebugCategory cat, const char* format, Args&&... args) { + Debug(env->enabled_debug_list(), cat, format, std::forward(args)...); +} + +inline void FORCE_INLINE Debug(Environment* env, + DebugCategory cat, + const char* message) { + Debug(env->enabled_debug_list(), cat, message); +} + +template +inline void Debug(Environment* env, + DebugCategory cat, + const std::string& format, + Args&&... 
args) { + Debug(env->enabled_debug_list(), + cat, + format.c_str(), + std::forward(args)...); +} + +// Used internally by the 'real' Debug(AsyncWrap*, ...) functions below, so that +// the FORCE_INLINE flag on them doesn't apply to the contents of this function +// as well. +// We apply COLD_NOINLINE to tell the compiler that it's not worth optimizing +// this function for speed and it should rather focus on keeping it out of +// hot code paths. In particular, we want to keep the string concatenating code +// out of the function containing the original `Debug()` call. +template +void COLD_NOINLINE UnconditionalAsyncWrapDebug(AsyncWrap* async_wrap, + const char* format, + Args&&... args) { + Debug(async_wrap->env(), + static_cast(async_wrap->provider_type()), + async_wrap->diagnostic_name() + " " + format + "\n", + std::forward(args)...); +} + +template +inline void FORCE_INLINE Debug(AsyncWrap* async_wrap, + const char* format, + Args&&... args) { + DCHECK_NOT_NULL(async_wrap); + DebugCategory cat = static_cast(async_wrap->provider_type()); + if (!UNLIKELY(async_wrap->env()->enabled_debug_list()->enabled(cat))) return; + UnconditionalAsyncWrapDebug(async_wrap, format, std::forward(args)...); +} + +template +inline void FORCE_INLINE Debug(AsyncWrap* async_wrap, + const std::string& format, + Args&&... args) { + Debug(async_wrap, format.c_str(), std::forward(args)...); +} + +namespace per_process { + +template +inline void FORCE_INLINE Debug(DebugCategory cat, + const char* format, + Args&&... args) { + Debug(&enabled_debug_list, cat, format, std::forward(args)...); +} + +inline void FORCE_INLINE Debug(DebugCategory cat, const char* message) { + Debug(&enabled_debug_list, cat, message); +} + +} // namespace per_process } // namespace node #endif // defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS diff --git a/src/debug_utils.cc b/src/debug_utils.cc index 4553b642b65..a601c5ecf40 100644 --- a/src/debug_utils.cc +++ b/src/debug_utils.cc @@ -1,5 +1,6 @@ #include "debug_utils-inl.h" // NOLINT(build/include) #include "env-inl.h" +#include "node_internals.h" #ifdef __POSIX__ #if defined(__linux__) @@ -53,6 +54,37 @@ #endif // _WIN32 namespace node { +namespace per_process { +EnabledDebugList enabled_debug_list; +} + +void EnabledDebugList::Parse(Environment* env) { + std::string cats; + credentials::SafeGetenv("NODE_DEBUG_NATIVE", &cats, env); + Parse(cats, true); +} + +void EnabledDebugList::Parse(const std::string& cats, bool enabled) { + std::string debug_categories = cats; + while (!debug_categories.empty()) { + std::string::size_type comma_pos = debug_categories.find(','); + std::string wanted = ToLower(debug_categories.substr(0, comma_pos)); + +#define V(name) \ + { \ + static const std::string available_category = ToLower(#name); \ + if (available_category.find(wanted) != std::string::npos) \ + set_enabled(DebugCategory::name, enabled); \ + } + + DEBUG_CATEGORY_NAMES(V) +#undef V + + if (comma_pos == std::string::npos) break; + // Use everything after the `,` as the list for the next iteration. 
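Parse() above splits NODE_DEBUG_NATIVE on commas, lower-cases each entry, and enables every category whose name contains that entry as a substring, so a single token can switch on several related categories. A sketch of driving it from a parent process (app.js is a placeholder; WASI and CODE_CACHE are categories listed in this patch):

```js
const { spawnSync } = require('child_process');

// Matching is case-insensitive and by substring: 'wasi' enables WASI,
// 'code_cache' enables CODE_CACHE.
spawnSync(process.execPath, ['app.js'], {
  env: { ...process.env, NODE_DEBUG_NATIVE: 'wasi,code_cache' },
  stdio: 'inherit',
});
```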
+ debug_categories = debug_categories.substr(comma_pos + 1); + } +} #ifdef __POSIX__ #if HAVE_EXECINFO_H diff --git a/src/debug_utils.h b/src/debug_utils.h index c745cbe0a1a..b654159ac2a 100644 --- a/src/debug_utils.h +++ b/src/debug_utils.h @@ -4,7 +4,6 @@ #if defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS #include "async_wrap.h" -#include "env.h" #include #include @@ -21,6 +20,7 @@ #endif namespace node { +class Environment; template inline std::string ToString(const T& value); @@ -36,31 +36,72 @@ template inline void FPrintF(FILE* file, const char* format, Args&&... args); void FWrite(FILE* file, const std::string& str); +// Listing the AsyncWrap provider types first enables us to cast directly +// from a provider type to a debug category. +#define DEBUG_CATEGORY_NAMES(V) \ + NODE_ASYNC_PROVIDER_TYPES(V) \ + V(INSPECTOR_SERVER) \ + V(INSPECTOR_PROFILER) \ + V(CODE_CACHE) \ + V(WASI) + +enum class DebugCategory { +#define V(name) name, + DEBUG_CATEGORY_NAMES(V) +#undef V + CATEGORY_COUNT +}; + +class EnabledDebugList { + public: + bool enabled(DebugCategory category) const { + DCHECK_GE(static_cast(category), 0); + DCHECK_LT(static_cast(category), + static_cast(DebugCategory::CATEGORY_COUNT)); + return enabled_[static_cast(category)]; + } + + // Uses NODE_DEBUG_NATIVE to initialize the categories. When env is not a + // nullptr, the environment variables set in the Environment are used. + // Otherwise the system environment variables are used. + void Parse(Environment* env); + + private: + // Set all categories matching cats to the value of enabled. + void Parse(const std::string& cats, bool enabled); + void set_enabled(DebugCategory category, bool enabled) { + DCHECK_GE(static_cast(category), 0); + DCHECK_LT(static_cast(category), + static_cast(DebugCategory::CATEGORY_COUNT)); + enabled_[static_cast(category)] = true; + } + + bool enabled_[static_cast(DebugCategory::CATEGORY_COUNT)] = {false}; +}; + template -inline void FORCE_INLINE Debug(Environment* env, +inline void FORCE_INLINE Debug(EnabledDebugList* list, DebugCategory cat, const char* format, - Args&&... args) { - if (!UNLIKELY(env->debug_enabled(cat))) - return; - FPrintF(stderr, format, std::forward(args)...); -} + Args&&... args); + +inline void FORCE_INLINE Debug(EnabledDebugList* list, + DebugCategory cat, + const char* message); + +template +inline void FORCE_INLINE +Debug(Environment* env, DebugCategory cat, const char* format, Args&&... args); inline void FORCE_INLINE Debug(Environment* env, DebugCategory cat, - const char* message) { - if (!UNLIKELY(env->debug_enabled(cat))) - return; - FPrintF(stderr, "%s", message); -} + const char* message); template inline void Debug(Environment* env, DebugCategory cat, const std::string& format, - Args&&... args) { - Debug(env, cat, format.c_str(), std::forward(args)...); -} + Args&&... args); // Used internally by the 'real' Debug(AsyncWrap*, ...) functions below, so that // the FORCE_INLINE flag on them doesn't apply to the contents of this function @@ -72,31 +113,17 @@ inline void Debug(Environment* env, template void COLD_NOINLINE UnconditionalAsyncWrapDebug(AsyncWrap* async_wrap, const char* format, - Args&&... args) { - Debug(async_wrap->env(), - static_cast(async_wrap->provider_type()), - async_wrap->diagnostic_name() + " " + format + "\n", - std::forward(args)...); -} + Args&&... args); template inline void FORCE_INLINE Debug(AsyncWrap* async_wrap, const char* format, - Args&&... 
args) { - DCHECK_NOT_NULL(async_wrap); - DebugCategory cat = - static_cast(async_wrap->provider_type()); - if (!UNLIKELY(async_wrap->env()->debug_enabled(cat))) - return; - UnconditionalAsyncWrapDebug(async_wrap, format, std::forward(args)...); -} + Args&&... args); template inline void FORCE_INLINE Debug(AsyncWrap* async_wrap, const std::string& format, - Args&&... args) { - Debug(async_wrap, format.c_str(), std::forward(args)...); -} + Args&&... args); // Debug helper for inspecting the currently running `node` executable. class NativeSymbolDebuggingContext { @@ -135,6 +162,16 @@ class NativeSymbolDebuggingContext { void CheckedUvLoopClose(uv_loop_t* loop); void PrintLibuvHandleInformation(uv_loop_t* loop, FILE* stream); +namespace per_process { +extern EnabledDebugList enabled_debug_list; + +template +inline void FORCE_INLINE Debug(DebugCategory cat, + const char* format, + Args&&... args); + +inline void FORCE_INLINE Debug(DebugCategory cat, const char* message); +} // namespace per_process } // namespace node #endif // defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS diff --git a/src/env-inl.h b/src/env-inl.h index 7f29b93cc12..69b7316e405 100644 --- a/src/env-inl.h +++ b/src/env-inl.h @@ -609,20 +609,6 @@ inline void Environment::set_http2_state( http2_state_ = std::move(buffer); } -bool Environment::debug_enabled(DebugCategory category) const { - DCHECK_GE(static_cast(category), 0); - DCHECK_LT(static_cast(category), - static_cast(DebugCategory::CATEGORY_COUNT)); - return debug_enabled_[static_cast(category)]; -} - -void Environment::set_debug_enabled(DebugCategory category, bool enabled) { - DCHECK_GE(static_cast(category), 0); - DCHECK_LT(static_cast(category), - static_cast(DebugCategory::CATEGORY_COUNT)); - debug_enabled_[static_cast(category)] = enabled; -} - inline AliasedFloat64Array* Environment::fs_stats_field_array() { return &fs_stats_field_array_; } diff --git a/src/env.cc b/src/env.cc index e54e367f0be..a3ee9158c49 100644 --- a/src/env.cc +++ b/src/env.cc @@ -1,6 +1,7 @@ #include "env.h" #include "async_wrap.h" +#include "debug_utils-inl.h" #include "memory_tracker-inl.h" #include "node_buffer.h" #include "node_context_data.h" @@ -315,6 +316,7 @@ Environment::Environment(IsolateData* isolate_data, Context::Scope context_scope(context); set_env_vars(per_process::system_environment); + enabled_debug_list_.Parse(this); // We create new copies of the per-Environment option sets, so that it is // easier to modify them after Environment creation. The defaults are @@ -375,10 +377,6 @@ Environment::Environment(IsolateData* isolate_data, // By default, always abort when --abort-on-uncaught-exception was passed. 
should_abort_on_uncaught_toggle_[0] = 1; - std::string debug_cats; - credentials::SafeGetenv("NODE_DEBUG_NATIVE", &debug_cats, this); - set_debug_categories(debug_cats, true); - if (options_->no_force_async_hooks_checks) { async_hooks_.no_force_checks(); } @@ -864,29 +862,6 @@ Local Environment::GetNow() { return Number::New(isolate(), static_cast(now)); } -void Environment::set_debug_categories(const std::string& cats, bool enabled) { - std::string debug_categories = cats; - while (!debug_categories.empty()) { - std::string::size_type comma_pos = debug_categories.find(','); - std::string wanted = ToLower(debug_categories.substr(0, comma_pos)); - -#define V(name) \ - { \ - static const std::string available_category = ToLower(#name); \ - if (available_category.find(wanted) != std::string::npos) \ - set_debug_enabled(DebugCategory::name, enabled); \ - } - - DEBUG_CATEGORY_NAMES(V) -#undef V - - if (comma_pos == std::string::npos) - break; - // Use everything after the `,` as the list for the next iteration. - debug_categories = debug_categories.substr(comma_pos + 1); - } -} - void CollectExceptionInfo(Environment* env, Local obj, int errorno, diff --git a/src/env.h b/src/env.h index 3b577e40307..13af1874050 100644 --- a/src/env.h +++ b/src/env.h @@ -29,6 +29,7 @@ #include "inspector_agent.h" #include "inspector_profiler.h" #endif +#include "debug_utils.h" #include "handle_wrap.h" #include "node.h" #include "node_binding.h" @@ -206,6 +207,7 @@ constexpr size_t kFsStatsBufferLength = V(dest_string, "dest") \ V(destroyed_string, "destroyed") \ V(detached_string, "detached") \ + V(dh_string, "DH") \ V(dns_a_string, "A") \ V(dns_aaaa_string, "AAAA") \ V(dns_cname_string, "CNAME") \ @@ -219,6 +221,7 @@ constexpr size_t kFsStatsBufferLength = V(done_string, "done") \ V(dot_string, ".") \ V(duration_string, "duration") \ + V(ecdh_string, "ECDH") \ V(emit_warning_string, "emitWarning") \ V(empty_object_string, "{}") \ V(encoding_string, "encoding") \ @@ -406,6 +409,7 @@ constexpr size_t kFsStatsBufferLength = V(filehandlereadwrap_template, v8::ObjectTemplate) \ V(fsreqpromise_constructor_template, v8::ObjectTemplate) \ V(handle_wrap_ctor_template, v8::FunctionTemplate) \ + V(histogram_instance_template, v8::ObjectTemplate) \ V(http2settings_constructor_template, v8::ObjectTemplate) \ V(http2stream_constructor_template, v8::ObjectTemplate) \ V(http2ping_constructor_template, v8::ObjectTemplate) \ @@ -507,7 +511,7 @@ class IsolateData : public MemoryRetainer { #undef VS #undef VP - std::unordered_map> http2_static_strs; + std::unordered_map> http_static_strs; inline v8::Isolate* isolate() const; IsolateData(const IsolateData&) = delete; IsolateData& operator=(const IsolateData&) = delete; @@ -547,20 +551,7 @@ struct ContextInfo { bool is_default = false; }; -// Listing the AsyncWrap provider types first enables us to cast directly -// from a provider type to a debug category. -#define DEBUG_CATEGORY_NAMES(V) \ - NODE_ASYNC_PROVIDER_TYPES(V) \ - V(INSPECTOR_SERVER) \ - V(INSPECTOR_PROFILER) \ - V(WASI) - -enum class DebugCategory { -#define V(name) name, - DEBUG_CATEGORY_NAMES(V) -#undef V - CATEGORY_COUNT -}; +class EnabledDebugList; // A unique-pointer-ish object that is compatible with the JS engine's // ArrayBuffer::Allocator. 
@@ -605,11 +596,13 @@ class KVStore { virtual v8::MaybeLocal<v8::String> Get(v8::Isolate* isolate, v8::Local<v8::String> key) const = 0; + virtual v8::Maybe<std::string> Get(const char* key) const = 0; virtual void Set(v8::Isolate* isolate, v8::Local<v8::String> key, v8::Local<v8::String> value) = 0; virtual int32_t Query(v8::Isolate* isolate, v8::Local<v8::String> key) const = 0; + virtual int32_t Query(const char* key) const = 0; virtual void Delete(v8::Isolate* isolate, v8::Local<v8::String> key) = 0; virtual v8::Local<v8::Array> Enumerate(v8::Isolate* isolate) const = 0; @@ -1022,9 +1015,7 @@ class Environment : public MemoryRetainer { inline http2::Http2State* http2_state() const; inline void set_http2_state(std::unique_ptr<http2::Http2State> state); - inline bool debug_enabled(DebugCategory category) const; - inline void set_debug_enabled(DebugCategory category, bool enabled); - void set_debug_categories(const std::string& cats, bool enabled); + EnabledDebugList* enabled_debug_list() { return &enabled_debug_list_; } inline AliasedFloat64Array* fs_stats_field_array(); inline AliasedBigUint64Array* fs_stats_field_bigint_array(); @@ -1384,9 +1375,7 @@ class Environment : public MemoryRetainer { bool http_parser_buffer_in_use_ = false; std::unique_ptr<http2::Http2State> http2_state_; - bool debug_enabled_[static_cast<int>(DebugCategory::CATEGORY_COUNT)] = { - false}; - + EnabledDebugList enabled_debug_list_; AliasedFloat64Array fs_stats_field_array_; AliasedBigUint64Array fs_stats_field_bigint_array_; diff --git a/src/fs_event_wrap.cc b/src/fs_event_wrap.cc index d38556b1bf8..858455bff2d 100644 --- a/src/fs_event_wrap.cc +++ b/src/fs_event_wrap.cc @@ -97,7 +97,8 @@ void FSEventWrap::Initialize(Local<Object> target, auto fsevent_string = FIXED_ONE_BYTE_STRING(env->isolate(), "FSEvent"); Local<FunctionTemplate> t = env->NewFunctionTemplate(New); - t->InstanceTemplate()->SetInternalFieldCount(1); + t->InstanceTemplate()->SetInternalFieldCount( + FSEventWrap::kInternalFieldCount); t->SetClassName(fsevent_string); t->Inherit(AsyncWrap::GetConstructorTemplate(env)); diff --git a/src/heap_utils.cc b/src/heap_utils.cc index 68cd532c230..c21ff8c8006 100644 --- a/src/heap_utils.cc +++ b/src/heap_utils.cc @@ -341,7 +341,7 @@ BaseObjectPtr<AsyncWrap> CreateHeapSnapshotStream( Local<FunctionTemplate> os = FunctionTemplate::New(env->isolate()); os->Inherit(AsyncWrap::GetConstructorTemplate(env)); Local<ObjectTemplate> ost = os->InstanceTemplate(); - ost->SetInternalFieldCount(StreamBase::kStreamBaseFieldCount); + ost->SetInternalFieldCount(StreamBase::kInternalFieldCount); os->SetClassName( FIXED_ONE_BYTE_STRING(env->isolate(), "HeapSnapshotStream")); StreamBase::AddMethods(env, os); diff --git a/src/histogram-inl.h b/src/histogram-inl.h index 3135041f738..58911dae8f2 100644 --- a/src/histogram-inl.h +++ b/src/histogram-inl.h @@ -4,58 +4,78 @@ #if defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS #include "histogram.h" +#include "base_object-inl.h" #include "node_internals.h" namespace node { -inline Histogram::Histogram(int64_t lowest, int64_t highest, int figures) { - CHECK_EQ(0, hdr_init(lowest, highest, figures, &histogram_)); +void Histogram::Reset() { + hdr_reset(histogram_.get()); } -inline Histogram::~Histogram() { - hdr_close(histogram_); +bool Histogram::Record(int64_t value) { + return hdr_record_value(histogram_.get(), value); } -inline void Histogram::Reset() { - hdr_reset(histogram_); +int64_t Histogram::Min() { + return hdr_min(histogram_.get()); } -inline bool Histogram::Record(int64_t value) { - return hdr_record_value(histogram_, value); +int64_t Histogram::Max() { + return hdr_max(histogram_.get()); } -inline int64_t Histogram::Min() { - return hdr_min(histogram_); +double 
Histogram::Mean() { + return hdr_mean(histogram_.get()); } -inline int64_t Histogram::Max() { - return hdr_max(histogram_); +double Histogram::Stddev() { + return hdr_stddev(histogram_.get()); } -inline double Histogram::Mean() { - return hdr_mean(histogram_); -} - -inline double Histogram::Stddev() { - return hdr_stddev(histogram_); -} - -inline double Histogram::Percentile(double percentile) { +double Histogram::Percentile(double percentile) { CHECK_GT(percentile, 0); CHECK_LE(percentile, 100); - return hdr_value_at_percentile(histogram_, percentile); + return static_cast( + hdr_value_at_percentile(histogram_.get(), percentile)); } -inline void Histogram::Percentiles(std::function fn) { +template +void Histogram::Percentiles(Iterator&& fn) { hdr_iter iter; - hdr_iter_percentile_init(&iter, histogram_, 1); + hdr_iter_percentile_init(&iter, histogram_.get(), 1); while (hdr_iter_next(&iter)) { double key = iter.specifics.percentiles.percentile; - double value = iter.value; + double value = static_cast(iter.value); fn(key, value); } } +bool HistogramBase::RecordDelta() { + uint64_t time = uv_hrtime(); + bool ret = true; + if (prev_ > 0) { + int64_t delta = time - prev_; + if (delta > 0) { + ret = Record(delta); + TraceDelta(delta); + if (!ret) { + if (exceeds_ < 0xFFFFFFFF) + exceeds_++; + TraceExceeds(delta); + } + } + } + prev_ = time; + return ret; +} + +void HistogramBase::ResetState() { + Reset(); + exceeds_ = 0; + prev_ = 0; +} + } // namespace node #endif // defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS diff --git a/src/histogram.cc b/src/histogram.cc new file mode 100644 index 00000000000..8d1eb77b1bc --- /dev/null +++ b/src/histogram.cc @@ -0,0 +1,141 @@ +#include "histogram.h" // NOLINT(build/include_inline) +#include "histogram-inl.h" +#include "memory_tracker-inl.h" + +namespace node { + +using v8::FunctionCallbackInfo; +using v8::FunctionTemplate; +using v8::Local; +using v8::Map; +using v8::Number; +using v8::ObjectTemplate; +using v8::String; +using v8::Value; + +Histogram::Histogram(int64_t lowest, int64_t highest, int figures) { + hdr_histogram* histogram; + CHECK_EQ(0, hdr_init(lowest, highest, figures, &histogram)); + histogram_.reset(histogram); +} + +HistogramBase::HistogramBase( + Environment* env, + v8::Local wrap, + int64_t lowest, + int64_t highest, + int figures) + : BaseObject(env, wrap), + Histogram(lowest, highest, figures) { + MakeWeak(); +} + +void HistogramBase::MemoryInfo(MemoryTracker* tracker) const { + tracker->TrackFieldWithSize("histogram", GetMemorySize()); +} + +void HistogramBase::GetMin(const FunctionCallbackInfo& args) { + HistogramBase* histogram; + ASSIGN_OR_RETURN_UNWRAP(&histogram, args.Holder()); + double value = static_cast(histogram->Min()); + args.GetReturnValue().Set(value); +} + +void HistogramBase::GetMax(const FunctionCallbackInfo& args) { + HistogramBase* histogram; + ASSIGN_OR_RETURN_UNWRAP(&histogram, args.Holder()); + double value = static_cast(histogram->Max()); + args.GetReturnValue().Set(value); +} + +void HistogramBase::GetMean(const FunctionCallbackInfo& args) { + HistogramBase* histogram; + ASSIGN_OR_RETURN_UNWRAP(&histogram, args.Holder()); + args.GetReturnValue().Set(histogram->Mean()); +} + +void HistogramBase::GetExceeds(const FunctionCallbackInfo& args) { + HistogramBase* histogram; + ASSIGN_OR_RETURN_UNWRAP(&histogram, args.Holder()); + double value = static_cast(histogram->Exceeds()); + args.GetReturnValue().Set(value); +} + +void HistogramBase::GetStddev(const FunctionCallbackInfo& args) { + HistogramBase* 
histogram; + ASSIGN_OR_RETURN_UNWRAP(&histogram, args.Holder()); + args.GetReturnValue().Set(histogram->Stddev()); +} + +void HistogramBase::GetPercentile( + const FunctionCallbackInfo& args) { + HistogramBase* histogram; + ASSIGN_OR_RETURN_UNWRAP(&histogram, args.Holder()); + CHECK(args[0]->IsNumber()); + double percentile = args[0].As()->Value(); + args.GetReturnValue().Set(histogram->Percentile(percentile)); +} + +void HistogramBase::GetPercentiles( + const FunctionCallbackInfo& args) { + Environment* env = Environment::GetCurrent(args); + HistogramBase* histogram; + ASSIGN_OR_RETURN_UNWRAP(&histogram, args.Holder()); + CHECK(args[0]->IsMap()); + Local map = args[0].As(); + histogram->Percentiles([map, env](double key, double value) { + map->Set( + env->context(), + Number::New(env->isolate(), key), + Number::New(env->isolate(), value)).IsEmpty(); + }); +} + +void HistogramBase::DoReset(const FunctionCallbackInfo& args) { + HistogramBase* histogram; + ASSIGN_OR_RETURN_UNWRAP(&histogram, args.Holder()); + histogram->ResetState(); +} + +BaseObjectPtr HistogramBase::New( + Environment* env, + int64_t lowest, + int64_t highest, + int figures) { + CHECK_LE(lowest, highest); + CHECK_GT(figures, 0); + v8::Local obj; + auto tmpl = env->histogram_instance_template(); + if (!tmpl->NewInstance(env->context()).ToLocal(&obj)) + return {}; + + return MakeDetachedBaseObject( + env, obj, lowest, highest, figures); +} + +void HistogramBase::Initialize(Environment* env) { + // Guard against multiple initializations + if (!env->histogram_instance_template().IsEmpty()) + return; + + Local histogram = FunctionTemplate::New(env->isolate()); + Local classname = FIXED_ONE_BYTE_STRING(env->isolate(), "Histogram"); + histogram->SetClassName(classname); + + Local histogramt = + histogram->InstanceTemplate(); + + histogramt->SetInternalFieldCount(1); + env->SetProtoMethod(histogram, "exceeds", HistogramBase::GetExceeds); + env->SetProtoMethod(histogram, "min", HistogramBase::GetMin); + env->SetProtoMethod(histogram, "max", HistogramBase::GetMax); + env->SetProtoMethod(histogram, "mean", HistogramBase::GetMean); + env->SetProtoMethod(histogram, "stddev", HistogramBase::GetStddev); + env->SetProtoMethod(histogram, "percentile", HistogramBase::GetPercentile); + env->SetProtoMethod(histogram, "percentiles", HistogramBase::GetPercentiles); + env->SetProtoMethod(histogram, "reset", HistogramBase::DoReset); + + env->set_histogram_instance_template(histogramt); +} + +} // namespace node diff --git a/src/histogram.h b/src/histogram.h index eb94af5da2a..e92c31c4724 100644 --- a/src/histogram.h +++ b/src/histogram.h @@ -4,15 +4,24 @@ #if defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS #include "hdr_histogram.h" +#include "base_object.h" +#include "util.h" + #include +#include #include namespace node { +constexpr int kDefaultHistogramFigures = 3; + class Histogram { public: - inline Histogram(int64_t lowest, int64_t highest, int figures = 3); - inline virtual ~Histogram(); + Histogram( + int64_t lowest = std::numeric_limits::min(), + int64_t highest = std::numeric_limits::max(), + int figures = kDefaultHistogramFigures); + virtual ~Histogram() = default; inline bool Record(int64_t value); inline void Reset(); @@ -21,14 +30,65 @@ class Histogram { inline double Mean(); inline double Stddev(); inline double Percentile(double percentile); - inline void Percentiles(std::function fn); + + // Iterator is a function type that takes two doubles as argument, one for + // percentile and one for the value at that percentile. 
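These native methods are what the reworked ELDHistogram in lib/perf_hooks.js now inherits through the new internal/histogram.js, so the JavaScript surface is unchanged. Exercising it via monitorEventLoopDelay():

```js
const { monitorEventLoopDelay } = require('perf_hooks');

const h = monitorEventLoopDelay({ resolution: 20 });  // sample every 20 ms
h.enable();

setTimeout(() => {
  h.disable();
  // All values are in nanoseconds.
  console.log(h.min, h.max, h.mean, h.stddev);
  console.log(h.percentile(99));  // throws outside the (0, 100] range
  console.log(h.percentiles);     // Map of percentile -> value
  console.log(h.exceeds);         // deltas the histogram could not record
  h.reset();
}, 1000);
```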
+ template + inline void Percentiles(Iterator&& fn); size_t GetMemorySize() const { - return hdr_get_memory_size(histogram_); + return hdr_get_memory_size(histogram_.get()); } private: - hdr_histogram* histogram_; + using HistogramPointer = DeleteFnPtr; + HistogramPointer histogram_; +}; + +class HistogramBase : public BaseObject, public Histogram { + public: + virtual ~HistogramBase() = default; + + virtual void TraceDelta(int64_t delta) {} + virtual void TraceExceeds(int64_t delta) {} + + inline bool RecordDelta(); + inline void ResetState(); + + int64_t Exceeds() const { return exceeds_; } + + void MemoryInfo(MemoryTracker* tracker) const override; + SET_MEMORY_INFO_NAME(HistogramBase) + SET_SELF_SIZE(HistogramBase) + + static void GetMin(const v8::FunctionCallbackInfo& args); + static void GetMax(const v8::FunctionCallbackInfo& args); + static void GetMean(const v8::FunctionCallbackInfo& args); + static void GetExceeds(const v8::FunctionCallbackInfo& args); + static void GetStddev(const v8::FunctionCallbackInfo& args); + static void GetPercentile( + const v8::FunctionCallbackInfo& args); + static void GetPercentiles( + const v8::FunctionCallbackInfo& args); + static void DoReset(const v8::FunctionCallbackInfo& args); + static void Initialize(Environment* env); + + static BaseObjectPtr New( + Environment* env, + int64_t lowest = std::numeric_limits::min(), + int64_t highest = std::numeric_limits::max(), + int figures = kDefaultHistogramFigures); + + HistogramBase( + Environment* env, + v8::Local wrap, + int64_t lowest = std::numeric_limits::min(), + int64_t highest = std::numeric_limits::max(), + int figures = kDefaultHistogramFigures); + + private: + int64_t exceeds_ = 0; + uint64_t prev_ = 0; }; } // namespace node diff --git a/src/inspector_js_api.cc b/src/inspector_js_api.cc index 703c9ff598f..ed3b36ad5ca 100644 --- a/src/inspector_js_api.cc +++ b/src/inspector_js_api.cc @@ -105,7 +105,8 @@ class JSBindingsConnection : public AsyncWrap { Local class_name = ConnectionType::GetClassName(env); Local tmpl = env->NewFunctionTemplate(JSBindingsConnection::New); - tmpl->InstanceTemplate()->SetInternalFieldCount(1); + tmpl->InstanceTemplate()->SetInternalFieldCount( + JSBindingsConnection::kInternalFieldCount); tmpl->SetClassName(class_name); tmpl->Inherit(AsyncWrap::GetConstructorTemplate(env)); env->SetProtoMethod(tmpl, "dispatch", JSBindingsConnection::Dispatch); diff --git a/src/js_native_api.h b/src/js_native_api.h index cb69fde5d19..2675da505c2 100644 --- a/src/js_native_api.h +++ b/src/js_native_api.h @@ -4,7 +4,6 @@ // This file needs to be compatible with C compilers. #include // NOLINT(modernize-deprecated-headers) #include // NOLINT(modernize-deprecated-headers) -#include "js_native_api_types.h" // Use INT_MAX, this should only be consumed by the pre-processor anyway. #define NAPI_VERSION_EXPERIMENTAL 2147483647 @@ -18,10 +17,12 @@ // functions available in a new version of N-API that is not yet ported in all // LTS versions, they can set NAPI_VERSION knowing that they have specifically // depended on that version. -#define NAPI_VERSION 5 +#define NAPI_VERSION 6 #endif #endif +#include "js_native_api_types.h" + // If you need __declspec(dllimport), either include instead, or // define NAPI_EXTERN as __declspec(dllimport) on the compiler's command line. 
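Raising NAPI_VERSION from 5 to 6 above, together with the guard changes just below, promotes the BigInt and instance-data functions from NAPI_EXPERIMENTAL to stable N-API version 6. Since the runtime reports its supported level as process.versions.napi, a loader can gate on it; a hedged sketch (the addon path and export name are hypothetical):

```js
const napiVersion = Number(process.versions.napi);

if (napiVersion >= 6) {
  // Hypothetical addon using napi_create_bigint_int64() and friends.
  const addon = require('./build/Release/bigint_addon.node');
  console.log(addon.lossless64());
} else {
  console.error(`N-API ${napiVersion}: the BigInt APIs need version 6`);
}
```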
#ifndef NAPI_EXTERN @@ -478,7 +479,7 @@ NAPI_EXTERN napi_status napi_add_finalizer(napi_env env, #endif // NAPI_VERSION >= 5 -#ifdef NAPI_EXPERIMENTAL +#if NAPI_VERSION >= 6 // BigInt NAPI_EXTERN napi_status napi_create_bigint_int64(napi_env env, @@ -523,7 +524,9 @@ NAPI_EXTERN napi_status napi_set_instance_data(napi_env env, NAPI_EXTERN napi_status napi_get_instance_data(napi_env env, void** data); +#endif // NAPI_VERSION >= 6 +#ifdef NAPI_EXPERIMENTAL // ArrayBuffer detaching NAPI_EXTERN napi_status napi_detach_arraybuffer(napi_env env, napi_value arraybuffer); diff --git a/src/js_native_api_types.h b/src/js_native_api_types.h index ef44dd457db..7a49fc9f719 100644 --- a/src/js_native_api_types.h +++ b/src/js_native_api_types.h @@ -115,7 +115,7 @@ typedef struct { napi_status error_code; } napi_extended_error_info; -#ifdef NAPI_EXPERIMENTAL +#if NAPI_VERSION >= 6 typedef enum { napi_key_include_prototypes, napi_key_own_only @@ -134,6 +134,6 @@ typedef enum { napi_key_keep_numbers, napi_key_numbers_to_strings } napi_key_conversion; -#endif +#endif // NAPI_VERSION >= 6 #endif // SRC_JS_NATIVE_API_TYPES_H_ diff --git a/src/js_stream.cc b/src/js_stream.cc index a67fd37dbdb..e4da0ce747e 100644 --- a/src/js_stream.cc +++ b/src/js_stream.cc @@ -116,16 +116,15 @@ int JSStream::DoWrite(WriteWrap* w, HandleScope scope(env()->isolate()); Context::Scope context_scope(env()->context()); - Local bufs_arr = Array::New(env()->isolate(), count); - Local buf; + MaybeStackBuffer, 16> bufs_arr(count); for (size_t i = 0; i < count; i++) { - buf = Buffer::Copy(env(), bufs[i].base, bufs[i].len).ToLocalChecked(); - bufs_arr->Set(env()->context(), i, buf).Check(); + bufs_arr[i] = + Buffer::Copy(env(), bufs[i].base, bufs[i].len).ToLocalChecked(); } Local argv[] = { w->object(), - bufs_arr + Array::New(env()->isolate(), bufs_arr.out(), count) }; TryCatchScope try_catch(env()); @@ -205,7 +204,7 @@ void JSStream::Initialize(Local target, FIXED_ONE_BYTE_STRING(env->isolate(), "JSStream"); t->SetClassName(jsStreamString); t->InstanceTemplate() - ->SetInternalFieldCount(StreamBase::kStreamBaseFieldCount); + ->SetInternalFieldCount(StreamBase::kInternalFieldCount); t->Inherit(AsyncWrap::GetConstructorTemplate(env)); env->SetProtoMethod(t, "finishWrite", Finish); diff --git a/src/large_pages/node_large_page.cc b/src/large_pages/node_large_page.cc index ce58e32e719..31d85c1734a 100644 --- a/src/large_pages/node_large_page.cc +++ b/src/large_pages/node_large_page.cc @@ -21,6 +21,11 @@ // SPDX-License-Identifier: MIT #include "node_large_page.h" + +#include // NOLINT(build/include) + +// Besides returning ENOTSUP at runtime we do nothing if this define is missing. +#if defined(NODE_ENABLE_LARGE_CODE_PAGES) && NODE_ENABLE_LARGE_CODE_PAGES #include "util.h" #include "uv.h" @@ -35,7 +40,6 @@ #endif #include // readlink -#include // NOLINT(build/include) #include // PATH_MAX #include #include @@ -67,11 +71,19 @@ #if defined(__linux__) extern "C" { -extern char __executable_start; +// This symbol must be declared weak because this file becomes part of all +// Node.js targets (like node_mksnapshot, node_mkcodecache, and cctest) and +// those files do not supply the symbol. 
+extern char __attribute__((weak)) __node_text_start; +extern char __start_lpstub; } // extern "C" #endif // defined(__linux__) +#endif // defined(NODE_ENABLE_LARGE_CODE_PAGES) && NODE_ENABLE_LARGE_CODE_PAGES namespace node { +#if defined(NODE_ENABLE_LARGE_CODE_PAGES) && NODE_ENABLE_LARGE_CODE_PAGES + +namespace { struct text_region { char* from; @@ -103,7 +115,7 @@ inline uintptr_t hugepage_align_down(uintptr_t addr) { // 00400000-00452000 r-xp 00000000 08:02 173521 /usr/bin/dbus-daemon // This is also handling the case where the first line is not the binary. -static struct text_region FindNodeTextRegion() { +struct text_region FindNodeTextRegion() { struct text_region nregion; nregion.found_text_region = false; #if defined(__linux__) @@ -113,6 +125,8 @@ static struct text_region FindNodeTextRegion() { std::string dev; char dash; uintptr_t start, end, offset, inode; + uintptr_t node_text_start = reinterpret_cast(&__node_text_start); + uintptr_t lpstub_start = reinterpret_cast(&__start_lpstub); ifs.open("/proc/self/maps"); if (!ifs) { @@ -136,21 +150,15 @@ static struct text_region FindNodeTextRegion() { std::string pathname; iss >> pathname; - if (start != reinterpret_cast(&__executable_start)) + if (permission != "r-xp") continue; - // The next line is our .text section. - if (!std::getline(ifs, map_line)) - break; - - iss = std::istringstream(map_line); - iss >> std::hex >> start; - iss >> dash; - iss >> std::hex >> end; - iss >> permission; + if (node_text_start < start || node_text_start >= end) + continue; - if (permission != "r-xp") - break; + start = node_text_start; + if (lpstub_start > start && lpstub_start <= end) + end = lpstub_start; char* from = reinterpret_cast(hugepage_align_up(start)); char* to = reinterpret_cast(hugepage_align_down(end)); @@ -263,7 +271,7 @@ static struct text_region FindNodeTextRegion() { } #if defined(__linux__) -static bool IsTransparentHugePagesEnabled() { +bool IsTransparentHugePagesEnabled() { std::ifstream ifs; ifs.open("/sys/kernel/mm/transparent_hugepage/enabled"); @@ -294,6 +302,8 @@ static bool IsSuperPagesEnabled() { } #endif +} // End of anonymous namespace + // Moving the text region to large pages. We need to be very careful. // 1: This function itself should not be moved. // We use a gcc attributes @@ -308,7 +318,7 @@ static bool IsSuperPagesEnabled() { // d. If successful copy the code there and unmap the original region int #if !defined(__APPLE__) -__attribute__((__section__(".lpstub"))) +__attribute__((__section__("lpstub"))) #else __attribute__((__section__("__TEXT,__lpstub"))) #endif @@ -408,14 +418,26 @@ MoveTextRegionToLargePages(const text_region& r) { if (-1 == munmap(nmem, size)) PrintSystemError(errno); return ret; } +#endif // defined(NODE_ENABLE_LARGE_CODE_PAGES) && NODE_ENABLE_LARGE_CODE_PAGES // This is the primary API called from main. int MapStaticCodeToLargePages() { +#if defined(NODE_ENABLE_LARGE_CODE_PAGES) && NODE_ENABLE_LARGE_CODE_PAGES + bool have_thp = false; +#if defined(__linux__) + have_thp = IsTransparentHugePagesEnabled(); +#elif defined(__FreeBSD__) + have_thp = IsSuperPagesEnabled(); +#elif defined(__APPLE__) + // pse-36 flag is present in recent mac x64 products. 
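MapStaticCodeToLargePages() now reports failure through errno-style codes that LargePagesError() below maps to messages, rather than printing warnings itself; the caller decides what to do with the string. Assuming this backs the --use-largepages runtime option of the v13 line (an assumption; the call site is not part of this hunk), the observable behavior is roughly:

```js
const { spawnSync } = require('child_process');

// With --use-largepages=on, any LargePagesError() string is expected to
// surface as a startup warning while the process continues on normal pages.
const r = spawnSync(process.execPath, ['--use-largepages=on', '-e', '0'], {
  encoding: 'utf8',
});
console.log(r.stderr.trim() || '(no large-pages warning)');
```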
+ have_thp = true; +#endif + if (!have_thp) + return EACCES; + struct text_region r = FindNodeTextRegion(); - if (r.found_text_region == false) { - PrintWarning("failed to find text region"); - return -1; - } + if (r.found_text_region == false) + return ENOENT; #if defined(__FreeBSD__) if (r.from < reinterpret_cast(&MoveTextRegionToLargePages)) @@ -423,17 +445,32 @@ int MapStaticCodeToLargePages() { #endif return MoveTextRegionToLargePages(r); +#else + return ENOTSUP; +#endif } -bool IsLargePagesEnabled() { -#if defined(__linux__) - return IsTransparentHugePagesEnabled(); -#elif defined(__FreeBSD__) - return IsSuperPagesEnabled(); -#elif defined(__APPLE__) - // pse-36 flag is present in recent mac x64 products. - return true; -#endif +const char* LargePagesError(int status) { + switch (status) { + case ENOTSUP: + return "Mapping to large pages is not supported."; + + case EACCES: + return "Large pages are not enabled."; + + case ENOENT: + return "failed to find text region"; + + case -1: + return "Mapping code to large pages failed. Reverting to default page " + "size."; + + case 0: + return "OK"; + + default: + return "Unknown error"; + } } } // namespace node diff --git a/src/large_pages/node_large_page.h b/src/large_pages/node_large_page.h index bce505585cf..622cf09ede4 100644 --- a/src/large_pages/node_large_page.h +++ b/src/large_pages/node_large_page.h @@ -25,10 +25,9 @@ #if defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS - namespace node { -bool IsLargePagesEnabled(); int MapStaticCodeToLargePages(); +const char* LargePagesError(int status); } // namespace node #endif // NODE_WANT_INTERNALS diff --git a/src/large_pages/node_text_start.S b/src/large_pages/node_text_start.S new file mode 100644 index 00000000000..1609b254f04 --- /dev/null +++ b/src/large_pages/node_text_start.S @@ -0,0 +1,5 @@ +.text +.align 0x2000 +.global __node_text_start +.hidden __node_text_start +__node_text_start: diff --git a/src/memory_tracker-inl.h b/src/memory_tracker-inl.h index 1a28e2dd792..9e6201442ab 100644 --- a/src/memory_tracker-inl.h +++ b/src/memory_tracker-inl.h @@ -107,9 +107,9 @@ void MemoryTracker::TrackField(const char* edge_name, } } -template +template void MemoryTracker::TrackField(const char* edge_name, - const std::unique_ptr& value, + const std::unique_ptr& value, const char* node_name) { if (value.get() == nullptr) { return; diff --git a/src/memory_tracker.h b/src/memory_tracker.h index 616976ab2af..4a66e9ce74c 100644 --- a/src/memory_tracker.h +++ b/src/memory_tracker.h @@ -140,9 +140,9 @@ class MemoryTracker { const char* node_name = nullptr); // Shortcut to extract the underlying object out of the smart pointer - template + template inline void TrackField(const char* edge_name, - const std::unique_ptr& value, + const std::unique_ptr& value, const char* node_name = nullptr); template diff --git a/src/module_wrap.cc b/src/module_wrap.cc index 0bc32f7846b..b2afefce17e 100644 --- a/src/module_wrap.cc +++ b/src/module_wrap.cc @@ -25,6 +25,7 @@ using node::url::URL_FLAGS_FAILED; using v8::Array; using v8::ArrayBufferView; using v8::Context; +using v8::EscapableHandleScope; using v8::Function; using v8::FunctionCallbackInfo; using v8::FunctionTemplate; @@ -45,6 +46,7 @@ using v8::PrimitiveArray; using v8::Promise; using v8::ScriptCompiler; using v8::ScriptOrigin; +using v8::ScriptOrModule; using v8::String; using v8::UnboundModuleScript; using v8::Undefined; @@ -264,11 +266,11 @@ void ModuleWrap::Link(const FunctionCallbackInfo& args) { Local mod_context = obj->context_.Get(isolate); 
Local<Module> module = obj->module_.Get(isolate); - Local<Array> promises = Array::New(isolate, - module->GetModuleRequestsLength()); + const int module_requests_length = module->GetModuleRequestsLength(); + MaybeStackBuffer<Local<Value>, 16> promises(module_requests_length); // call the dependency resolve callbacks - for (int i = 0; i < module->GetModuleRequestsLength(); i++) { + for (int i = 0; i < module_requests_length; i++) { Local<String> specifier = module->GetModuleRequest(i); Utf8Value specifier_utf8(env->isolate(), specifier); std::string specifier_std(*specifier_utf8, specifier_utf8.length()); @@ -290,10 +292,11 @@ void ModuleWrap::Link(const FunctionCallbackInfo<Value>& args) { Local<Promise> resolve_promise = resolve_return_value.As<Promise>(); obj->resolve_cache_[specifier_std].Reset(env->isolate(), resolve_promise); - promises->Set(mod_context, i, resolve_promise).Check(); + promises[i] = resolve_promise; } - args.GetReturnValue().Set(promises); + args.GetReturnValue().Set( + Array::New(isolate, promises.out(), promises.length())); } void ModuleWrap::Instantiate(const FunctionCallbackInfo<Value>& args) { @@ -426,12 +429,13 @@ void ModuleWrap::GetStaticDependencySpecifiers( int count = module->GetModuleRequestsLength(); - Local<Array> specifiers = Array::New(env->isolate(), count); + MaybeStackBuffer<Local<Value>, 16> specifiers(count); for (int i = 0; i < count; i++) - specifiers->Set(env->context(), i, module->GetModuleRequest(i)).Check(); + specifiers[i] = module->GetModuleRequest(i); - args.GetReturnValue().Set(specifiers); + args.GetReturnValue().Set( + Array::New(env->isolate(), specifiers.out(), count)); } void ModuleWrap::GetError(const FunctionCallbackInfo<Value>& args) { @@ -447,7 +451,12 @@ MaybeLocal<Module> ModuleWrap::ResolveCallback(Local<Context> context, Local<String> specifier, Local<Module> referrer) { Environment* env = Environment::GetCurrent(context); - CHECK_NOT_NULL(env); // TODO(addaleax): Handle nullptr here.
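// Both hunks above trade N observable JavaScript Set() operations for a
// single Array::New(Isolate*, Local<Value>*, size_t) call over a
// MaybeStackBuffer, which keeps up to 16 entries on the stack before
// falling back to the heap. The pattern in sketch form (the Integer values
// are placeholders for whatever handles are being collected):
MaybeStackBuffer<Local<Value>, 16> elements(count);
for (int i = 0; i < count; i++)
  elements[i] = Integer::New(isolate, i);  // any Local<Value> works here
Local<Array> arr = Array::New(isolate, elements.out(), elements.length());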
+ if (env == nullptr) { + Isolate* isolate = context->GetIsolate(); + THROW_ERR_EXECUTION_ENVIRONMENT_NOT_AVAILABLE(isolate); + return MaybeLocal(); + } + Isolate* isolate = env->isolate(); ModuleWrap* dependent = GetFromModule(env, referrer); @@ -625,7 +634,7 @@ Maybe GetPackageConfig(Environment* env, std::string pkg_src = source.FromJust(); Isolate* isolate = env->isolate(); - v8::HandleScope handle_scope(isolate); + HandleScope handle_scope(isolate); Local pkg_json; { @@ -856,10 +865,20 @@ void ThrowExportsNotFound(Environment* env, const std::string& subpath, const URL& pjson_url, const URL& base) { - const std::string msg = "Package exports for " + - pjson_url.ToFilePath() + " do not define a '" + subpath + - "' subpath, imported from " + base.ToFilePath(); - node::THROW_ERR_MODULE_NOT_FOUND(env, msg.c_str()); + const std::string msg = "Package subpath '" + subpath + "' is not defined" + + " by \"exports\" in " + pjson_url.ToFilePath() + " imported from " + + base.ToFilePath(); + node::THROW_ERR_PACKAGE_PATH_NOT_EXPORTED(env, msg.c_str()); +} + +void ThrowSubpathInvalid(Environment* env, + const std::string& subpath, + const URL& pjson_url, + const URL& base) { + const std::string msg = "Package subpath '" + subpath + "' is not a valid " + + "module request for the \"exports\" resolution of " + + pjson_url.ToFilePath() + " imported from " + base.ToFilePath(); + node::THROW_ERR_INVALID_MODULE_SPECIFIER(env, msg.c_str()); } void ThrowExportsInvalid(Environment* env, @@ -868,14 +887,15 @@ void ThrowExportsInvalid(Environment* env, const URL& pjson_url, const URL& base) { if (subpath.length()) { - const std::string msg = "Cannot resolve package exports target '" + target + - "' matched for '" + subpath + "' in " + pjson_url.ToFilePath() + - ", imported from " + base.ToFilePath(); - node::THROW_ERR_MODULE_NOT_FOUND(env, msg.c_str()); + const std::string msg = "Invalid \"exports\" target \"" + target + + "\" defined for '" + subpath + "' in the package config " + + pjson_url.ToFilePath() + " imported from " + base.ToFilePath(); + node::THROW_ERR_INVALID_PACKAGE_TARGET(env, msg.c_str()); } else { - const std::string msg = "Cannot resolve package main '" + target + "' in" + - pjson_url.ToFilePath() + ", imported from " + base.ToFilePath(); - node::THROW_ERR_MODULE_NOT_FOUND(env, msg.c_str()); + const std::string msg = "Invalid \"exports\" main target " + target + + " defined in the package config " + pjson_url.ToFilePath() + + " imported from " + base.ToFilePath(); + node::THROW_ERR_INVALID_PACKAGE_TARGET(env, msg.c_str()); } } @@ -885,14 +905,20 @@ void ThrowExportsInvalid(Environment* env, const URL& pjson_url, const URL& base) { Local target_string; - if (target->ToString(env->context()).ToLocal(&target_string)) { - Utf8Value target_utf8(env->isolate(), target_string); - std::string target_str(*target_utf8, target_utf8.length()); - if (target->IsArray()) { - target_str = '[' + target_str + ']'; - } - ThrowExportsInvalid(env, subpath, target_str, pjson_url, base); + if (target->IsObject()) { + if (!v8::JSON::Stringify(env->context(), target.As(), + v8::String::Empty(env->isolate())).ToLocal(&target_string)) + return; + } else { + if (!target->ToString(env->context()).ToLocal(&target_string)) + return; + } + Utf8Value target_utf8(env->isolate(), target_string); + std::string target_str(*target_utf8, target_utf8.length()); + if (target->IsArray()) { + target_str = '[' + target_str + ']'; } + ThrowExportsInvalid(env, subpath, target_str, pjson_url, base); } Maybe 
ResolveExportsTargetString(Environment* env, @@ -900,18 +926,13 @@ Maybe ResolveExportsTargetString(Environment* env, const std::string& subpath, const std::string& match, const URL& pjson_url, - const URL& base, - bool throw_invalid = true) { + const URL& base) { if (target.substr(0, 2) != "./") { - if (throw_invalid) { - ThrowExportsInvalid(env, match, target, pjson_url, base); - } + ThrowExportsInvalid(env, match, target, pjson_url, base); return Nothing(); } if (subpath.length() > 0 && target.back() != '/') { - if (throw_invalid) { - ThrowExportsInvalid(env, match, target, pjson_url, base); - } + ThrowExportsInvalid(env, match, target, pjson_url, base); return Nothing(); } URL resolved(target, pjson_url); @@ -920,9 +941,7 @@ Maybe ResolveExportsTargetString(Environment* env, if (resolved_path.find(pkg_path) != 0 || resolved_path.find("/node_modules/", pkg_path.length() - 1) != std::string::npos) { - if (throw_invalid) { - ThrowExportsInvalid(env, match, target, pjson_url, base); - } + ThrowExportsInvalid(env, match, target, pjson_url, base); return Nothing(); } if (subpath.length() == 0) return Just(resolved); @@ -931,9 +950,7 @@ Maybe ResolveExportsTargetString(Environment* env, if (subpath_resolved_path.find(resolved_path) != 0 || subpath_resolved_path.find("/node_modules/", pkg_path.length() - 1) != std::string::npos) { - if (throw_invalid) { - ThrowExportsInvalid(env, match, target + subpath, pjson_url, base); - } + ThrowSubpathInvalid(env, match + subpath, pjson_url, base); return Nothing(); } return Just(subpath_resolved); @@ -963,15 +980,14 @@ Maybe ResolveExportsTarget(Environment* env, Local target, const std::string& subpath, const std::string& pkg_subpath, - const URL& base, - bool throw_invalid = true) { + const URL& base) { Isolate* isolate = env->isolate(); Local context = env->context(); if (target->IsString()) { - Utf8Value target_utf8(isolate, target.As()); + Utf8Value target_utf8(isolate, target.As()); std::string target_str(*target_utf8, target_utf8.length()); Maybe resolved = ResolveExportsTargetString(env, target_str, subpath, - pkg_subpath, pjson_url, base, throw_invalid); + pkg_subpath, pjson_url, base); if (resolved.IsNothing()) { return Nothing(); } @@ -980,40 +996,56 @@ Maybe ResolveExportsTarget(Environment* env, Local target_arr = target.As(); const uint32_t length = target_arr->Length(); if (length == 0) { - if (throw_invalid) { - ThrowExportsInvalid(env, pkg_subpath, target, pjson_url, base); - } + ThrowExportsInvalid(env, pkg_subpath, target, pjson_url, base); return Nothing(); } for (uint32_t i = 0; i < length; i++) { auto target_item = target_arr->Get(context, i).ToLocalChecked(); - if (!target_item->IsArray()) { + { + TryCatchScope try_catch(env); Maybe resolved = ResolveExportsTarget(env, pjson_url, - target_item, subpath, pkg_subpath, base, false); - if (resolved.IsNothing()) continue; + target_item, subpath, pkg_subpath, base); + if (resolved.IsNothing()) { + CHECK(try_catch.HasCaught()); + if (try_catch.Exception().IsEmpty()) return Nothing(); + Local e; + if (!try_catch.Exception()->ToObject(context).ToLocal(&e)) + return Nothing(); + Local code; + if (!e->Get(context, env->code_string()).ToLocal(&code)) + return Nothing(); + Local code_string; + if (!code->ToString(context).ToLocal(&code_string)) + return Nothing(); + Utf8Value code_utf8(env->isolate(), code_string); + if (strcmp(*code_utf8, "ERR_PACKAGE_PATH_NOT_EXPORTED") == 0 || + strcmp(*code_utf8, "ERR_INVALID_PACKAGE_TARGET") == 0) { + continue; + } + try_catch.ReThrow(); + return 
Nothing(); + } + CHECK(!try_catch.HasCaught()); return FinalizeResolution(env, resolved.FromJust(), base); } } - if (throw_invalid) { - auto invalid = target_arr->Get(context, length - 1).ToLocalChecked(); - Maybe resolved = ResolveExportsTarget(env, pjson_url, invalid, - subpath, pkg_subpath, base, true); - CHECK(resolved.IsNothing()); - } + auto invalid = target_arr->Get(context, length - 1).ToLocalChecked(); + Maybe resolved = ResolveExportsTarget(env, pjson_url, invalid, + subpath, pkg_subpath, base); + CHECK(resolved.IsNothing()); return Nothing(); } else if (target->IsObject()) { Local target_obj = target.As(); Local target_obj_keys = target_obj->GetOwnPropertyNames(context).ToLocalChecked(); Local conditionalTarget; - bool matched = false; for (uint32_t i = 0; i < target_obj_keys->Length(); ++i) { Local key = target_obj_keys->Get(context, i).ToLocalChecked(); if (IsArrayIndex(env, key)) { - const std::string msg = "Invalid package config for " + - pjson_url.ToFilePath() + ", \"exports\" cannot contain numeric " + - "property keys."; + const std::string msg = "Invalid package config " + + pjson_url.ToFilePath() + " imported from " + base.ToFilePath() + + ". \"exports\" cannot contain numeric property keys."; node::THROW_ERR_INVALID_PACKAGE_CONFIG(env, msg.c_str()); return Nothing(); } @@ -1024,35 +1056,58 @@ Maybe ResolveExportsTarget(Environment* env, key->ToString(context).ToLocalChecked()); std::string key_str(*key_utf8, key_utf8.length()); if (key_str == "node" || key_str == "import") { - matched = true; conditionalTarget = target_obj->Get(context, key).ToLocalChecked(); - Maybe resolved = ResolveExportsTarget(env, pjson_url, - conditionalTarget, subpath, pkg_subpath, base, false); - if (!resolved.IsNothing()) { - ProcessEmitExperimentalWarning(env, "Conditional exports"); + { + TryCatchScope try_catch(env); + Maybe resolved = ResolveExportsTarget(env, pjson_url, + conditionalTarget, subpath, pkg_subpath, base); + if (resolved.IsNothing()) { + CHECK(try_catch.HasCaught()); + if (try_catch.Exception().IsEmpty()) return Nothing(); + Local e; + if (!try_catch.Exception()->ToObject(context).ToLocal(&e)) + return Nothing(); + Local code; + if (!e->Get(context, env->code_string()).ToLocal(&code)) + return Nothing(); + Local code_string; + if (!code->ToString(context).ToLocal(&code_string)) + return Nothing(); + Utf8Value code_utf8(env->isolate(), code_string); + if (strcmp(*code_utf8, "ERR_PACKAGE_PATH_NOT_EXPORTED") == 0) + continue; + try_catch.ReThrow(); + return Nothing(); + } + CHECK(!try_catch.HasCaught()); return resolved; } } else if (key_str == "default") { - matched = true; conditionalTarget = target_obj->Get(context, key).ToLocalChecked(); - Maybe resolved = ResolveExportsTarget(env, pjson_url, - conditionalTarget, subpath, pkg_subpath, base, false); - if (!resolved.IsNothing()) { - ProcessEmitExperimentalWarning(env, "Conditional exports"); + { + TryCatchScope try_catch(env); + Maybe resolved = ResolveExportsTarget(env, pjson_url, + conditionalTarget, subpath, pkg_subpath, base); + if (resolved.IsNothing()) { + CHECK(try_catch.HasCaught() && !try_catch.Exception().IsEmpty()); + auto e = try_catch.Exception()->ToObject(context).ToLocalChecked(); + auto code = e->Get(context, env->code_string()).ToLocalChecked(); + Utf8Value code_utf8(env->isolate(), + code->ToString(context).ToLocalChecked()); + std::string code_str(*code_utf8, code_utf8.length()); + if (code_str == "ERR_PACKAGE_PATH_NOT_EXPORTED") continue; + try_catch.ReThrow(); + return Nothing(); + } + 
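// The fallback loops above no longer thread a throw_invalid flag through
// the resolvers; each candidate target is resolved under a TryCatchScope,
// and only "expected" resolution errors let the loop continue to the next
// target. A sketch of the code-inspection step, factored into a helper
// (the helper itself is illustrative, not part of this diff; strcmp needs
// <cstring>):
bool IsExpectedResolveError(Environment* env,
                            Local<Context> context,
                            Local<Value> exception) {
  Local<Object> obj;
  Local<Value> code;
  if (!exception->ToObject(context).ToLocal(&obj) ||
      !obj->Get(context, env->code_string()).ToLocal(&code))
    return false;
  Utf8Value code_utf8(env->isolate(), code);  // stringifies the code value
  return strcmp(*code_utf8, "ERR_PACKAGE_PATH_NOT_EXPORTED") == 0 ||
         strcmp(*code_utf8, "ERR_INVALID_PACKAGE_TARGET") == 0;
}
// Anything else is re-thrown via try_catch.ReThrow(), so genuine failures
// still propagate to the caller.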
CHECK(!try_catch.HasCaught()); return resolved; } } } - if (matched && throw_invalid) { - Maybe resolved = ResolveExportsTarget(env, pjson_url, - conditionalTarget, subpath, pkg_subpath, base, true); - CHECK(resolved.IsNothing()); - return Nothing(); - } - } - if (throw_invalid) { - ThrowExportsInvalid(env, pkg_subpath, target, pjson_url, base); + ThrowExportsNotFound(env, pkg_subpath, pjson_url, base); + return Nothing(); } + ThrowExportsInvalid(env, pkg_subpath, target, pjson_url, base); return Nothing(); } @@ -1074,8 +1129,8 @@ Maybe IsConditionalExportsMainSugar(Environment* env, if (i == 0) { isConditionalSugar = curIsConditionalSugar; } else if (isConditionalSugar != curIsConditionalSugar) { - const std::string msg = "Cannot resolve package exports in " + - pjson_url.ToFilePath() + ", imported from " + base.ToFilePath() + ". " + + const std::string msg = "Invalid package config " + pjson_url.ToFilePath() + + " imported from " + base.ToFilePath() + ". " + "\"exports\" cannot contain some keys starting with '.' and some not." + " The exports object must either be an object of package subpath keys" + " or an object of main entry condition name keys only."; @@ -1100,8 +1155,7 @@ Maybe PackageMainResolve(Environment* env, if (isConditionalExportsMainSugar.IsNothing()) return Nothing(); if (isConditionalExportsMainSugar.FromJust()) { - return ResolveExportsTarget(env, pjson_url, exports, "", "", base, - true); + return ResolveExportsTarget(env, pjson_url, exports, "", "", base); } else if (exports->IsObject()) { Local exports_obj = exports.As(); if (exports_obj->HasOwnProperty(env->context(), env->dot_string()) @@ -1109,10 +1163,12 @@ Maybe PackageMainResolve(Environment* env, Local target = exports_obj->Get(env->context(), env->dot_string()) .ToLocalChecked(); - return ResolveExportsTarget(env, pjson_url, target, "", "", base, - true); + return ResolveExportsTarget(env, pjson_url, target, "", "", base); } } + std::string msg = "No \"exports\" main resolved in " + + pjson_url.ToFilePath(); + node::THROW_ERR_PACKAGE_PATH_NOT_EXPORTED(env, msg.c_str()); } if (pcfg.has_main == HasMain::Yes) { URL resolved(pcfg.main, pjson_url); @@ -1204,39 +1260,6 @@ Maybe PackageExportsResolve(Environment* env, return Nothing(); } -Maybe ResolveSelf(Environment* env, - const std::string& pkg_name, - const std::string& pkg_subpath, - const URL& base) { - const PackageConfig* pcfg; - if (GetPackageScopeConfig(env, base, base).To(&pcfg) && - pcfg->exists == Exists::Yes) { - // TODO(jkrems): Find a way to forward the pair/iterator already generated - // while executing GetPackageScopeConfig - URL pjson_url(""); - bool found_pjson = false; - for (auto it = env->package_json_cache.begin(); - it != env->package_json_cache.end(); - ++it) { - if (&it->second == pcfg) { - pjson_url = URL::FromFilePath(it->first); - found_pjson = true; - } - } - if (!found_pjson || pcfg->name != pkg_name) return Nothing(); - if (pcfg->exports.IsEmpty()) return Nothing(); - if (pkg_subpath == "./") { - return Just(URL("./", pjson_url)); - } else if (!pkg_subpath.length()) { - return PackageMainResolve(env, pjson_url, *pcfg, base); - } else { - return PackageExportsResolve(env, pjson_url, pkg_subpath, *pcfg, base); - } - } - - return Nothing(); -} - Maybe PackageResolve(Environment* env, const std::string& specifier, const URL& base) { @@ -1277,10 +1300,29 @@ Maybe PackageResolve(Environment* env, pkg_subpath = "." 
+ specifier.substr(sep_index); } - Maybe<URL> self_url = ResolveSelf(env, pkg_name, pkg_subpath, base); - if (self_url.IsJust()) { - ProcessEmitExperimentalWarning(env, "Package name self resolution"); - return self_url; + // ResolveSelf + const PackageConfig* pcfg; + if (GetPackageScopeConfig(env, base, base).To(&pcfg) && + pcfg->exists == Exists::Yes) { + // TODO(jkrems): Find a way to forward the pair/iterator already generated + // while executing GetPackageScopeConfig + URL pjson_url(""); + bool found_pjson = false; + for (const auto& it : env->package_json_cache) { + if (&it.second == pcfg) { + pjson_url = URL::FromFilePath(it.first); + found_pjson = true; + } + } + if (found_pjson && pcfg->name == pkg_name && !pcfg->exports.IsEmpty()) { + if (pkg_subpath == "./") { + return Just(URL("./", pjson_url)); + } else if (!pkg_subpath.length()) { + return PackageMainResolve(env, pjson_url, *pcfg, base); + } else { + return PackageExportsResolve(env, pjson_url, pkg_subpath, *pcfg, base); + } + } } URL pjson_url("./node_modules/" + pkg_name + "/package.json", &base); @@ -1402,12 +1444,16 @@ void ModuleWrap::GetPackageType(const FunctionCallbackInfo<Value>& args) { static MaybeLocal<Promise> ImportModuleDynamically( Local<Context> context, - Local<v8::ScriptOrModule> referrer, + Local<ScriptOrModule> referrer, Local<String> specifier) { Isolate* iso = context->GetIsolate(); Environment* env = Environment::GetCurrent(context); - CHECK_NOT_NULL(env); // TODO(addaleax): Handle nullptr here. - v8::EscapableHandleScope handle_scope(iso); + if (env == nullptr) { + THROW_ERR_EXECUTION_ENVIRONMENT_NOT_AVAILABLE(iso); + return MaybeLocal<Promise>(); + } + + EscapableHandleScope handle_scope(iso); Local<Function> import_callback = env->host_import_module_dynamically_callback(); @@ -1599,7 +1645,8 @@ void ModuleWrap::Initialize(Local<Object> target, Local<FunctionTemplate> tpl = env->NewFunctionTemplate(New); tpl->SetClassName(FIXED_ONE_BYTE_STRING(isolate, "ModuleWrap")); - tpl->InstanceTemplate()->SetInternalFieldCount(1); + tpl->InstanceTemplate()->SetInternalFieldCount( + ModuleWrap::kInternalFieldCount); env->SetProtoMethod(tpl, "link", Link); env->SetProtoMethod(tpl, "instantiate", Instantiate); diff --git a/src/node.cc b/src/node.cc index a0398b1a4f8..1fec85aa793 100644 --- a/src/node.cc +++ b/src/node.cc @@ -65,9 +65,7 @@ #include "inspector/worker_inspector.h" // ParentInspectorHandle #endif -#ifdef NODE_ENABLE_LARGE_CODE_PAGES #include "large_pages/node_large_page.h" -#endif #ifdef NODE_REPORT #include "node_report.h" @@ -397,6 +395,12 @@ MaybeLocal<Value> StartExecution(Environment* env, const char* main_script_id) { ->GetFunction(env->context()) .ToLocalChecked()}; + InternalCallbackScope callback_scope( + env, + Object::New(env->isolate()), + { 1, 0 }, + InternalCallbackScope::kSkipAsyncHooks); + return scope.EscapeMaybe( ExecuteBootstrapper(env, main_script_id, &parameters, &arguments)); } @@ -912,6 +916,10 @@ void Init(int* argc, } InitializationResult InitializeOncePerProcess(int argc, char** argv) { + // Initialize the enabled list for Debug() calls with system + // environment variables.
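// Parsing with a nullptr Environment (the call just below) consults only
// process-wide state, i.e. the NODE_DEBUG environment variable, so Debug()
// output can be enabled before any Environment exists. Illustrative usage
// (the category names depend on the individual call sites):
//
//   NODE_DEBUG=module,net node app.js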
+ per_process::enabled_debug_list.Parse(nullptr); + atexit(ResetStdio); PlatformInit(); @@ -936,25 +944,13 @@ InitializationResult InitializeOncePerProcess(int argc, char** argv) { } } -#if defined(NODE_ENABLE_LARGE_CODE_PAGES) && NODE_ENABLE_LARGE_CODE_PAGES if (per_process::cli_options->use_largepages == "on" || per_process::cli_options->use_largepages == "silent") { - if (node::IsLargePagesEnabled()) { - if (node::MapStaticCodeToLargePages() != 0 && - per_process::cli_options->use_largepages != "silent") { - fprintf(stderr, - "Mapping code to large pages failed. Reverting to default page " - "size.\n"); - } - } else if (per_process::cli_options->use_largepages != "silent") { - fprintf(stderr, "Large pages are not enabled.\n"); + int result = node::MapStaticCodeToLargePages(); + if (per_process::cli_options->use_largepages == "on" && result != 0) { + fprintf(stderr, "%s\n", node::LargePagesError(result)); } } -#else - if (per_process::cli_options->use_largepages == "on") { - fprintf(stderr, "Mapping to large pages is not supported.\n"); - } -#endif // NODE_ENABLE_LARGE_CODE_PAGES if (per_process::cli_options->print_version) { printf("%s\n", NODE_VERSION); diff --git a/src/node_contextify.cc b/src/node_contextify.cc index 46a1d7c8ef0..e6e2d123c04 100644 --- a/src/node_contextify.cc +++ b/src/node_contextify.cc @@ -47,17 +47,20 @@ using v8::FunctionCallbackInfo; using v8::FunctionTemplate; using v8::HandleScope; using v8::IndexedPropertyHandlerConfiguration; +using v8::Int32; using v8::Integer; using v8::Isolate; using v8::Local; using v8::Maybe; using v8::MaybeLocal; +using v8::MeasureMemoryMode; using v8::Name; using v8::NamedPropertyHandlerConfiguration; using v8::Number; using v8::Object; using v8::ObjectTemplate; using v8::PrimitiveArray; +using v8::Promise; using v8::PropertyAttribute; using v8::PropertyCallbackInfo; using v8::PropertyDescriptor; @@ -142,7 +145,7 @@ MaybeLocal ContextifyContext::CreateDataWrapper(Environment* env) { return MaybeLocal(); } - wrapper->SetAlignedPointerInInternalField(0, this); + wrapper->SetAlignedPointerInInternalField(ContextifyContext::kSlot, this); return wrapper; } @@ -185,8 +188,11 @@ MaybeLocal ContextifyContext::CreateV8Context( object_template->SetHandler(config); object_template->SetHandler(indexed_config); - - Local ctx = NewContext(env->isolate(), object_template); + Local ctx = Context::New(env->isolate(), nullptr, object_template); + if (ctx.IsEmpty()) return MaybeLocal(); + // Only partially initialize the context - the primordials are left out + // and only initialized when necessary. 
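// This file also migrates bare SetInternalFieldCount(1) call sites to named
// constants (see the node_contextify.h hunk below). The idiom, sketched
// with a hypothetical wrapper class:
class ExampleWrap {
 public:
  enum InternalFields { kSlot, kInternalFieldCount };  // kSlot == 0
};
// tmpl->InstanceTemplate()->SetInternalFieldCount(
//     ExampleWrap::kInternalFieldCount);
// wrapper->SetAlignedPointerInInternalField(ExampleWrap::kSlot, this);
// Adding a field later means adding an enumerator, and every count and slot
// index updates itself.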
+ InitializeContextRuntime(ctx); if (ctx.IsEmpty()) { return MaybeLocal<Context>(); @@ -226,7 +232,8 @@ MaybeLocal<Context> ContextifyContext::CreateV8Context( void ContextifyContext::Init(Environment* env, Local<Object> target) { Local<FunctionTemplate> function_template = FunctionTemplate::New(env->isolate()); - function_template->InstanceTemplate()->SetInternalFieldCount(1); + function_template->InstanceTemplate()->SetInternalFieldCount( + ContextifyContext::kInternalFieldCount); env->set_script_data_constructor_function( function_template->GetFunction(env->context()).ToLocalChecked()); @@ -325,7 +332,8 @@ template <typename T> ContextifyContext* ContextifyContext::Get(const PropertyCallbackInfo<T>& args) { Local<Value> data = args.Data(); return static_cast<ContextifyContext*>( - data.As<Object>()->GetAlignedPointerFromInternalField(0)); + data.As<Object>()->GetAlignedPointerFromInternalField( + ContextifyContext::kSlot)); } // static @@ -622,7 +630,8 @@ void ContextifyScript::Init(Environment* env, Local<Object> target) { FIXED_ONE_BYTE_STRING(env->isolate(), "ContextifyScript"); Local<FunctionTemplate> script_tmpl = env->NewFunctionTemplate(New); - script_tmpl->InstanceTemplate()->SetInternalFieldCount(1); + script_tmpl->InstanceTemplate()->SetInternalFieldCount( + ContextifyScript::kInternalFieldCount); script_tmpl->SetClassName(class_name); env->SetProtoMethod(script_tmpl, "createCachedData", CreateCachedData); env->SetProtoMethod(script_tmpl, "runInContext", RunInContext); @@ -1200,11 +1209,39 @@ static void WatchdogHasPendingSigint(const FunctionCallbackInfo<Value>& args) { args.GetReturnValue().Set(ret); } +static void MeasureMemory(const FunctionCallbackInfo<Value>& args) { + CHECK(args[0]->IsInt32()); + int32_t mode = args[0].As<Int32>()->Value(); + Isolate* isolate = args.GetIsolate(); + Environment* env = Environment::GetCurrent(args); + Local<Context> context; + if (args[1]->IsUndefined()) { + context = isolate->GetCurrentContext(); + } else { + CHECK(args[1]->IsObject()); + ContextifyContext* sandbox = + ContextifyContext::ContextFromContextifiedSandbox(env, + args[1].As<Object>()); + CHECK_NOT_NULL(sandbox); + context = sandbox->context(); + if (context.IsEmpty()) { // Not yet fully initialized + return; + } + } + v8::Local<Promise> promise; + if (!isolate->MeasureMemory(context, static_cast<MeasureMemoryMode>(mode)) + .ToLocal(&promise)) { + return; + } + args.GetReturnValue().Set(promise); +} + void Initialize(Local<Object> target, Local<Value> unused, Local<Context> context, void* priv) { Environment* env = Environment::GetCurrent(context); + Isolate* isolate = env->isolate(); ContextifyContext::Init(env, target); ContextifyScript::Init(env, target); @@ -1217,10 +1254,24 @@ void Initialize(Local<Object> target, { Local<FunctionTemplate> tpl = FunctionTemplate::New(env->isolate()); tpl->SetClassName(FIXED_ONE_BYTE_STRING(env->isolate(), "CompiledFnEntry")); - tpl->InstanceTemplate()->SetInternalFieldCount(1); + tpl->InstanceTemplate()->SetInternalFieldCount( + CompiledFnEntry::kInternalFieldCount); env->set_compiled_fn_entry_template(tpl->InstanceTemplate()); } + + Local<Object> constants = Object::New(env->isolate()); + Local<Object> measure_memory = Object::New(env->isolate()); + Local<Object> memory_mode = Object::New(env->isolate()); + MeasureMemoryMode SUMMARY = MeasureMemoryMode::kSummary; + MeasureMemoryMode DETAILED = MeasureMemoryMode::kDetailed; + NODE_DEFINE_CONSTANT(memory_mode, SUMMARY); + NODE_DEFINE_CONSTANT(memory_mode, DETAILED); + READONLY_PROPERTY(measure_memory, "mode", memory_mode); + READONLY_PROPERTY(constants, "measureMemory", measure_memory); + target->Set(context, env->constants_string(), constants).Check(); + + env->SetMethod(target, "measureMemory", MeasureMemory); } } // namespace contextify diff --git
a/src/node_contextify.h b/src/node_contextify.h index cf1e8475075..f04ea86f41a 100644 --- a/src/node_contextify.h +++ b/src/node_contextify.h @@ -19,6 +19,7 @@ struct ContextOptions { class ContextifyContext { public: + enum InternalFields { kSlot, kInternalFieldCount }; ContextifyContext(Environment* env, v8::Local sandbox_obj, const ContextOptions& options); diff --git a/src/node_crypto.cc b/src/node_crypto.cc index 92760fb8c85..fdaf91acdc0 100644 --- a/src/node_crypto.cc +++ b/src/node_crypto.cc @@ -22,6 +22,7 @@ #include "node_crypto.h" #include "node_buffer.h" #include "node_crypto_bio.h" +#include "node_crypto_common.h" #include "node_crypto_clienthello-inl.h" #include "node_crypto_groups.h" #include "node_errors.h" @@ -59,11 +60,6 @@ #include #include -static const int X509_NAME_FLAGS = ASN1_STRFLGS_ESC_CTRL - | ASN1_STRFLGS_UTF8_CONVERT - | XN_FLAG_SEP_MULTILINE - | XN_FLAG_FN_SN; - namespace node { namespace crypto { @@ -75,7 +71,6 @@ using v8::Boolean; using v8::ConstructorBehavior; using v8::Context; using v8::DontDelete; -using v8::EscapableHandleScope; using v8::Exception; using v8::External; using v8::False; @@ -110,24 +105,6 @@ using v8::Value; # define IS_OCB_MODE(mode) ((mode) == EVP_CIPH_OCB_MODE) #endif -struct StackOfX509Deleter { - void operator()(STACK_OF(X509)* p) const { sk_X509_pop_free(p, X509_free); } -}; -using StackOfX509 = std::unique_ptr; - -struct StackOfXASN1Deleter { - void operator()(STACK_OF(ASN1_OBJECT)* p) const { - sk_ASN1_OBJECT_pop_free(p, ASN1_OBJECT_free); - } -}; -using StackOfASN1 = std::unique_ptr; - -// OPENSSL_free is a macro, so we need a wrapper function. -struct OpenSSLBufferDeleter { - void operator()(char* pointer) const { OPENSSL_free(pointer); } -}; -using OpenSSLBuffer = std::unique_ptr; - static const char* const root_certs[] = { #include "node_root_certs.h" // NOLINT(build/include_order) }; @@ -386,7 +363,7 @@ void ThrowCryptoError(Environment* env, unsigned long err, // NOLINT(runtime/int) // Default, only used if there is no SSL `err` which can // be used to create a long-style message string. 
- const char* message = nullptr) { + const char* message) { char message_buffer[128] = {0}; if (err != 0 || message == nullptr) { ERR_error_string_n(err, message_buffer, sizeof(message_buffer)); @@ -453,18 +430,10 @@ bool EntropySource(unsigned char* buffer, size_t length) { return RAND_bytes(buffer, length) != -1; } - -template -static T* MallocOpenSSL(size_t count) { - void* mem = OPENSSL_malloc(MultiplyWithOverflowCheck(count, sizeof(T))); - CHECK_IMPLIES(mem == nullptr, count == 0); - return static_cast(mem); -} - - void SecureContext::Initialize(Environment* env, Local target) { Local t = env->NewFunctionTemplate(New); - t->InstanceTemplate()->SetInternalFieldCount(1); + t->InstanceTemplate()->SetInternalFieldCount( + SecureContext::kInternalFieldCount); Local secureContextString = FIXED_ONE_BYTE_STRING(env->isolate(), "SecureContext"); t->SetClassName(secureContextString); @@ -531,6 +500,24 @@ void SecureContext::Initialize(Environment* env, Local target) { env->set_secure_context_constructor_template(t); } +SecureContext::SecureContext(Environment* env, v8::Local wrap) + : BaseObject(env, wrap) { + MakeWeak(); + env->isolate()->AdjustAmountOfExternalAllocatedMemory(kExternalSize); +} + +inline void SecureContext::Reset() { + if (ctx_ != nullptr) { + env()->isolate()->AdjustAmountOfExternalAllocatedMemory(-kExternalSize); + } + ctx_.reset(); + cert_.reset(); + issuer_.reset(); +} + +SecureContext::~SecureContext() { + Reset(); +} void SecureContext::New(const FunctionCallbackInfo& args) { Environment* env = Environment::GetCurrent(args); @@ -812,16 +799,6 @@ void SecureContext::SetEngineKey(const FunctionCallbackInfo& args) { } #endif // !OPENSSL_NO_ENGINE -int SSL_CTX_get_issuer(SSL_CTX* ctx, X509* cert, X509** issuer) { - X509_STORE* store = SSL_CTX_get_cert_store(ctx); - DeleteFnPtr store_ctx( - X509_STORE_CTX_new()); - return store_ctx.get() != nullptr && - X509_STORE_CTX_init(store_ctx.get(), store, nullptr, nullptr) == 1 && - X509_STORE_CTX_get1_issuer(issuer, store_ctx.get(), cert) == 1; -} - - int SSL_CTX_use_certificate_chain(SSL_CTX* ctx, X509Pointer&& x, STACK_OF(X509)* extra_certs, @@ -1011,19 +988,19 @@ static X509_STORE* NewRootCertStore() { void GetRootCertificates(const FunctionCallbackInfo& args) { Environment* env = Environment::GetCurrent(args); - Local result = Array::New(env->isolate(), arraysize(root_certs)); + Local result[arraysize(root_certs)]; for (size_t i = 0; i < arraysize(root_certs); i++) { - Local value; - if (!String::NewFromOneByte(env->isolate(), - reinterpret_cast(root_certs[i]), - NewStringType::kNormal).ToLocal(&value) || - !result->Set(env->context(), i, value).FromMaybe(false)) { + if (!String::NewFromOneByte( + env->isolate(), + reinterpret_cast(root_certs[i]), + NewStringType::kNormal).ToLocal(&result[i])) { return; } } - args.GetReturnValue().Set(result); + args.GetReturnValue().Set( + Array::New(env->isolate(), result, arraysize(root_certs))); } @@ -1743,6 +1720,8 @@ void SSLWrap::AddMethods(Environment* env, Local t) { env->SetProtoMethodNoSideEffect(t, "verifyError", VerifyError); env->SetProtoMethodNoSideEffect(t, "getCipher", GetCipher); env->SetProtoMethodNoSideEffect(t, "getSharedSigalgs", GetSharedSigalgs); + env->SetProtoMethodNoSideEffect( + t, "exportKeyingMaterial", ExportKeyingMaterial); env->SetProtoMethod(t, "endParser", EndParser); env->SetProtoMethod(t, "certCbDone", CertCbDone); env->SetProtoMethod(t, "renegotiate", Renegotiate); @@ -1872,381 +1851,6 @@ void SSLWrap::OnClientHello(void* arg, 
w->MakeCallback(env->onclienthello_string(), arraysize(argv), argv); } - -static bool SafeX509ExtPrint(BIO* out, X509_EXTENSION* ext) { - const X509V3_EXT_METHOD* method = X509V3_EXT_get(ext); - - if (method != X509V3_EXT_get_nid(NID_subject_alt_name)) - return false; - - GENERAL_NAMES* names = static_cast(X509V3_EXT_d2i(ext)); - if (names == nullptr) - return false; - - for (int i = 0; i < sk_GENERAL_NAME_num(names); i++) { - GENERAL_NAME* gen = sk_GENERAL_NAME_value(names, i); - - if (i != 0) - BIO_write(out, ", ", 2); - - if (gen->type == GEN_DNS) { - ASN1_IA5STRING* name = gen->d.dNSName; - - BIO_write(out, "DNS:", 4); - BIO_write(out, name->data, name->length); - } else { - STACK_OF(CONF_VALUE)* nval = i2v_GENERAL_NAME( - const_cast(method), gen, nullptr); - if (nval == nullptr) - return false; - X509V3_EXT_val_prn(out, nval, 0, 0); - sk_CONF_VALUE_pop_free(nval, X509V3_conf_free); - } - } - sk_GENERAL_NAME_pop_free(names, GENERAL_NAME_free); - - return true; -} - - -static void AddFingerprintDigest(const unsigned char* md, - unsigned int md_size, - char (*fingerprint)[3 * EVP_MAX_MD_SIZE + 1]) { - unsigned int i; - const char hex[] = "0123456789ABCDEF"; - - for (i = 0; i < md_size; i++) { - (*fingerprint)[3*i] = hex[(md[i] & 0xf0) >> 4]; - (*fingerprint)[(3*i)+1] = hex[(md[i] & 0x0f)]; - (*fingerprint)[(3*i)+2] = ':'; - } - - if (md_size > 0) { - (*fingerprint)[(3*(md_size-1))+2] = '\0'; - } else { - (*fingerprint)[0] = '\0'; - } -} - - -static MaybeLocal ECPointToBuffer(Environment* env, - const EC_GROUP* group, - const EC_POINT* point, - point_conversion_form_t form, - const char** error) { - size_t len = EC_POINT_point2oct(group, point, form, nullptr, 0, nullptr); - if (len == 0) { - if (error != nullptr) *error = "Failed to get public key length"; - return MaybeLocal(); - } - AllocatedBuffer buf = env->AllocateManaged(len); - len = EC_POINT_point2oct(group, - point, - form, - reinterpret_cast(buf.data()), - buf.size(), - nullptr); - if (len == 0) { - if (error != nullptr) *error = "Failed to get public key"; - return MaybeLocal(); - } - return buf.ToBuffer(); -} - - -static Local X509ToObject(Environment* env, X509* cert) { - EscapableHandleScope scope(env->isolate()); - Local context = env->context(); - Local info = Object::New(env->isolate()); - - BIOPointer bio(BIO_new(BIO_s_mem())); - BUF_MEM* mem; - if (X509_NAME_print_ex(bio.get(), - X509_get_subject_name(cert), - 0, - X509_NAME_FLAGS) > 0) { - BIO_get_mem_ptr(bio.get(), &mem); - info->Set(context, env->subject_string(), - String::NewFromUtf8(env->isolate(), mem->data, - NewStringType::kNormal, - mem->length).ToLocalChecked()).Check(); - } - USE(BIO_reset(bio.get())); - - X509_NAME* issuer_name = X509_get_issuer_name(cert); - if (X509_NAME_print_ex(bio.get(), issuer_name, 0, X509_NAME_FLAGS) > 0) { - BIO_get_mem_ptr(bio.get(), &mem); - info->Set(context, env->issuer_string(), - String::NewFromUtf8(env->isolate(), mem->data, - NewStringType::kNormal, - mem->length).ToLocalChecked()).Check(); - } - USE(BIO_reset(bio.get())); - - int nids[] = { NID_subject_alt_name, NID_info_access }; - Local keys[] = { env->subjectaltname_string(), - env->infoaccess_string() }; - CHECK_EQ(arraysize(nids), arraysize(keys)); - for (size_t i = 0; i < arraysize(nids); i++) { - int index = X509_get_ext_by_NID(cert, nids[i], -1); - if (index < 0) - continue; - - X509_EXTENSION* ext = X509_get_ext(cert, index); - CHECK_NOT_NULL(ext); - - if (!SafeX509ExtPrint(bio.get(), ext) && - X509V3_EXT_print(bio.get(), ext, 0, 0) != 1) { - info->Set(context, 
keys[i], Null(env->isolate())).Check(); - USE(BIO_reset(bio.get())); - continue; - } - - BIO_get_mem_ptr(bio.get(), &mem); - info->Set(context, keys[i], - String::NewFromUtf8(env->isolate(), mem->data, - NewStringType::kNormal, - mem->length).ToLocalChecked()).Check(); - - USE(BIO_reset(bio.get())); - } - - EVPKeyPointer pkey(X509_get_pubkey(cert)); - RSAPointer rsa; - ECPointer ec; - if (pkey) { - switch (EVP_PKEY_id(pkey.get())) { - case EVP_PKEY_RSA: - rsa.reset(EVP_PKEY_get1_RSA(pkey.get())); - break; - case EVP_PKEY_EC: - ec.reset(EVP_PKEY_get1_EC_KEY(pkey.get())); - break; - } - } - - if (rsa) { - const BIGNUM* n; - const BIGNUM* e; - RSA_get0_key(rsa.get(), &n, &e, nullptr); - BN_print(bio.get(), n); - BIO_get_mem_ptr(bio.get(), &mem); - info->Set(context, env->modulus_string(), - String::NewFromUtf8(env->isolate(), mem->data, - NewStringType::kNormal, - mem->length).ToLocalChecked()).Check(); - USE(BIO_reset(bio.get())); - - int bits = BN_num_bits(n); - info->Set(context, env->bits_string(), - Integer::New(env->isolate(), bits)).Check(); - - uint64_t exponent_word = static_cast(BN_get_word(e)); - uint32_t lo = static_cast(exponent_word); - uint32_t hi = static_cast(exponent_word >> 32); - if (hi == 0) { - BIO_printf(bio.get(), "0x%x", lo); - } else { - BIO_printf(bio.get(), "0x%x%08x", hi, lo); - } - BIO_get_mem_ptr(bio.get(), &mem); - info->Set(context, env->exponent_string(), - String::NewFromUtf8(env->isolate(), mem->data, - NewStringType::kNormal, - mem->length).ToLocalChecked()).Check(); - USE(BIO_reset(bio.get())); - - int size = i2d_RSA_PUBKEY(rsa.get(), nullptr); - CHECK_GE(size, 0); - Local pubbuff = Buffer::New(env, size).ToLocalChecked(); - unsigned char* pubserialized = - reinterpret_cast(Buffer::Data(pubbuff)); - i2d_RSA_PUBKEY(rsa.get(), &pubserialized); - info->Set(env->context(), env->pubkey_string(), pubbuff).Check(); - } else if (ec) { - const EC_GROUP* group = EC_KEY_get0_group(ec.get()); - if (group != nullptr) { - int bits = EC_GROUP_order_bits(group); - if (bits > 0) { - info->Set(context, env->bits_string(), - Integer::New(env->isolate(), bits)).Check(); - } - } - - const EC_POINT* pubkey = EC_KEY_get0_public_key(ec.get()); - Local buf; - if (pubkey != nullptr && - ECPointToBuffer( - env, group, pubkey, EC_KEY_get_conv_form(ec.get()), nullptr) - .ToLocal(&buf)) { - info->Set(context, env->pubkey_string(), buf).Check(); - } - - const int nid = EC_GROUP_get_curve_name(group); - if (nid != 0) { - // Curve is well-known, get its OID and NIST nick-name (if it has one). - - if (const char* sn = OBJ_nid2sn(nid)) { - info->Set(context, env->asn1curve_string(), - OneByteString(env->isolate(), sn)).Check(); - } - - if (const char* nist = EC_curve_nid2nist(nid)) { - info->Set(context, env->nistcurve_string(), - OneByteString(env->isolate(), nist)).Check(); - } - } else { - // Unnamed curves can be described by their mathematical properties, - // but aren't used much (at all?) with X.509/TLS. Support later if needed. 
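// The helpers being deleted through this hunk (SafeX509ExtPrint,
// AddFingerprintDigest, ECPointToBuffer, X509ToObject, ...) are not lost:
// node_crypto_common.h, newly included at the top of this file, is their
// plausible new home given the GetPeerCert()/GetCert() calls introduced
// below. For reference, the fingerprint text they emit is colon-separated
// uppercase hex; a standalone sketch of that formatting:
std::string FormatFingerprintExample(const unsigned char* md,
                                     unsigned int md_size) {
  static const char hex[] = "0123456789ABCDEF";
  std::string out;
  for (unsigned int i = 0; i < md_size; i++) {
    if (i > 0) out += ':';
    out += hex[(md[i] & 0xf0) >> 4];
    out += hex[md[i] & 0x0f];
  }
  return out;  // e.g. "DE:AD:BE:EF" for {0xde, 0xad, 0xbe, 0xef}
}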
- } - } - - pkey.reset(); - rsa.reset(); - ec.reset(); - - ASN1_TIME_print(bio.get(), X509_get0_notBefore(cert)); - BIO_get_mem_ptr(bio.get(), &mem); - info->Set(context, env->valid_from_string(), - String::NewFromUtf8(env->isolate(), mem->data, - NewStringType::kNormal, - mem->length).ToLocalChecked()).Check(); - USE(BIO_reset(bio.get())); - - ASN1_TIME_print(bio.get(), X509_get0_notAfter(cert)); - BIO_get_mem_ptr(bio.get(), &mem); - info->Set(context, env->valid_to_string(), - String::NewFromUtf8(env->isolate(), mem->data, - NewStringType::kNormal, - mem->length).ToLocalChecked()).Check(); - bio.reset(); - - unsigned char md[EVP_MAX_MD_SIZE]; - unsigned int md_size; - char fingerprint[EVP_MAX_MD_SIZE * 3 + 1]; - if (X509_digest(cert, EVP_sha1(), md, &md_size)) { - AddFingerprintDigest(md, md_size, &fingerprint); - info->Set(context, env->fingerprint_string(), - OneByteString(env->isolate(), fingerprint)).Check(); - } - if (X509_digest(cert, EVP_sha256(), md, &md_size)) { - AddFingerprintDigest(md, md_size, &fingerprint); - info->Set(context, env->fingerprint256_string(), - OneByteString(env->isolate(), fingerprint)).Check(); - } - - StackOfASN1 eku(static_cast( - X509_get_ext_d2i(cert, NID_ext_key_usage, nullptr, nullptr))); - if (eku) { - Local ext_key_usage = Array::New(env->isolate()); - char buf[256]; - - int j = 0; - for (int i = 0; i < sk_ASN1_OBJECT_num(eku.get()); i++) { - if (OBJ_obj2txt(buf, - sizeof(buf), - sk_ASN1_OBJECT_value(eku.get(), i), 1) >= 0) { - ext_key_usage->Set(context, - j++, - OneByteString(env->isolate(), buf)).Check(); - } - } - - eku.reset(); - info->Set(context, env->ext_key_usage_string(), ext_key_usage).Check(); - } - - if (ASN1_INTEGER* serial_number = X509_get_serialNumber(cert)) { - BignumPointer bn(ASN1_INTEGER_to_BN(serial_number, nullptr)); - if (bn) { - OpenSSLBuffer buf(BN_bn2hex(bn.get())); - if (buf) { - info->Set(context, env->serial_number_string(), - OneByteString(env->isolate(), buf.get())).Check(); - } - } - } - - // Raw DER certificate - int size = i2d_X509(cert, nullptr); - Local buff = Buffer::New(env, size).ToLocalChecked(); - unsigned char* serialized = reinterpret_cast( - Buffer::Data(buff)); - i2d_X509(cert, &serialized); - info->Set(context, env->raw_string(), buff).Check(); - - return scope.Escape(info); -} - - -static Local AddIssuerChainToObject(X509Pointer* cert, - Local object, - StackOfX509&& peer_certs, - Environment* const env) { - Local context = env->isolate()->GetCurrentContext(); - cert->reset(sk_X509_delete(peer_certs.get(), 0)); - for (;;) { - int i; - for (i = 0; i < sk_X509_num(peer_certs.get()); i++) { - X509* ca = sk_X509_value(peer_certs.get(), i); - if (X509_check_issued(ca, cert->get()) != X509_V_OK) - continue; - - Local ca_info = X509ToObject(env, ca); - object->Set(context, env->issuercert_string(), ca_info).Check(); - object = ca_info; - - // NOTE: Intentionally freeing cert that is not used anymore. - // Delete cert and continue aggregating issuers. - cert->reset(sk_X509_delete(peer_certs.get(), i)); - break; - } - - // Issuer not found, break out of the loop. 
- if (i == sk_X509_num(peer_certs.get())) - break; - } - return object; -} - - -static StackOfX509 CloneSSLCerts(X509Pointer&& cert, - const STACK_OF(X509)* const ssl_certs) { - StackOfX509 peer_certs(sk_X509_new(nullptr)); - if (cert) - sk_X509_push(peer_certs.get(), cert.release()); - for (int i = 0; i < sk_X509_num(ssl_certs); i++) { - X509Pointer cert(X509_dup(sk_X509_value(ssl_certs, i))); - if (!cert || !sk_X509_push(peer_certs.get(), cert.get())) - return StackOfX509(); - // `cert` is now managed by the stack. - cert.release(); - } - return peer_certs; -} - - -static Local GetLastIssuedCert(X509Pointer* cert, - const SSLPointer& ssl, - Local issuer_chain, - Environment* const env) { - Local context = env->isolate()->GetCurrentContext(); - while (X509_check_issued(cert->get(), cert->get()) != X509_V_OK) { - X509* ca; - if (SSL_CTX_get_issuer(SSL_get_SSL_CTX(ssl.get()), cert->get(), &ca) <= 0) - break; - - Local ca_info = X509ToObject(env, ca); - issuer_chain->Set(context, env->issuercert_string(), ca_info).Check(); - issuer_chain = ca_info; - - // Delete previous cert and continue aggregating issuers. - cert->reset(ca); - } - return issuer_chain; -} - - template void SSLWrap::GetPeerCertificate( const FunctionCallbackInfo& args) { @@ -2254,44 +1858,11 @@ void SSLWrap::GetPeerCertificate( ASSIGN_OR_RETURN_UNWRAP(&w, args.Holder()); Environment* env = w->ssl_env(); - ClearErrorOnReturn clear_error_on_return; + bool abbreviated = args.Length() < 1 || !args[0]->IsTrue(); - Local result; - // Used to build the issuer certificate chain. - Local issuer_chain; - - // NOTE: This is because of the odd OpenSSL behavior. On client `cert_chain` - // contains the `peer_certificate`, but on server it doesn't. - X509Pointer cert( - w->is_server() ? SSL_get_peer_certificate(w->ssl_.get()) : nullptr); - STACK_OF(X509)* ssl_certs = SSL_get_peer_cert_chain(w->ssl_.get()); - if (!cert && (ssl_certs == nullptr || sk_X509_num(ssl_certs) == 0)) - goto done; - - // Short result requested. - if (args.Length() < 1 || !args[0]->IsTrue()) { - result = X509ToObject(env, cert ? cert.get() : sk_X509_value(ssl_certs, 0)); - goto done; - } - - if (auto peer_certs = CloneSSLCerts(std::move(cert), ssl_certs)) { - // First and main certificate. - X509Pointer cert(sk_X509_value(peer_certs.get(), 0)); - CHECK(cert); - result = X509ToObject(env, cert.release()); - - issuer_chain = - AddIssuerChainToObject(&cert, result, std::move(peer_certs), env); - issuer_chain = GetLastIssuedCert(&cert, w->ssl_, issuer_chain, env); - // Last certificate should be self-signed. 
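// "Self-signed" in this loop means the certificate verifies as its own
// issuer. The OpenSSL idiom, as a minimal standalone check:
bool IsSelfSignedExample(X509* cert) {
  return X509_check_issued(cert, cert) == X509_V_OK;
}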
- if (X509_check_issued(cert.get(), cert.get()) == X509_V_OK) - issuer_chain->Set(env->context(), - env->issuercert_string(), - issuer_chain).Check(); - } - - done: - args.GetReturnValue().Set(result); + Local ret; + if (GetPeerCert(env, w->ssl_, abbreviated, w->is_server()).ToLocal(&ret)) + args.GetReturnValue().Set(ret); } @@ -2302,16 +1873,9 @@ void SSLWrap::GetCertificate( ASSIGN_OR_RETURN_UNWRAP(&w, args.Holder()); Environment* env = w->ssl_env(); - ClearErrorOnReturn clear_error_on_return; - - Local result; - - X509* cert = SSL_get_certificate(w->ssl_.get()); - - if (cert != nullptr) - result = X509ToObject(env, cert); - - args.GetReturnValue().Set(result); + Local ret; + if (GetCert(env, w->ssl_).ToLocal(&ret)) + args.GetReturnValue().Set(ret); } @@ -2390,22 +1954,16 @@ void SSLWrap::SetSession(const FunctionCallbackInfo& args) { Base* w; ASSIGN_OR_RETURN_UNWRAP(&w, args.Holder()); - if (args.Length() < 1) { + if (args.Length() < 1) return THROW_ERR_MISSING_ARGS(env, "Session argument is mandatory"); - } THROW_AND_RETURN_IF_NOT_BUFFER(env, args[0], "Session"); - ArrayBufferViewContents sbuf(args[0].As()); - - const unsigned char* p = sbuf.data(); - SSLSessionPointer sess(d2i_SSL_SESSION(nullptr, &p, sbuf.length())); + SSLSessionPointer sess = GetTLSSession(args[0]); if (sess == nullptr) return; - int r = SSL_set_session(w->ssl_.get(), sess.get()); - - if (!r) + if (!SetTLSSession(w->ssl_, sess)) return env->ThrowError("SSL_set_session error"); } @@ -2521,7 +2079,6 @@ void SSLWrap::GetEphemeralKeyInfo( Base* w; ASSIGN_OR_RETURN_UNWRAP(&w, args.Holder()); Environment* env = Environment::GetCurrent(args); - Local context = env->context(); CHECK(w->ssl_); @@ -2529,51 +2086,12 @@ void SSLWrap::GetEphemeralKeyInfo( if (w->is_server()) return args.GetReturnValue().SetNull(); - Local info = Object::New(env->isolate()); + Local ret; + if (GetEphemeralKey(env, w->ssl_).ToLocal(&ret)) + args.GetReturnValue().Set(ret); - EVP_PKEY* raw_key; - if (SSL_get_server_tmp_key(w->ssl_.get(), &raw_key)) { - EVPKeyPointer key(raw_key); - int kid = EVP_PKEY_id(key.get()); - switch (kid) { - case EVP_PKEY_DH: - info->Set(context, env->type_string(), - FIXED_ONE_BYTE_STRING(env->isolate(), "DH")).Check(); - info->Set(context, env->size_string(), - Integer::New(env->isolate(), EVP_PKEY_bits(key.get()))) - .Check(); - break; - case EVP_PKEY_EC: - case EVP_PKEY_X25519: - case EVP_PKEY_X448: - { - const char* curve_name; - if (kid == EVP_PKEY_EC) { - EC_KEY* ec = EVP_PKEY_get1_EC_KEY(key.get()); - int nid = EC_GROUP_get_curve_name(EC_KEY_get0_group(ec)); - curve_name = OBJ_nid2sn(nid); - EC_KEY_free(ec); - } else { - curve_name = OBJ_nid2sn(kid); - } - info->Set(context, env->type_string(), - FIXED_ONE_BYTE_STRING(env->isolate(), "ECDH")).Check(); - info->Set(context, env->name_string(), - OneByteString(args.GetIsolate(), - curve_name)).Check(); - info->Set(context, env->size_string(), - Integer::New(env->isolate(), - EVP_PKEY_bits(key.get()))).Check(); - } - break; - default: - break; - } - } // TODO(@sam-github) semver-major: else return ThrowCryptoError(env, // ERR_get_error()) - - return args.GetReturnValue().Set(info); } @@ -2603,58 +2121,14 @@ void SSLWrap::VerifyError(const FunctionCallbackInfo& args) { // peer certificate is questionable but it's compatible with what was // here before. 
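// The replacement below folds the chain/PSK special cases into a shared
// VerifyPeerCertificate() helper plus an X509ErrorCode() lookup; the
// primitive underneath stays the same. A minimal sketch of that primitive:
const char* VerifyReasonExample(SSL* ssl) {
  long err = SSL_get_verify_result(ssl);  // NOLINT(runtime/int)
  return err == X509_V_OK ? nullptr : X509_verify_cert_error_string(err);
}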
long x509_verify_error = // NOLINT(runtime/int) - X509_V_ERR_UNABLE_TO_GET_ISSUER_CERT; - if (X509* peer_cert = SSL_get_peer_certificate(w->ssl_.get())) { - X509_free(peer_cert); - x509_verify_error = SSL_get_verify_result(w->ssl_.get()); - } else { - const SSL_CIPHER* curr_cipher = SSL_get_current_cipher(w->ssl_.get()); - const SSL_SESSION* sess = SSL_get_session(w->ssl_.get()); - // Allow no-cert for PSK authentication in TLS1.2 and lower. - // In TLS1.3 check that session was reused because TLS1.3 PSK - // looks like session resumption. Is there a better way? - if (SSL_CIPHER_get_auth_nid(curr_cipher) == NID_auth_psk || - (SSL_SESSION_get_protocol_version(sess) == TLS1_3_VERSION && - SSL_session_reused(w->ssl_.get()))) - return args.GetReturnValue().SetNull(); - } + VerifyPeerCertificate(w->ssl_, X509_V_ERR_UNABLE_TO_GET_ISSUER_CERT); if (x509_verify_error == X509_V_OK) return args.GetReturnValue().SetNull(); const char* reason = X509_verify_cert_error_string(x509_verify_error); const char* code = reason; -#define CASE_X509_ERR(CODE) case X509_V_ERR_##CODE: code = #CODE; break; - switch (x509_verify_error) { - CASE_X509_ERR(UNABLE_TO_GET_ISSUER_CERT) - CASE_X509_ERR(UNABLE_TO_GET_CRL) - CASE_X509_ERR(UNABLE_TO_DECRYPT_CERT_SIGNATURE) - CASE_X509_ERR(UNABLE_TO_DECRYPT_CRL_SIGNATURE) - CASE_X509_ERR(UNABLE_TO_DECODE_ISSUER_PUBLIC_KEY) - CASE_X509_ERR(CERT_SIGNATURE_FAILURE) - CASE_X509_ERR(CRL_SIGNATURE_FAILURE) - CASE_X509_ERR(CERT_NOT_YET_VALID) - CASE_X509_ERR(CERT_HAS_EXPIRED) - CASE_X509_ERR(CRL_NOT_YET_VALID) - CASE_X509_ERR(CRL_HAS_EXPIRED) - CASE_X509_ERR(ERROR_IN_CERT_NOT_BEFORE_FIELD) - CASE_X509_ERR(ERROR_IN_CERT_NOT_AFTER_FIELD) - CASE_X509_ERR(ERROR_IN_CRL_LAST_UPDATE_FIELD) - CASE_X509_ERR(ERROR_IN_CRL_NEXT_UPDATE_FIELD) - CASE_X509_ERR(OUT_OF_MEM) - CASE_X509_ERR(DEPTH_ZERO_SELF_SIGNED_CERT) - CASE_X509_ERR(SELF_SIGNED_CERT_IN_CHAIN) - CASE_X509_ERR(UNABLE_TO_GET_ISSUER_CERT_LOCALLY) - CASE_X509_ERR(UNABLE_TO_VERIFY_LEAF_SIGNATURE) - CASE_X509_ERR(CERT_CHAIN_TOO_LONG) - CASE_X509_ERR(CERT_REVOKED) - CASE_X509_ERR(INVALID_CA) - CASE_X509_ERR(PATH_LENGTH_EXCEEDED) - CASE_X509_ERR(INVALID_PURPOSE) - CASE_X509_ERR(CERT_UNTRUSTED) - CASE_X509_ERR(CERT_REJECTED) - } -#undef CASE_X509_ERR + code = X509ErrorCode(x509_verify_error); Isolate* isolate = args.GetIsolate(); Local reason_string = OneByteString(isolate, reason); @@ -2672,23 +2146,14 @@ void SSLWrap::GetCipher(const FunctionCallbackInfo& args) { Base* w; ASSIGN_OR_RETURN_UNWRAP(&w, args.Holder()); Environment* env = w->ssl_env(); - Local context = env->context(); const SSL_CIPHER* c = SSL_get_current_cipher(w->ssl_.get()); if (c == nullptr) return; - Local info = Object::New(env->isolate()); - const char* cipher_name = SSL_CIPHER_get_name(c); - info->Set(context, env->name_string(), - OneByteString(args.GetIsolate(), cipher_name)).Check(); - const char* cipher_standard_name = SSL_CIPHER_standard_name(c); - info->Set(context, env->standard_name_string(), - OneByteString(args.GetIsolate(), cipher_standard_name)).Check(); - const char* cipher_version = SSL_CIPHER_get_version(c); - info->Set(context, env->version_string(), - OneByteString(args.GetIsolate(), cipher_version)).Check(); - args.GetReturnValue().Set(info); + Local ret; + if (GetCipherInfo(env, w->ssl_).ToLocal(&ret)) + args.GetReturnValue().Set(ret); } @@ -2772,6 +2237,40 @@ void SSLWrap::GetSharedSigalgs(const FunctionCallbackInfo& args) { Array::New(env->isolate(), ret_arr.out(), ret_arr.length())); } +template +void SSLWrap::ExportKeyingMaterial( + const 
FunctionCallbackInfo<Value>& args) { + CHECK(args[0]->IsInt32()); + CHECK(args[1]->IsString()); + + Base* w; + ASSIGN_OR_RETURN_UNWRAP(&w, args.Holder()); + Environment* env = w->ssl_env(); + + uint32_t olen = args[0].As<Uint32>()->Value(); + node::Utf8Value label(env->isolate(), args[1]); + + AllocatedBuffer out = env->AllocateManaged(olen); + + ByteSource context; + bool use_context = !args[2]->IsUndefined(); + if (use_context) + context = ByteSource::FromBuffer(args[2]); + + if (SSL_export_keying_material(w->ssl_.get(), + reinterpret_cast<unsigned char*>(out.data()), + olen, + *label, + label.length(), + reinterpret_cast<const unsigned char*>( + context.get()), + context.size(), + use_context) != 1) { + return ThrowCryptoError(env, ERR_get_error(), "SSL_export_keying_material"); + } + + args.GetReturnValue().Set(out.ToBuffer().ToLocalChecked()); +} template <class Base> void SSLWrap<Base>::GetProtocol(const FunctionCallbackInfo<Value>& args) { @@ -2849,10 +2348,7 @@ void SSLWrap<Base>::SetALPNProtocols(const FunctionCallbackInfo<Value>& args) { return env->ThrowTypeError("Must give a Buffer as first argument"); if (w->is_client()) { - ArrayBufferViewContents<unsigned char> alpn_protos(args[0]); - int r = SSL_set_alpn_protos( - w->ssl_.get(), alpn_protos.data(), alpn_protos.length()); - CHECK_EQ(r, 0); + CHECK(SetALPN(w->ssl_, args[0])); } else { CHECK( w->object()->SetPrivate( @@ -2875,18 +2371,10 @@ int SSLWrap<Base>::TLSExtStatusCallback(SSL* s, void* arg) { if (w->is_client()) { // Incoming response - const unsigned char* resp; - int len = SSL_get_tlsext_status_ocsp_resp(s, &resp); Local<Value> arg; - if (resp == nullptr) { - arg = Null(env->isolate()); - } else { - arg = - Buffer::Copy(env, reinterpret_cast<const char*>(resp), len) - .ToLocalChecked(); - } - - w->MakeCallback(env->onocspresponse_string(), 1, &arg); + MaybeLocal<Value> ret = GetSSLOCSPResponse(env, s, Null(env->isolate())); + if (ret.ToLocal(&arg)) + w->MakeCallback(env->onocspresponse_string(), 1, &arg); // No async acceptance is possible, so always return 1 to accept the // response. The listener for 'OCSPResponse' event has no control over @@ -2945,7 +2433,7 @@ int SSLWrap<Base>::SSLCertCallback(SSL* s, void* arg) { Local<Object> info = Object::New(env->isolate()); - const char* servername = SSL_get_servername(s, TLSEXT_NAMETYPE_host_name); + const char* servername = GetServerName(s); if (servername == nullptr) { info->Set(context, env->servername_string(), @@ -2994,23 +2482,7 @@ void SSLWrap<Base>::CertCbDone(const FunctionCallbackInfo<Value>& args) { // Store the SNI context for later use. w->sni_context_ = BaseObjectPtr<SecureContext>(sc); - int rv; - - // NOTE: reference count is not increased by this API methods - X509* x509 = SSL_CTX_get0_certificate(sc->ctx_.get()); - EVP_PKEY* pkey = SSL_CTX_get0_privatekey(sc->ctx_.get()); - STACK_OF(X509)* chain; - - rv = SSL_CTX_get0_chain_certs(sc->ctx_.get(), &chain); - if (rv) - rv = SSL_use_certificate(w->ssl_.get(), x509); - if (rv) - rv = SSL_use_PrivateKey(w->ssl_.get(), pkey); - if (rv && chain != nullptr) - rv = SSL_set1_chain(w->ssl_.get(), chain); - if (rv) - rv = w->SetCACerts(sc); - if (!rv) { + if (UseSNIContext(w->ssl_, sc) && !w->SetCACerts(sc)) { // Not clear why sometimes we throw error, and sometimes we call // onerror(). Both cause .destroy(), but onerror does a bit more.
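// ExportKeyingMaterial, added above, surfaces RFC 5705 keying-material
// exporters. A standalone sketch of the underlying OpenSSL call (buffer
// size and label are illustrative):
bool ExportKeyingMaterialExample(SSL* ssl, unsigned char* out, size_t len) {
  static const char label[] = "EXPERIMENTAL node example";
  return SSL_export_keying_material(ssl, out, len,
                                    label, sizeof(label) - 1,
                                    nullptr, 0, 0 /* no context */) == 1;
}
// On the JavaScript side this is presumably reached via
// tlsSocket.exportKeyingMaterial(length, label[, context]).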
unsigned long err = ERR_get_error(); // NOLINT(runtime/int) @@ -3749,7 +3221,8 @@ EVP_PKEY* ManagedEVPPKey::get() const { Local KeyObject::Initialize(Environment* env, Local target) { Local t = env->NewFunctionTemplate(New); - t->InstanceTemplate()->SetInternalFieldCount(1); + t->InstanceTemplate()->SetInternalFieldCount( + KeyObject::kInternalFieldCount); env->SetProtoMethod(t, "init", Init); env->SetProtoMethodNoSideEffect(t, "getSymmetricKeySize", @@ -3814,6 +3287,15 @@ KeyType KeyObject::GetKeyType() const { return this->key_type_; } +KeyObject::KeyObject(Environment* env, + v8::Local wrap, + KeyType key_type) + : BaseObject(env, wrap), + key_type_(key_type), + symmetric_key_(nullptr, nullptr) { + MakeWeak(); +} + void KeyObject::Init(const FunctionCallbackInfo& args) { KeyObject* key; ASSIGN_OR_RETURN_UNWRAP(&key, args.Holder()); @@ -3958,11 +3440,23 @@ MaybeLocal KeyObject::ExportPrivateKey( return WritePrivateKey(env(), asymmetric_key_.get(), config); } +CipherBase::CipherBase(Environment* env, + v8::Local wrap, + CipherKind kind) + : BaseObject(env, wrap), + ctx_(nullptr), + kind_(kind), + auth_tag_state_(kAuthTagUnknown), + auth_tag_len_(kNoAuthTagLength), + pending_auth_failed_(false) { + MakeWeak(); +} void CipherBase::Initialize(Environment* env, Local target) { Local t = env->NewFunctionTemplate(New); - t->InstanceTemplate()->SetInternalFieldCount(1); + t->InstanceTemplate()->SetInternalFieldCount( + CipherBase::kInternalFieldCount); env->SetProtoMethod(t, "init", Init); env->SetProtoMethod(t, "initiv", InitIv); @@ -4178,7 +3672,7 @@ void CipherBase::InitIv(const FunctionCallbackInfo& args) { reinterpret_cast(key.get()), key.size(), iv_buf.data(), - iv_len, + static_cast(iv_len), auth_tag_len); } @@ -4580,11 +4074,17 @@ void CipherBase::Final(const FunctionCallbackInfo& args) { args.GetReturnValue().Set(out.ToBuffer().ToLocalChecked()); } +Hmac::Hmac(Environment* env, v8::Local wrap) + : BaseObject(env, wrap), + ctx_(nullptr) { + MakeWeak(); +} void Hmac::Initialize(Environment* env, Local target) { Local t = env->NewFunctionTemplate(New); - t->InstanceTemplate()->SetInternalFieldCount(1); + t->InstanceTemplate()->SetInternalFieldCount( + Hmac::kInternalFieldCount); env->SetProtoMethod(t, "init", HmacInit); env->SetProtoMethod(t, "update", HmacUpdate); @@ -4699,11 +4199,19 @@ void Hmac::HmacDigest(const FunctionCallbackInfo& args) { args.GetReturnValue().Set(rc.ToLocalChecked()); } +Hash::Hash(Environment* env, v8::Local wrap) + : BaseObject(env, wrap), + mdctx_(nullptr), + has_md_(false), + md_value_(nullptr) { + MakeWeak(); +} void Hash::Initialize(Environment* env, Local target) { Local t = env->NewFunctionTemplate(New); - t->InstanceTemplate()->SetInternalFieldCount(1); + t->InstanceTemplate()->SetInternalFieldCount( + Hash::kInternalFieldCount); env->SetProtoMethod(t, "update", HashUpdate); env->SetProtoMethod(t, "digest", HashDigest); @@ -4713,6 +4221,10 @@ void Hash::Initialize(Environment* env, Local target) { t->GetFunction(env->context()).ToLocalChecked()).Check(); } +Hash::~Hash() { + if (md_value_ != nullptr) + OPENSSL_clear_free(md_value_, md_len_); +} void Hash::New(const FunctionCallbackInfo& args) { Environment* env = Environment::GetCurrent(args); @@ -4937,6 +4449,10 @@ void CheckThrow(Environment* env, SignBase::Error error) { } } +SignBase::SignBase(Environment* env, v8::Local wrap) + : BaseObject(env, wrap) { +} + void SignBase::CheckThrow(SignBase::Error error) { node::crypto::CheckThrow(env(), error); } @@ -4960,11 +4476,15 @@ static bool 
ApplyRSAOptions(const ManagedEVPPKey& pkey, } +Sign::Sign(Environment* env, v8::Local wrap) : SignBase(env, wrap) { + MakeWeak(); +} void Sign::Initialize(Environment* env, Local target) { Local t = env->NewFunctionTemplate(New); - t->InstanceTemplate()->SetInternalFieldCount(1); + t->InstanceTemplate()->SetInternalFieldCount( + SignBase::kInternalFieldCount); env->SetProtoMethod(t, "init", SignInit); env->SetProtoMethod(t, "update", SignUpdate); @@ -5280,10 +4800,16 @@ void SignOneShot(const FunctionCallbackInfo& args) { args.GetReturnValue().Set(signature.ToBuffer().ToLocalChecked()); } +Verify::Verify(Environment* env, v8::Local wrap) : + SignBase(env, wrap) { + MakeWeak(); +} + void Verify::Initialize(Environment* env, Local target) { Local t = env->NewFunctionTemplate(New); - t->InstanceTemplate()->SetInternalFieldCount(1); + t->InstanceTemplate()->SetInternalFieldCount( + SignBase::kInternalFieldCount); env->SetProtoMethod(t, "init", VerifyInit); env->SetProtoMethod(t, "update", VerifyUpdate); @@ -5323,8 +4849,7 @@ void Verify::VerifyUpdate(const FunctionCallbackInfo& args) { SignBase::Error Verify::VerifyFinal(const ManagedEVPPKey& pkey, - const char* sig, - int siglen, + const ByteSource& sig, int padding, const Maybe& saltlen, bool* verify_result) { @@ -5345,11 +4870,8 @@ SignBase::Error Verify::VerifyFinal(const ManagedEVPPKey& pkey, ApplyRSAOptions(pkey, pkctx.get(), padding, saltlen) && EVP_PKEY_CTX_set_signature_md(pkctx.get(), EVP_MD_CTX_md(mdctx.get())) > 0) { - const int r = EVP_PKEY_verify(pkctx.get(), - reinterpret_cast(sig), - siglen, - m, - m_len); + const unsigned char* s = reinterpret_cast(sig.get()); + const int r = EVP_PKEY_verify(pkctx.get(), s, sig.size(), m, m_len); *verify_result = r == 1; } @@ -5394,7 +4916,7 @@ void Verify::VerifyFinal(const FunctionCallbackInfo& args) { } bool verify_result; - Error err = verify->VerifyFinal(pkey, hbuf.data(), hbuf.length(), padding, + Error err = verify->VerifyFinal(pkey, signature, padding, salt_len, &verify_result); if (err != kSignOk) return verify->CheckThrow(err); @@ -5587,6 +5109,10 @@ void PublicKeyCipher::Cipher(const FunctionCallbackInfo& args) { args.GetReturnValue().Set(out.ToBuffer().ToLocalChecked()); } +DiffieHellman::DiffieHellman(Environment* env, v8::Local wrap) + : BaseObject(env, wrap), verifyError_(0) { + MakeWeak(); +} void DiffieHellman::Initialize(Environment* env, Local target) { auto make = [&] (Local name, FunctionCallback callback) { @@ -5595,7 +5121,8 @@ void DiffieHellman::Initialize(Environment* env, Local target) { const PropertyAttribute attributes = static_cast(ReadOnly | DontDelete); - t->InstanceTemplate()->SetInternalFieldCount(1); + t->InstanceTemplate()->SetInternalFieldCount( + DiffieHellman::kInternalFieldCount); env->SetProtoMethod(t, "generateKeys", GenerateKeys); env->SetProtoMethod(t, "computeSecret", ComputeSecret); @@ -5832,11 +5359,7 @@ void DiffieHellman::ComputeSecret(const FunctionCallbackInfo& args) { ClearErrorOnReturn clear_error_on_return; - if (args.Length() == 0) { - return THROW_ERR_MISSING_ARGS( - env, "Other party's public key argument is mandatory"); - } - + CHECK_EQ(args.Length(), 1); THROW_AND_RETURN_IF_NOT_BUFFER(env, args[0], "Other party's public key"); ArrayBufferViewContents key_buf(args[0].As()); BignumPointer key(BN_bin2bn(key_buf.data(), key_buf.length(), nullptr)); @@ -5887,11 +5410,7 @@ void DiffieHellman::SetKey(const FunctionCallbackInfo& args, char errmsg[64]; - if (args.Length() == 0) { - snprintf(errmsg, sizeof(errmsg), "%s argument is mandatory", 
what); - return THROW_ERR_MISSING_ARGS(env, errmsg); - } - + CHECK_EQ(args.Length(), 1); if (!Buffer::HasInstance(args[0])) { snprintf(errmsg, sizeof(errmsg), "%s must be a buffer", what); return THROW_ERR_INVALID_ARG_TYPE(env, errmsg); @@ -5942,7 +5461,7 @@ void ECDH::Initialize(Environment* env, Local target) { Local t = env->NewFunctionTemplate(New); - t->InstanceTemplate()->SetInternalFieldCount(1); + t->InstanceTemplate()->SetInternalFieldCount(ECDH::kInternalFieldCount); env->SetProtoMethod(t, "generateKeys", GenerateKeys); env->SetProtoMethod(t, "computeSecret", ComputeSecret); @@ -5956,6 +5475,15 @@ void ECDH::Initialize(Environment* env, Local target) { t->GetFunction(env->context()).ToLocalChecked()).Check(); } +ECDH::ECDH(Environment* env, v8::Local wrap, ECKeyPointer&& key) + : BaseObject(env, wrap), + key_(std::move(key)), + group_(EC_KEY_get0_group(key_.get())) { + MakeWeak(); + CHECK_NOT_NULL(group_); +} + +ECDH::~ECDH() {} void ECDH::New(const FunctionCallbackInfo& args) { Environment* env = Environment::GetCurrent(args); @@ -6799,15 +6327,8 @@ void GenerateKeyPair(const FunctionCallbackInfo& args, Local err, pubkey, privkey; job->ToResult(&err, &pubkey, &privkey); - bool (*IsNotTrue)(Maybe) = [](Maybe maybe) { - return maybe.IsNothing() || !maybe.ToChecked(); - }; - Local ret = Array::New(env->isolate(), 3); - if (IsNotTrue(ret->Set(env->context(), 0, err)) || - IsNotTrue(ret->Set(env->context(), 1, pubkey)) || - IsNotTrue(ret->Set(env->context(), 2, privkey))) - return; - args.GetReturnValue().Set(ret); + Local ret[] = { err, pubkey, privkey }; + args.GetReturnValue().Set(Array::New(env->isolate(), ret, arraysize(ret))); } void GenerateKeyPairRSA(const FunctionCallbackInfo& args) { @@ -6940,17 +6461,6 @@ void GetSSLCiphers(const FunctionCallbackInfo& args) { CHECK(ssl); STACK_OF(SSL_CIPHER)* ciphers = SSL_get_ciphers(ssl.get()); - int n = sk_SSL_CIPHER_num(ciphers); - Local arr = Array::New(env->isolate(), n); - - for (int i = 0; i < n; ++i) { - const SSL_CIPHER* cipher = sk_SSL_CIPHER_value(ciphers, i); - arr->Set(env->context(), - i, - OneByteString(args.GetIsolate(), - SSL_CIPHER_get_name(cipher))).Check(); - } - // TLSv1.3 ciphers aren't listed by EVP. There are only 5, we could just // document them, but since there are only 5, easier to just add them manually // and not have to explain their absence in the API docs. 
They are lower-cased @@ -6963,13 +6473,20 @@ void GetSSLCiphers(const FunctionCallbackInfo& args) { "tls_aes_128_ccm_sha256" }; + const int n = sk_SSL_CIPHER_num(ciphers); + std::vector> arr(n + arraysize(TLS13_CIPHERS)); + + for (int i = 0; i < n; ++i) { + const SSL_CIPHER* cipher = sk_SSL_CIPHER_value(ciphers, i); + arr[i] = OneByteString(env->isolate(), SSL_CIPHER_get_name(cipher)); + } + for (unsigned i = 0; i < arraysize(TLS13_CIPHERS); ++i) { const char* name = TLS13_CIPHERS[i]; - arr->Set(env->context(), - arr->Length(), OneByteString(args.GetIsolate(), name)).Check(); + arr[n + i] = OneByteString(env->isolate(), name); } - args.GetReturnValue().Set(arr); + args.GetReturnValue().Set(Array::New(env->isolate(), arr.data(), arr.size())); } @@ -7020,22 +6537,23 @@ void GetHashes(const FunctionCallbackInfo& args) { void GetCurves(const FunctionCallbackInfo& args) { Environment* env = Environment::GetCurrent(args); const size_t num_curves = EC_get_builtin_curves(nullptr, 0); - Local arr = Array::New(env->isolate(), num_curves); if (num_curves) { std::vector curves(num_curves); if (EC_get_builtin_curves(curves.data(), num_curves)) { - for (size_t i = 0; i < num_curves; i++) { - arr->Set(env->context(), - i, - OneByteString(env->isolate(), - OBJ_nid2sn(curves[i].nid))).Check(); - } + std::vector> arr(num_curves); + + for (size_t i = 0; i < num_curves; i++) + arr[i] = OneByteString(env->isolate(), OBJ_nid2sn(curves[i].nid)); + + args.GetReturnValue().Set( + Array::New(env->isolate(), arr.data(), arr.size())); + return; } } - args.GetReturnValue().Set(arr); + args.GetReturnValue().Set(Array::New(env->isolate())); } diff --git a/src/node_crypto.h b/src/node_crypto.h index b57dc29de29..772a34a7da7 100644 --- a/src/node_crypto.h +++ b/src/node_crypto.h @@ -84,14 +84,14 @@ extern void UseExtraCaCerts(const std::string& file); void InitCryptoOnce(); -class SecureContext : public BaseObject { +class SecureContext final : public BaseObject { public: - ~SecureContext() override { - Reset(); - } + ~SecureContext() override; static void Initialize(Environment* env, v8::Local target); + SSL_CTX* operator*() const { return ctx_.get(); } + // TODO(joyeecheung): track the memory used by OpenSSL types SET_NO_MEMORY_INFO() SET_MEMORY_INFO_NAME(SecureContext) @@ -177,20 +177,8 @@ class SecureContext : public BaseObject { HMAC_CTX* hctx, int enc); - SecureContext(Environment* env, v8::Local wrap) - : BaseObject(env, wrap) { - MakeWeak(); - env->isolate()->AdjustAmountOfExternalAllocatedMemory(kExternalSize); - } - - inline void Reset() { - if (ctx_ != nullptr) { - env()->isolate()->AdjustAmountOfExternalAllocatedMemory(-kExternalSize); - } - ctx_.reset(); - cert_.reset(); - issuer_.reset(); - } + SecureContext(Environment* env, v8::Local wrap); + void Reset(); }; // SSLWrap implicitly depends on the inheriting class' handle having an @@ -263,6 +251,8 @@ class SSLWrap { static void VerifyError(const v8::FunctionCallbackInfo& args); static void GetCipher(const v8::FunctionCallbackInfo& args); static void GetSharedSigalgs(const v8::FunctionCallbackInfo& args); + static void ExportKeyingMaterial( + const v8::FunctionCallbackInfo& args); static void EndParser(const v8::FunctionCallbackInfo& args); static void CertCbDone(const v8::FunctionCallbackInfo& args); static void Renegotiate(const v8::FunctionCallbackInfo& args); @@ -461,14 +451,7 @@ class KeyObject : public BaseObject { v8::MaybeLocal ExportPrivateKey( const PrivateKeyEncodingConfig& config) const; - KeyObject(Environment* env, - v8::Local wrap, - KeyType 
key_type) - : BaseObject(env, wrap), - key_type_(key_type), - symmetric_key_(nullptr, nullptr) { - MakeWeak(); - } + KeyObject(Environment* env, v8::Local wrap, KeyType key_type); private: const KeyType key_type_; @@ -542,17 +525,7 @@ class CipherBase : public BaseObject { static void SetAuthTag(const v8::FunctionCallbackInfo& args); static void SetAAD(const v8::FunctionCallbackInfo& args); - CipherBase(Environment* env, - v8::Local wrap, - CipherKind kind) - : BaseObject(env, wrap), - ctx_(nullptr), - kind_(kind), - auth_tag_state_(kAuthTagUnknown), - auth_tag_len_(kNoAuthTagLength), - pending_auth_failed_(false) { - MakeWeak(); - } + CipherBase(Environment* env, v8::Local wrap, CipherKind kind); private: DeleteFnPtr ctx_; @@ -582,18 +555,16 @@ class Hmac : public BaseObject { static void HmacUpdate(const v8::FunctionCallbackInfo& args); static void HmacDigest(const v8::FunctionCallbackInfo& args); - Hmac(Environment* env, v8::Local wrap) - : BaseObject(env, wrap), - ctx_(nullptr) { - MakeWeak(); - } + Hmac(Environment* env, v8::Local wrap); private: DeleteFnPtr ctx_; }; -class Hash : public BaseObject { +class Hash final : public BaseObject { public: + ~Hash() override; + static void Initialize(Environment* env, v8::Local target); // TODO(joyeecheung): track the memory used by OpenSSL types @@ -609,18 +580,7 @@ class Hash : public BaseObject { static void HashUpdate(const v8::FunctionCallbackInfo& args); static void HashDigest(const v8::FunctionCallbackInfo& args); - Hash(Environment* env, v8::Local wrap) - : BaseObject(env, wrap), - mdctx_(nullptr), - has_md_(false), - md_value_(nullptr) { - MakeWeak(); - } - - ~Hash() override { - if (md_value_ != nullptr) - OPENSSL_clear_free(md_value_, md_len_); - } + Hash(Environment* env, v8::Local wrap); private: EVPMDPointer mdctx_; @@ -642,9 +602,7 @@ class SignBase : public BaseObject { kSignMalformedSignature } Error; - SignBase(Environment* env, v8::Local wrap) - : BaseObject(env, wrap) { - } + SignBase(Environment* env, v8::Local wrap); Error Init(const char* sign_type); Error Update(const char* data, int len); @@ -690,9 +648,7 @@ class Sign : public SignBase { static void SignUpdate(const v8::FunctionCallbackInfo& args); static void SignFinal(const v8::FunctionCallbackInfo& args); - Sign(Environment* env, v8::Local wrap) : SignBase(env, wrap) { - MakeWeak(); - } + Sign(Environment* env, v8::Local wrap); }; class Verify : public SignBase { @@ -700,8 +656,7 @@ class Verify : public SignBase { static void Initialize(Environment* env, v8::Local target); Error VerifyFinal(const ManagedEVPPKey& key, - const char* sig, - int siglen, + const ByteSource& sig, int padding, const v8::Maybe& saltlen, bool* verify_result); @@ -712,9 +667,7 @@ class Verify : public SignBase { static void VerifyUpdate(const v8::FunctionCallbackInfo& args); static void VerifyFinal(const v8::FunctionCallbackInfo& args); - Verify(Environment* env, v8::Local wrap) : SignBase(env, wrap) { - MakeWeak(); - } + Verify(Environment* env, v8::Local wrap); }; class PublicKeyCipher { @@ -771,11 +724,7 @@ class DiffieHellman : public BaseObject { static void VerifyErrorGetter( const v8::FunctionCallbackInfo& args); - DiffieHellman(Environment* env, v8::Local wrap) - : BaseObject(env, wrap), - verifyError_(0) { - MakeWeak(); - } + DiffieHellman(Environment* env, v8::Local wrap); // TODO(joyeecheung): track the memory used by OpenSSL types SET_NO_MEMORY_INFO() @@ -794,11 +743,9 @@ class DiffieHellman : public BaseObject { DHPointer dh_; }; -class ECDH : public BaseObject { +class ECDH 
final : public BaseObject {
 public:
-  ~ECDH() override {
-    group_ = nullptr;
-  }
+  ~ECDH() override;

   static void Initialize(Environment* env, v8::Local<v8::Object> target);
   static ECPointPointer BufferToPoint(Environment* env,
@@ -811,13 +758,7 @@
   SET_SELF_SIZE(ECDH)

 protected:
-  ECDH(Environment* env, v8::Local<v8::Object> wrap, ECKeyPointer&& key)
-      : BaseObject(env, wrap),
-        key_(std::move(key)),
-        group_(EC_KEY_get0_group(key_.get())) {
-    MakeWeak();
-    CHECK_NOT_NULL(group_);
-  }
+  ECDH(Environment* env, v8::Local<v8::Object> wrap, ECKeyPointer&& key);

   static void New(const v8::FunctionCallbackInfo<v8::Value>& args);
   static void GenerateKeys(const v8::FunctionCallbackInfo<v8::Value>& args);
@@ -840,6 +781,17 @@
 void SetEngine(const v8::FunctionCallbackInfo<v8::Value>& args);
 #endif  // !OPENSSL_NO_ENGINE

 void InitCrypto(v8::Local<v8::Object> target);
+void ThrowCryptoError(Environment* env,
+                      unsigned long err,  // NOLINT(runtime/int)
+                      const char* message = nullptr);
+
+template <typename T>
+inline T* MallocOpenSSL(size_t count) {
+  void* mem = OPENSSL_malloc(MultiplyWithOverflowCheck(count, sizeof(T)));
+  CHECK_IMPLIES(mem == nullptr, count == 0);
+  return static_cast<T*>(mem);
+}
+
 }  // namespace crypto
 }  // namespace node
diff --git a/src/node_crypto_bio.cc b/src/node_crypto_bio.cc
index fc143043ba5..55f5e8a5a37 100644
--- a/src/node_crypto_bio.cc
+++ b/src/node_crypto_bio.cc
@@ -438,6 +438,13 @@ void NodeBIO::TryAllocateForWrite(size_t hint) {
       kThroughputBufferLength;
   if (len < hint)
     len = hint;
+
+  // If there is a one-time allocation size hint, use it.
+  if (allocate_hint_ > len) {
+    len = allocate_hint_;
+    allocate_hint_ = 0;
+  }
+
   Buffer* next = new Buffer(env_, len);

   if (w == nullptr) {
diff --git a/src/node_crypto_bio.h b/src/node_crypto_bio.h
index 5de943806a9..333a50848c7 100644
--- a/src/node_crypto_bio.h
+++ b/src/node_crypto_bio.h
@@ -96,6 +96,21 @@ class NodeBIO : public MemoryRetainer {
     return length_;
   }

+  // Provide a hint about the size of the next pending set of writes. TLS
+  // writes records of at most 16 KB of data plus a 5-byte header, a MAC
+  // (up to 20 bytes for SSLv3, TLS 1.0, and TLS 1.1, and up to 32 bytes
+  // for TLS 1.2), and padding if a block cipher is used. A large write
+  // would otherwise allocate many buffers that are then garbage-collected,
+  // which can cause long pauses. Providing a guess about how much buffer
+  // space the next allocation will need removes that overhead.
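+  // As a worked example of the hint arithmetic below (illustrative only):
+  // for a hypothetical single 64 KiB write, size / 16384 == 4, so the hint
+  // becomes (4 + 1) * (16384 + 5 + 32) == 82105 bytes, i.e. room for five
+  // maximum-size records plus their per-record header and MAC overhead in
+  // one allocation instead of several incremental ones.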
+ inline void set_allocate_tls_hint(size_t size) { + constexpr size_t kThreshold = 16 * 1024; + if (size >= kThreshold) { + allocate_hint_ = (size / kThreshold + 1) * (kThreshold + 5 + 32); + } + } + inline void set_eof_return(int num) { eof_return_ = num; } @@ -164,6 +179,7 @@ class NodeBIO : public MemoryRetainer { Environment* env_ = nullptr; size_t initial_ = kInitialBufferLength; size_t length_ = 0; + size_t allocate_hint_ = 0; int eof_return_ = -1; Buffer* read_head_ = nullptr; Buffer* write_head_ = nullptr; diff --git a/src/node_crypto_common.cc b/src/node_crypto_common.cc new file mode 100644 index 00000000000..197bc5cd591 --- /dev/null +++ b/src/node_crypto_common.cc @@ -0,0 +1,1110 @@ +#include "env-inl.h" +#include "node_buffer.h" +#include "node_crypto.h" +#include "node_crypto_common.h" +#include "node.h" +#include "node_internals.h" +#include "node_url.h" +#include "string_bytes.h" +#include "v8.h" + +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +namespace node { + +using v8::Array; +using v8::ArrayBufferView; +using v8::Context; +using v8::EscapableHandleScope; +using v8::Integer; +using v8::Local; +using v8::MaybeLocal; +using v8::NewStringType; +using v8::Null; +using v8::Object; +using v8::String; +using v8::Value; + +namespace crypto { + +static constexpr int X509_NAME_FLAGS = + ASN1_STRFLGS_ESC_CTRL | + ASN1_STRFLGS_UTF8_CONVERT | + XN_FLAG_SEP_MULTILINE | + XN_FLAG_FN_SN; + +int SSL_CTX_get_issuer(SSL_CTX* ctx, X509* cert, X509** issuer) { + X509_STORE* store = SSL_CTX_get_cert_store(ctx); + DeleteFnPtr store_ctx( + X509_STORE_CTX_new()); + return store_ctx.get() != nullptr && + X509_STORE_CTX_init(store_ctx.get(), store, nullptr, nullptr) == 1 && + X509_STORE_CTX_get1_issuer(issuer, store_ctx.get(), cert) == 1; +} + +void LogSecret( + const SSLPointer& ssl, + const char* name, + const unsigned char* secret, + size_t secretlen) { + auto keylog_cb = SSL_CTX_get_keylog_callback(SSL_get_SSL_CTX(ssl.get())); + unsigned char crandom[32]; + + if (keylog_cb == nullptr || + SSL_get_client_random(ssl.get(), crandom, 32) != 32) { + return; + } + + std::string line = name; + line += " " + StringBytes::hex_encode( + reinterpret_cast(crandom), 32); + line += " " + StringBytes::hex_encode( + reinterpret_cast(secret), secretlen); + keylog_cb(ssl.get(), line.c_str()); +} + +bool SetALPN(const SSLPointer& ssl, const std::string& alpn) { + return SSL_set_alpn_protos( + ssl.get(), + reinterpret_cast(alpn.c_str()), + alpn.length()) == 0; +} + +bool SetALPN(const SSLPointer& ssl, Local alpn) { + if (!alpn->IsArrayBufferView()) + return false; + ArrayBufferViewContents protos(alpn.As()); + return SSL_set_alpn_protos(ssl.get(), protos.data(), protos.length()) == 0; +} + +MaybeLocal GetSSLOCSPResponse( + Environment* env, + SSL* ssl, + Local default_value) { + const unsigned char* resp; + int len = SSL_get_tlsext_status_ocsp_resp(ssl, &resp); + if (resp == nullptr) + return default_value; + + Local ret; + MaybeLocal maybe_buffer = + Buffer::Copy(env, reinterpret_cast(resp), len); + + if (!maybe_buffer.ToLocal(&ret)) + return MaybeLocal(); + + return ret; +} + +bool SetTLSSession( + const SSLPointer& ssl, + const unsigned char* buf, + size_t length) { + SSLSessionPointer s(d2i_SSL_SESSION(nullptr, &buf, length)); + return s == nullptr ? 
false : SetTLSSession(ssl, s);
+}
+
+bool SetTLSSession(
+    const SSLPointer& ssl,
+    const SSLSessionPointer& session) {
+  return session != nullptr && SSL_set_session(ssl.get(), session.get()) == 1;
+}
+
+SSLSessionPointer GetTLSSession(Local<Value> val) {
+  if (!val->IsArrayBufferView())
+    return SSLSessionPointer();
+  ArrayBufferViewContents<unsigned char> sbuf(val.As<ArrayBufferView>());
+  return GetTLSSession(sbuf.data(), sbuf.length());
+}
+
+SSLSessionPointer GetTLSSession(const unsigned char* buf, size_t length) {
+  return SSLSessionPointer(d2i_SSL_SESSION(nullptr, &buf, length));
+}
+
+std::unordered_multimap<std::string, std::string>
+GetCertificateAltNames(X509* cert) {
+  std::unordered_multimap<std::string, std::string> map;
+  BIOPointer bio(BIO_new(BIO_s_mem()));
+  BUF_MEM* mem;
+  int idx = X509_get_ext_by_NID(cert, NID_subject_alt_name, -1);
+  if (idx < 0)  // There is no subject alt name
+    return map;
+
+  X509_EXTENSION* ext = X509_get_ext(cert, idx);
+  CHECK_NOT_NULL(ext);
+  const X509V3_EXT_METHOD* method = X509V3_EXT_get(ext);
+  CHECK_EQ(method, X509V3_EXT_get_nid(NID_subject_alt_name));
+
+  GENERAL_NAMES* names = static_cast<GENERAL_NAMES*>(X509V3_EXT_d2i(ext));
+  if (names == nullptr)  // There are no names
+    return map;
+
+  for (int i = 0; i < sk_GENERAL_NAME_num(names); i++) {
+    USE(BIO_reset(bio.get()));
+    GENERAL_NAME* gen = sk_GENERAL_NAME_value(names, i);
+    if (gen->type == GEN_DNS) {
+      ASN1_IA5STRING* name = gen->d.dNSName;
+      BIO_write(bio.get(), name->data, name->length);
+      BIO_get_mem_ptr(bio.get(), &mem);
+      map.emplace("dns", std::string(mem->data, mem->length));
+    } else {
+      STACK_OF(CONF_VALUE)* nval = i2v_GENERAL_NAME(
+          const_cast<X509V3_EXT_METHOD*>(method), gen, nullptr);
+      if (nval == nullptr)
+        continue;
+      X509V3_EXT_val_prn(bio.get(), nval, 0, 0);
+      sk_CONF_VALUE_pop_free(nval, X509V3_conf_free);
+      BIO_get_mem_ptr(bio.get(), &mem);
+      std::string value(mem->data, mem->length);
+      if (value.compare(0, 11, "IP Address:") == 0) {
+        map.emplace("ip", value.substr(11));
+      } else if (value.compare(0, 4, "URI:") == 0) {
+        url::URL url(value.substr(4));
+        if (url.flags() & url::URL_FLAGS_CANNOT_BE_BASE ||
+            url.flags() & url::URL_FLAGS_FAILED) {
+          continue;  // Skip this one
+        }
+        map.emplace("uri", url.host());
+      }
+    }
+  }
+  sk_GENERAL_NAME_pop_free(names, GENERAL_NAME_free);
+  return map;
+}
+
+std::string GetCertificateCN(X509* cert) {
+  X509_NAME* subject = X509_get_subject_name(cert);
+  if (subject != nullptr) {
+    int nid = OBJ_txt2nid("CN");
+    int idx = X509_NAME_get_index_by_NID(subject, nid, -1);
+    if (idx != -1) {
+      X509_NAME_ENTRY* cn = X509_NAME_get_entry(subject, idx);
+      if (cn != nullptr) {
+        ASN1_STRING* cn_str = X509_NAME_ENTRY_get_data(cn);
+        if (cn_str != nullptr) {
+          return std::string(reinterpret_cast<const char*>(
+              ASN1_STRING_get0_data(cn_str)));
+        }
+      }
+    }
+  }
+  return std::string();
+}
+
+long VerifyPeerCertificate(  // NOLINT(runtime/int)
+    const SSLPointer& ssl,
+    long def) {  // NOLINT(runtime/int)
+  long err = def;  // NOLINT(runtime/int)
+  if (X509* peer_cert = SSL_get_peer_certificate(ssl.get())) {
+    X509_free(peer_cert);
+    err = SSL_get_verify_result(ssl.get());
+  } else {
+    const SSL_CIPHER* curr_cipher = SSL_get_current_cipher(ssl.get());
+    const SSL_SESSION* sess = SSL_get_session(ssl.get());
+    // Allow the no-certificate case for PSK authentication in TLS 1.2 and
+    // lower. In TLS 1.3, check that the session was reused instead, because
+    // a TLS 1.3 PSK looks like session resumption.
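+    // SSL_get_verify_result() reports X509_V_OK when no peer certificate
+    // was presented at all, so that path cannot be used here; `def` is the
+    // result the caller wants when neither exception below applies.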
+ if (SSL_CIPHER_get_auth_nid(curr_cipher) == NID_auth_psk || + (SSL_SESSION_get_protocol_version(sess) == TLS1_3_VERSION && + SSL_session_reused(ssl.get()))) { + return X509_V_OK; + } + } + return err; +} + +int UseSNIContext(const SSLPointer& ssl, SecureContext* context) { + SSL_CTX* ctx = context->ctx_.get(); + X509* x509 = SSL_CTX_get0_certificate(ctx); + EVP_PKEY* pkey = SSL_CTX_get0_privatekey(ctx); + STACK_OF(X509)* chain; + + int err = SSL_CTX_get0_chain_certs(ctx, &chain); + if (err == 1) err = SSL_use_certificate(ssl.get(), x509); + if (err == 1) err = SSL_use_PrivateKey(ssl.get(), pkey); + if (err == 1 && chain != nullptr) err = SSL_set1_chain(ssl.get(), chain); + return err; +} + +const char* GetClientHelloALPN(const SSLPointer& ssl) { + const unsigned char* buf; + size_t len; + size_t rem; + + if (!SSL_client_hello_get0_ext( + ssl.get(), + TLSEXT_TYPE_application_layer_protocol_negotiation, + &buf, + &rem) || + rem < 2) { + return nullptr; + } + + len = (buf[0] << 8) | buf[1]; + if (len + 2 != rem) return nullptr; + return reinterpret_cast(buf + 3); +} + +const char* GetClientHelloServerName(const SSLPointer& ssl) { + const unsigned char* buf; + size_t len; + size_t rem; + + if (!SSL_client_hello_get0_ext( + ssl.get(), + TLSEXT_TYPE_server_name, + &buf, + &rem) || rem <= 2) { + return nullptr; + } + + len = (*buf << 8) | *(buf + 1); + if (len + 2 != rem) + return nullptr; + rem = len; + + if (rem == 0 || *(buf + 2) != TLSEXT_NAMETYPE_host_name) return nullptr; + rem--; + if (rem <= 2) + return nullptr; + len = (*(buf + 3) << 8) | *(buf + 4); + if (len + 2 > rem) + return nullptr; + return reinterpret_cast(buf + 5); +} + +const char* GetServerName(SSL* ssl) { + return SSL_get_servername(ssl, TLSEXT_NAMETYPE_host_name); +} + +bool SetGroups(SecureContext* sc, const char* groups) { + return SSL_CTX_set1_groups_list(**sc, groups) == 1; +} + +const char* X509ErrorCode(long err) { // NOLINT(runtime/int) + const char* code = "UNSPECIFIED"; +#define CASE_X509_ERR(CODE) case X509_V_ERR_##CODE: code = #CODE; break; + switch (err) { + CASE_X509_ERR(UNABLE_TO_GET_ISSUER_CERT) + CASE_X509_ERR(UNABLE_TO_GET_CRL) + CASE_X509_ERR(UNABLE_TO_DECRYPT_CERT_SIGNATURE) + CASE_X509_ERR(UNABLE_TO_DECRYPT_CRL_SIGNATURE) + CASE_X509_ERR(UNABLE_TO_DECODE_ISSUER_PUBLIC_KEY) + CASE_X509_ERR(CERT_SIGNATURE_FAILURE) + CASE_X509_ERR(CRL_SIGNATURE_FAILURE) + CASE_X509_ERR(CERT_NOT_YET_VALID) + CASE_X509_ERR(CERT_HAS_EXPIRED) + CASE_X509_ERR(CRL_NOT_YET_VALID) + CASE_X509_ERR(CRL_HAS_EXPIRED) + CASE_X509_ERR(ERROR_IN_CERT_NOT_BEFORE_FIELD) + CASE_X509_ERR(ERROR_IN_CERT_NOT_AFTER_FIELD) + CASE_X509_ERR(ERROR_IN_CRL_LAST_UPDATE_FIELD) + CASE_X509_ERR(ERROR_IN_CRL_NEXT_UPDATE_FIELD) + CASE_X509_ERR(OUT_OF_MEM) + CASE_X509_ERR(DEPTH_ZERO_SELF_SIGNED_CERT) + CASE_X509_ERR(SELF_SIGNED_CERT_IN_CHAIN) + CASE_X509_ERR(UNABLE_TO_GET_ISSUER_CERT_LOCALLY) + CASE_X509_ERR(UNABLE_TO_VERIFY_LEAF_SIGNATURE) + CASE_X509_ERR(CERT_CHAIN_TOO_LONG) + CASE_X509_ERR(CERT_REVOKED) + CASE_X509_ERR(INVALID_CA) + CASE_X509_ERR(PATH_LENGTH_EXCEEDED) + CASE_X509_ERR(INVALID_PURPOSE) + CASE_X509_ERR(CERT_UNTRUSTED) + CASE_X509_ERR(CERT_REJECTED) + CASE_X509_ERR(HOSTNAME_MISMATCH) + } +#undef CASE_X509_ERR + return code; +} + +MaybeLocal GetValidationErrorReason(Environment* env, int err) { + const char* reason = X509_verify_cert_error_string(err); + return OneByteString(env->isolate(), reason); +} + +MaybeLocal GetValidationErrorCode(Environment* env, int err) { + return OneByteString(env->isolate(), X509ErrorCode(err)); +} + +MaybeLocal 
GetCert(Environment* env, const SSLPointer& ssl) { + ClearErrorOnReturn clear_error_on_return; + X509* cert = SSL_get_certificate(ssl.get()); + if (cert == nullptr) + return Undefined(env->isolate()); + + Local ret; + MaybeLocal maybe_cert = X509ToObject(env, cert); + return maybe_cert.ToLocal(&ret) ? ret : MaybeLocal(); +} + +namespace { +template +bool Set( + Local context, + Local target, + Local name, + MaybeLocal maybe_value) { + Local value; + if (!maybe_value.ToLocal(&value)) + return false; + + // Undefined is ignored, but still considered successful + if (value->IsUndefined()) + return true; + + return !target->Set(context, name, value).IsNothing(); +} + +Local ToV8Value(Environment* env, const BIOPointer& bio) { + BUF_MEM* mem; + BIO_get_mem_ptr(bio.get(), &mem); + MaybeLocal ret = + String::NewFromUtf8( + env->isolate(), + mem->data, + NewStringType::kNormal, + mem->length); + USE(BIO_reset(bio.get())); + return ret.FromMaybe(Local()); +} + +MaybeLocal GetCipherName( + Environment* env, + const SSL_CIPHER* cipher) { + if (cipher == nullptr) + return Undefined(env->isolate()); + + return OneByteString(env->isolate(), SSL_CIPHER_get_name(cipher)); +} + +MaybeLocal GetCipherStandardName( + Environment* env, + const SSL_CIPHER* cipher) { + if (cipher == nullptr) + return Undefined(env->isolate()); + + return OneByteString(env->isolate(), SSL_CIPHER_standard_name(cipher)); +} + +MaybeLocal GetCipherVersion( + Environment* env, + const SSL_CIPHER* cipher) { + if (cipher == nullptr) + return Undefined(env->isolate()); + + return OneByteString(env->isolate(), SSL_CIPHER_get_version(cipher)); +} + +StackOfX509 CloneSSLCerts(X509Pointer&& cert, + const STACK_OF(X509)* const ssl_certs) { + StackOfX509 peer_certs(sk_X509_new(nullptr)); + if (cert) + sk_X509_push(peer_certs.get(), cert.release()); + for (int i = 0; i < sk_X509_num(ssl_certs); i++) { + X509Pointer cert(X509_dup(sk_X509_value(ssl_certs, i))); + if (!cert || !sk_X509_push(peer_certs.get(), cert.get())) + return StackOfX509(); + // `cert` is now managed by the stack. + cert.release(); + } + return peer_certs; +} + +MaybeLocal AddIssuerChainToObject( + X509Pointer* cert, + Local object, + StackOfX509&& peer_certs, + Environment* const env) { + Local context = env->isolate()->GetCurrentContext(); + cert->reset(sk_X509_delete(peer_certs.get(), 0)); + for (;;) { + int i; + for (i = 0; i < sk_X509_num(peer_certs.get()); i++) { + X509* ca = sk_X509_value(peer_certs.get(), i); + if (X509_check_issued(ca, cert->get()) != X509_V_OK) + continue; + + Local ca_info; + MaybeLocal maybe_ca_info = X509ToObject(env, ca); + if (!maybe_ca_info.ToLocal(&ca_info)) + return MaybeLocal(); + + if (!Set(context, object, env->issuercert_string(), ca_info)) + return MaybeLocal(); + object = ca_info; + + // NOTE: Intentionally freeing cert that is not used anymore. + // Delete cert and continue aggregating issuers. + cert->reset(sk_X509_delete(peer_certs.get(), i)); + break; + } + + // Issuer not found, break out of the loop. 
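+    // (At this point every remaining peer certificate was checked without
+    // finding an issuer for the current one, so the peer-supplied part of
+    // the chain is exhausted; GetLastIssuedCert() can continue the walk
+    // from the local trust store.)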
+ if (i == sk_X509_num(peer_certs.get())) + break; + } + return MaybeLocal(object); +} + +MaybeLocal GetLastIssuedCert( + X509Pointer* cert, + const SSLPointer& ssl, + Local issuer_chain, + Environment* const env) { + Local context = env->isolate()->GetCurrentContext(); + while (X509_check_issued(cert->get(), cert->get()) != X509_V_OK) { + X509* ca; + if (SSL_CTX_get_issuer(SSL_get_SSL_CTX(ssl.get()), cert->get(), &ca) <= 0) + break; + + Local ca_info; + MaybeLocal maybe_ca_info = X509ToObject(env, ca); + if (!maybe_ca_info.ToLocal(&ca_info)) + return MaybeLocal(); + + if (!Set(context, issuer_chain, env->issuercert_string(), ca_info)) + return MaybeLocal(); + issuer_chain = ca_info; + + // Delete previous cert and continue aggregating issuers. + cert->reset(ca); + } + return MaybeLocal(issuer_chain); +} + +MaybeLocal GetRawDERCertificate(Environment* env, X509* cert) { + int size = i2d_X509(cert, nullptr); + + AllocatedBuffer buffer = env->AllocateManaged(size); + unsigned char* serialized = + reinterpret_cast(buffer.data()); + i2d_X509(cert, &serialized); + return buffer.ToBuffer(); +} + +MaybeLocal GetSerialNumber(Environment* env, X509* cert) { + if (ASN1_INTEGER* serial_number = X509_get_serialNumber(cert)) { + BignumPointer bn(ASN1_INTEGER_to_BN(serial_number, nullptr)); + if (bn) { + OpenSSLBuffer buf(BN_bn2hex(bn.get())); + if (buf) + return OneByteString(env->isolate(), buf.get()); + } + } + + return Undefined(env->isolate()); +} + +MaybeLocal GetKeyUsage(Environment* env, X509* cert) { + StackOfASN1 eku(static_cast( + X509_get_ext_d2i(cert, NID_ext_key_usage, nullptr, nullptr))); + if (eku) { + const int count = sk_ASN1_OBJECT_num(eku.get()); + MaybeStackBuffer, 16> ext_key_usage(count); + char buf[256]; + + int j = 0; + for (int i = 0; i < count; i++) { + if (OBJ_obj2txt(buf, + sizeof(buf), + sk_ASN1_OBJECT_value(eku.get(), i), 1) >= 0) { + ext_key_usage[j++] = OneByteString(env->isolate(), buf); + } + } + + return Array::New(env->isolate(), ext_key_usage.out(), count); + } + + return Undefined(env->isolate()); +} + +void AddFingerprintDigest( + const unsigned char* md, + unsigned int md_size, + char (*fingerprint)[3 * EVP_MAX_MD_SIZE + 1]) { + unsigned int i; + const char hex[] = "0123456789ABCDEF"; + + for (i = 0; i < md_size; i++) { + (*fingerprint)[3*i] = hex[(md[i] & 0xf0) >> 4]; + (*fingerprint)[(3*i)+1] = hex[(md[i] & 0x0f)]; + (*fingerprint)[(3*i)+2] = ':'; + } + + if (md_size > 0) { + (*fingerprint)[(3*(md_size-1))+2] = '\0'; + } else { + (*fingerprint)[0] = '\0'; + } +} + +bool SafeX509ExtPrint(const BIOPointer& out, X509_EXTENSION* ext) { + const X509V3_EXT_METHOD* method = X509V3_EXT_get(ext); + + if (method != X509V3_EXT_get_nid(NID_subject_alt_name)) + return false; + + GENERAL_NAMES* names = static_cast(X509V3_EXT_d2i(ext)); + if (names == nullptr) + return false; + + for (int i = 0; i < sk_GENERAL_NAME_num(names); i++) { + GENERAL_NAME* gen = sk_GENERAL_NAME_value(names, i); + + if (i != 0) + BIO_write(out.get(), ", ", 2); + + if (gen->type == GEN_DNS) { + ASN1_IA5STRING* name = gen->d.dNSName; + + BIO_write(out.get(), "DNS:", 4); + BIO_write(out.get(), name->data, name->length); + } else { + STACK_OF(CONF_VALUE)* nval = i2v_GENERAL_NAME( + const_cast(method), gen, nullptr); + if (nval == nullptr) + return false; + X509V3_EXT_val_prn(out.get(), nval, 0, 0); + sk_CONF_VALUE_pop_free(nval, X509V3_conf_free); + } + } + sk_GENERAL_NAME_pop_free(names, GENERAL_NAME_free); + + return true; +} + +MaybeLocal GetFingerprintDigest( + Environment* env, + const EVP_MD* 
method, + X509* cert) { + unsigned char md[EVP_MAX_MD_SIZE]; + unsigned int md_size; + char fingerprint[EVP_MAX_MD_SIZE * 3 + 1]; + + if (X509_digest(cert, method, md, &md_size)) { + AddFingerprintDigest(md, md_size, &fingerprint); + return OneByteString(env->isolate(), fingerprint); + } + return Undefined(env->isolate()); +} + +MaybeLocal GetValidTo( + Environment* env, + X509* cert, + const BIOPointer& bio) { + ASN1_TIME_print(bio.get(), X509_get0_notAfter(cert)); + return ToV8Value(env, bio); +} + +MaybeLocal GetValidFrom( + Environment* env, + X509* cert, + const BIOPointer& bio) { + ASN1_TIME_print(bio.get(), X509_get0_notBefore(cert)); + return ToV8Value(env, bio); +} + +MaybeLocal GetCurveASN1Name(Environment* env, const int nid) { + const char* nist = OBJ_nid2sn(nid); + return nist != nullptr ? + MaybeLocal(OneByteString(env->isolate(), nist)) : + MaybeLocal(Undefined(env->isolate())); +} + +MaybeLocal GetCurveNistName(Environment* env, const int nid) { + const char* nist = EC_curve_nid2nist(nid); + return nist != nullptr ? + MaybeLocal(OneByteString(env->isolate(), nist)) : + MaybeLocal(Undefined(env->isolate())); +} + +MaybeLocal GetECPubKey( + Environment* env, + const EC_GROUP* group, + const ECPointer& ec) { + const EC_POINT* pubkey = EC_KEY_get0_public_key(ec.get()); + if (pubkey == nullptr) + return Undefined(env->isolate()); + + return ECPointToBuffer( + env, + group, + pubkey, + EC_KEY_get_conv_form(ec.get()), + nullptr).FromMaybe(Local()); +} + +MaybeLocal GetECGroup( + Environment* env, + const EC_GROUP* group, + const ECPointer& ec) { + if (group == nullptr) + return Undefined(env->isolate()); + + int bits = EC_GROUP_order_bits(group); + if (bits <= 0) + return Undefined(env->isolate()); + + return Integer::New(env->isolate(), bits); +} + +MaybeLocal GetPubKey(Environment* env, const RSAPointer& rsa) { + int size = i2d_RSA_PUBKEY(rsa.get(), nullptr); + CHECK_GE(size, 0); + + AllocatedBuffer buffer = env->AllocateManaged(size); + unsigned char* serialized = + reinterpret_cast(buffer.data()); + i2d_RSA_PUBKEY(rsa.get(), &serialized); + return buffer.ToBuffer(); +} + +MaybeLocal GetExponentString( + Environment* env, + const BIOPointer& bio, + const BIGNUM* e) { + uint64_t exponent_word = static_cast(BN_get_word(e)); + uint32_t lo = static_cast(exponent_word); + uint32_t hi = static_cast(exponent_word >> 32); + if (hi == 0) + BIO_printf(bio.get(), "0x%x", lo); + else + BIO_printf(bio.get(), "0x%x%08x", hi, lo); + + return ToV8Value(env, bio); +} + +Local GetBits(Environment* env, const BIGNUM* n) { + return Integer::New(env->isolate(), BN_num_bits(n)); +} + +MaybeLocal GetModulusString( + Environment* env, + const BIOPointer& bio, + const BIGNUM* n) { + BN_print(bio.get(), n); + return ToV8Value(env, bio); +} + +template +MaybeLocal GetInfoString( + Environment* env, + const BIOPointer& bio, + X509* cert) { + int index = X509_get_ext_by_NID(cert, nid, -1); + if (index < 0) + return Undefined(env->isolate()); + + X509_EXTENSION* ext = X509_get_ext(cert, index); + CHECK_NOT_NULL(ext); + + if (!SafeX509ExtPrint(bio, ext) && + X509V3_EXT_print(bio.get(), ext, 0, 0) != 1) { + USE(BIO_reset(bio.get())); + return Null(env->isolate()); + } + + return ToV8Value(env, bio); +} + +MaybeLocal GetIssuerString( + Environment* env, + const BIOPointer& bio, + X509* cert) { + X509_NAME* issuer_name = X509_get_issuer_name(cert); + if (X509_NAME_print_ex(bio.get(), issuer_name, 0, X509_NAME_FLAGS) <= 0) { + USE(BIO_reset(bio.get())); + return Undefined(env->isolate()); + } + + return 
ToV8Value(env, bio); +} + +MaybeLocal GetSubject( + Environment* env, + const BIOPointer& bio, + X509* cert) { + if (X509_NAME_print_ex( + bio.get(), + X509_get_subject_name(cert), + 0, + X509_NAME_FLAGS) <= 0) { + USE(BIO_reset(bio.get())); + return Undefined(env->isolate()); + } + + return ToV8Value(env, bio); +} +} // namespace + +MaybeLocal GetCipherName(Environment* env, const SSLPointer& ssl) { + return GetCipherName(env, SSL_get_current_cipher(ssl.get())); +} + +MaybeLocal GetCipherStandardName( + Environment* env, + const SSLPointer& ssl) { + return GetCipherStandardName(env, SSL_get_current_cipher(ssl.get())); +} + +MaybeLocal GetCipherVersion(Environment* env, const SSLPointer& ssl) { + return GetCipherVersion(env, SSL_get_current_cipher(ssl.get())); +} + +MaybeLocal GetClientHelloCiphers( + Environment* env, + const SSLPointer& ssl) { + EscapableHandleScope scope(env->isolate()); + const unsigned char* buf; + size_t len = SSL_client_hello_get0_ciphers(ssl.get(), &buf); + size_t count = len / 2; + MaybeStackBuffer, 16> ciphers(count); + int j = 0; + for (size_t n = 0; n < len; n += 2) { + const SSL_CIPHER* cipher = SSL_CIPHER_find(ssl.get(), buf); + buf += 2; + Local obj = Object::New(env->isolate()); + if (!Set(env->context(), + obj, + env->name_string(), + GetCipherName(env, cipher)) || + !Set(env->context(), + obj, + env->standard_name_string(), + GetCipherStandardName(env, cipher)) || + !Set(env->context(), + obj, + env->version_string(), + GetCipherVersion(env, cipher))) { + return MaybeLocal(); + } + ciphers[j++] = obj; + } + Local ret = Array::New(env->isolate(), ciphers.out(), count); + return scope.Escape(ret); +} + + +MaybeLocal GetCipherInfo(Environment* env, const SSLPointer& ssl) { + EscapableHandleScope scope(env->isolate()); + Local info = Object::New(env->isolate()); + + if (!Set(env->context(), + info, + env->name_string(), + GetCipherName(env, ssl)) || + !Set(env->context(), + info, + env->standard_name_string(), + GetCipherStandardName(env, ssl)) || + !Set(env->context(), + info, + env->version_string(), + GetCipherVersion(env, ssl))) { + return MaybeLocal(); + } + + return scope.Escape(info); +} + +MaybeLocal GetEphemeralKey(Environment* env, const SSLPointer& ssl) { + CHECK_EQ(SSL_is_server(ssl.get()), 0); + EVP_PKEY* raw_key; + + EscapableHandleScope scope(env->isolate()); + Local info = Object::New(env->isolate()); + if (!SSL_get_server_tmp_key(ssl.get(), &raw_key)) + return scope.Escape(info); + + Local context = env->context(); + crypto::EVPKeyPointer key(raw_key); + + int kid = EVP_PKEY_id(key.get()); + int bits = EVP_PKEY_bits(key.get()); + switch (kid) { + case EVP_PKEY_DH: + if (!Set(context, info, env->type_string(), env->dh_string()) || + !Set(context, + info, + env->size_string(), + Integer::New(env->isolate(), bits))) { + return MaybeLocal(); + } + break; + case EVP_PKEY_EC: + case EVP_PKEY_X25519: + case EVP_PKEY_X448: + { + const char* curve_name; + if (kid == EVP_PKEY_EC) { + ECKeyPointer ec(EVP_PKEY_get1_EC_KEY(key.get())); + int nid = EC_GROUP_get_curve_name(EC_KEY_get0_group(ec.get())); + curve_name = OBJ_nid2sn(nid); + } else { + curve_name = OBJ_nid2sn(kid); + } + if (!Set(context, + info, + env->type_string(), + env->ecdh_string()) || + !Set(context, + info, + env->name_string(), + OneByteString(env->isolate(), curve_name)) || + !Set(context, + info, + env->size_string(), + Integer::New(env->isolate(), bits))) { + return MaybeLocal(); + } + } + break; + } + + return scope.Escape(info); +} + +MaybeLocal ECPointToBuffer(Environment* env, + 
const EC_GROUP* group, + const EC_POINT* point, + point_conversion_form_t form, + const char** error) { + size_t len = EC_POINT_point2oct(group, point, form, nullptr, 0, nullptr); + if (len == 0) { + if (error != nullptr) *error = "Failed to get public key length"; + return MaybeLocal(); + } + AllocatedBuffer buf = env->AllocateManaged(len); + len = EC_POINT_point2oct(group, + point, + form, + reinterpret_cast(buf.data()), + buf.size(), + nullptr); + if (len == 0) { + if (error != nullptr) *error = "Failed to get public key"; + return MaybeLocal(); + } + return buf.ToBuffer(); +} + +MaybeLocal GetPeerCert( + Environment* env, + const SSLPointer& ssl, + bool abbreviated, + bool is_server) { + ClearErrorOnReturn clear_error_on_return; + Local result; + MaybeLocal maybe_cert; + + // NOTE: This is because of the odd OpenSSL behavior. On client `cert_chain` + // contains the `peer_certificate`, but on server it doesn't. + X509Pointer cert(is_server ? SSL_get_peer_certificate(ssl.get()) : nullptr); + STACK_OF(X509)* ssl_certs = SSL_get_peer_cert_chain(ssl.get()); + if (!cert && (ssl_certs == nullptr || sk_X509_num(ssl_certs) == 0)) + return Undefined(env->isolate()); + + // Short result requested. + if (abbreviated) { + maybe_cert = + X509ToObject(env, cert ? cert.get() : sk_X509_value(ssl_certs, 0)); + return maybe_cert.ToLocal(&result) ? result : MaybeLocal(); + } + + StackOfX509 peer_certs = CloneSSLCerts(std::move(cert), ssl_certs); + if (peer_certs == nullptr) + return Undefined(env->isolate()); + + // First and main certificate. + X509Pointer first_cert(sk_X509_value(peer_certs.get(), 0)); + CHECK(first_cert); + maybe_cert = X509ToObject(env, first_cert.release()).ToLocalChecked(); + if (!maybe_cert.ToLocal(&result)) + return MaybeLocal(); + + Local issuer_chain; + MaybeLocal maybe_issuer_chain; + + maybe_issuer_chain = + AddIssuerChainToObject( + &cert, + result, + std::move(peer_certs), + env); + if (!maybe_issuer_chain.ToLocal(&issuer_chain)) + return MaybeLocal(); + + maybe_issuer_chain = + GetLastIssuedCert( + &cert, + ssl, + issuer_chain, + env); + + issuer_chain.Clear(); + if (!maybe_issuer_chain.ToLocal(&issuer_chain)) + return MaybeLocal(); + + // Last certificate should be self-signed. 
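+  // (X509_check_issued(a, b) returns X509_V_OK when `a` could have issued
+  // `b`, so checking the certificate against itself detects a self-signed
+  // root; such a root is marked by pointing its `issuercert` property at
+  // itself.)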
+ if (X509_check_issued(cert.get(), cert.get()) == X509_V_OK && + !Set(env->context(), + issuer_chain, + env->issuercert_string(), + issuer_chain)) { + return MaybeLocal(); + } + + return result; +} + +MaybeLocal X509ToObject(Environment* env, X509* cert) { + EscapableHandleScope scope(env->isolate()); + Local context = env->context(); + Local info = Object::New(env->isolate()); + + BIOPointer bio(BIO_new(BIO_s_mem())); + + if (!Set(context, + info, + env->subject_string(), + GetSubject(env, bio, cert)) || + !Set(context, + info, + env->issuer_string(), + GetIssuerString(env, bio, cert)) || + !Set(context, + info, + env->subjectaltname_string(), + GetInfoString(env, bio, cert)) || + !Set(context, + info, + env->infoaccess_string(), + GetInfoString(env, bio, cert))) { + return MaybeLocal(); + } + + EVPKeyPointer pkey(X509_get_pubkey(cert)); + RSAPointer rsa; + ECPointer ec; + if (pkey) { + switch (EVP_PKEY_id(pkey.get())) { + case EVP_PKEY_RSA: + rsa.reset(EVP_PKEY_get1_RSA(pkey.get())); + break; + case EVP_PKEY_EC: + ec.reset(EVP_PKEY_get1_EC_KEY(pkey.get())); + break; + } + } + + if (rsa) { + const BIGNUM* n; + const BIGNUM* e; + RSA_get0_key(rsa.get(), &n, &e, nullptr); + if (!Set(context, + info, + env->modulus_string(), + GetModulusString(env, bio, n)) || + !Set(context, info, env->bits_string(), GetBits(env, n)) || + !Set(context, + info, + env->exponent_string(), + GetExponentString(env, bio, e)) || + !Set(context, + info, + env->pubkey_string(), + GetPubKey(env, rsa))) { + return MaybeLocal(); + } + } else if (ec) { + const EC_GROUP* group = EC_KEY_get0_group(ec.get()); + + if (!Set(context, + info, + env->bits_string(), + GetECGroup(env, group, ec)) || + !Set(context, + info, + env->pubkey_string(), + GetECPubKey(env, group, ec))) { + return MaybeLocal(); + } + + const int nid = EC_GROUP_get_curve_name(group); + if (nid != 0) { + // Curve is well-known, get its OID and NIST nick-name (if it has one). + + if (!Set(context, + info, + env->asn1curve_string(), + GetCurveASN1Name(env, nid)) || + !Set(context, + info, + env->nistcurve_string(), + GetCurveNistName(env, nid))) { + return MaybeLocal(); + } + } else { + // Unnamed curves can be described by their mathematical properties, + // but aren't used much (at all?) with X.509/TLS. Support later if needed. + } + } + + // pkey, rsa, and ec pointers are no longer needed. 
+ pkey.reset(); + rsa.reset(); + ec.reset(); + + if (!Set(context, + info, + env->valid_from_string(), + GetValidFrom(env, cert, bio)) || + !Set(context, + info, + env->valid_to_string(), + GetValidTo(env, cert, bio))) { + return MaybeLocal(); + } + + // bio is no longer needed + bio.reset(); + + if (!Set(context, + info, + env->fingerprint_string(), + GetFingerprintDigest(env, EVP_sha1(), cert)) || + !Set(context, + info, + env->fingerprint256_string(), + GetFingerprintDigest(env, EVP_sha256(), cert)) || + !Set(context, + info, + env->ext_key_usage_string(), + GetKeyUsage(env, cert)) || + !Set(context, + info, + env->serial_number_string(), + GetSerialNumber(env, cert)) || + !Set(context, + info, + env->raw_string(), + GetRawDERCertificate(env, cert))) { + return MaybeLocal(); + } + + return scope.Escape(info); +} + +} // namespace crypto +} // namespace node diff --git a/src/node_crypto_common.h b/src/node_crypto_common.h new file mode 100644 index 00000000000..e42e249ef2b --- /dev/null +++ b/src/node_crypto_common.h @@ -0,0 +1,139 @@ +#ifndef SRC_NODE_CRYPTO_COMMON_H_ +#define SRC_NODE_CRYPTO_COMMON_H_ + +#if defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS + +#include "env.h" +#include "node_crypto.h" +#include "v8.h" +#include +#include + +#include +#include + +namespace node { +namespace crypto { + +// OPENSSL_free is a macro, so we need a wrapper function. +struct OpenSSLBufferDeleter { + void operator()(char* pointer) const { OPENSSL_free(pointer); } +}; +using OpenSSLBuffer = std::unique_ptr; + +struct StackOfX509Deleter { + void operator()(STACK_OF(X509)* p) const { sk_X509_pop_free(p, X509_free); } +}; +using StackOfX509 = std::unique_ptr; + +struct StackOfXASN1Deleter { + void operator()(STACK_OF(ASN1_OBJECT)* p) const { + sk_ASN1_OBJECT_pop_free(p, ASN1_OBJECT_free); + } +}; +using StackOfASN1 = std::unique_ptr; + +int SSL_CTX_get_issuer(SSL_CTX* ctx, X509* cert, X509** issuer); + +void LogSecret( + const SSLPointer& ssl, + const char* name, + const unsigned char* secret, + size_t secretlen); + +bool SetALPN(const SSLPointer& ssl, const std::string& alpn); + +bool SetALPN(const SSLPointer& ssl, v8::Local alpn); + +v8::MaybeLocal GetSSLOCSPResponse( + Environment* env, + SSL* ssl, + v8::Local default_value); + +bool SetTLSSession( + const SSLPointer& ssl, + const unsigned char* buf, + size_t length); + +bool SetTLSSession( + const SSLPointer& ssl, + const SSLSessionPointer& session); + +SSLSessionPointer GetTLSSession(v8::Local val); + +SSLSessionPointer GetTLSSession(const unsigned char* buf, size_t length); + +std::unordered_multimap +GetCertificateAltNames(X509* cert); + +std::string GetCertificateCN(X509* cert); + +long VerifyPeerCertificate( // NOLINT(runtime/int) + const SSLPointer& ssl, + long def = X509_V_ERR_UNSPECIFIED); // NOLINT(runtime/int) + +int UseSNIContext(const SSLPointer& ssl, SecureContext* context); + +const char* GetClientHelloALPN(const SSLPointer& ssl); + +const char* GetClientHelloServerName(const SSLPointer& ssl); + +const char* GetServerName(SSL* ssl); + +v8::MaybeLocal GetClientHelloCiphers( + Environment* env, + const SSLPointer& ssl); + +bool SetGroups(SecureContext* sc, const char* groups); + +const char* X509ErrorCode(long err); // NOLINT(runtime/int) + +v8::MaybeLocal GetValidationErrorReason(Environment* env, int err); + +v8::MaybeLocal GetValidationErrorCode(Environment* env, int err); + +v8::MaybeLocal GetCert(Environment* env, const SSLPointer& ssl); + +v8::MaybeLocal GetCipherName( + Environment* env, + const SSLPointer& ssl); + 
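+
+// A minimal usage sketch for the cipher helpers declared around this point
+// (illustrative only; everything other than the declared functions is an
+// assumption):
+//
+//   v8::Local<v8::Object> info;
+//   if (GetCipherInfo(env, ssl).ToLocal(&info)) {
+//     // `info` now carries the negotiated cipher's name, standard name,
+//     // and protocol version, ready to hand to the JS layer.
+//   }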
+v8::MaybeLocal GetCipherStandardName( + Environment* env, + const SSLPointer& ssl); + +v8::MaybeLocal GetCipherVersion( + Environment* env, + const SSLPointer& ssl); + +v8::MaybeLocal GetCipherInfo( + Environment* env, + const SSLPointer& ssl); + +v8::MaybeLocal GetEphemeralKey( + Environment* env, + const SSLPointer& ssl); + +v8::MaybeLocal GetPeerCert( + Environment* env, + const SSLPointer& ssl, + bool abbreviated = false, + bool is_server = false); + +v8::MaybeLocal ECPointToBuffer( + Environment* env, + const EC_GROUP* group, + const EC_POINT* point, + point_conversion_form_t form, + const char** error); + +v8::MaybeLocal X509ToObject( + Environment* env, + X509* cert); + +} // namespace crypto +} // namespace node + +#endif // defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS + +#endif // SRC_NODE_CRYPTO_COMMON_H_ diff --git a/src/node_dir.cc b/src/node_dir.cc index ec53d8216bc..9923f042779 100644 --- a/src/node_dir.cc +++ b/src/node_dir.cc @@ -358,7 +358,7 @@ void Initialize(Local target, env->SetProtoMethod(dir, "read", DirHandle::Read); env->SetProtoMethod(dir, "close", DirHandle::Close); Local dirt = dir->InstanceTemplate(); - dirt->SetInternalFieldCount(DirHandle::kDirHandleFieldCount); + dirt->SetInternalFieldCount(DirHandle::kInternalFieldCount); Local handleString = FIXED_ONE_BYTE_STRING(isolate, "DirHandle"); dir->SetClassName(handleString); diff --git a/src/node_dir.h b/src/node_dir.h index b55245d5b89..5fcc36326b7 100644 --- a/src/node_dir.h +++ b/src/node_dir.h @@ -12,8 +12,6 @@ namespace fs_dir { // Needed to propagate `uv_dir_t`. class DirHandle : public AsyncWrap { public: - static constexpr int kDirHandleFieldCount = 1; - static DirHandle* New(Environment* env, uv_dir_t* dir); ~DirHandle() override; diff --git a/src/node_env_var.cc b/src/node_env_var.cc index 02f24fff205..208bb6981c2 100644 --- a/src/node_env_var.cc +++ b/src/node_env_var.cc @@ -1,3 +1,4 @@ +#include "debug_utils-inl.h" #include "env-inl.h" #include "node_errors.h" #include "node_process.h" @@ -30,8 +31,10 @@ using v8::Value; class RealEnvStore final : public KVStore { public: MaybeLocal Get(Isolate* isolate, Local key) const override; + Maybe Get(const char* key) const override; void Set(Isolate* isolate, Local key, Local value) override; int32_t Query(Isolate* isolate, Local key) const override; + int32_t Query(const char* key) const override; void Delete(Isolate* isolate, Local key) override; Local Enumerate(Isolate* isolate) const override; }; @@ -39,8 +42,10 @@ class RealEnvStore final : public KVStore { class MapKVStore final : public KVStore { public: MaybeLocal Get(Isolate* isolate, Local key) const override; + Maybe Get(const char* key) const override; void Set(Isolate* isolate, Local key, Local value) override; int32_t Query(Isolate* isolate, Local key) const override; + int32_t Query(const char* key) const override; void Delete(Isolate* isolate, Local key) override; Local Enumerate(Isolate* isolate) const override; @@ -72,26 +77,36 @@ void DateTimeConfigurationChangeNotification(Isolate* isolate, const T& key) { } } -MaybeLocal RealEnvStore::Get(Isolate* isolate, - Local property) const { +Maybe RealEnvStore::Get(const char* key) const { Mutex::ScopedLock lock(per_process::env_var_mutex); - node::Utf8Value key(isolate, property); size_t init_sz = 256; MaybeStackBuffer val; - int ret = uv_os_getenv(*key, *val, &init_sz); + int ret = uv_os_getenv(key, *val, &init_sz); if (ret == UV_ENOBUFS) { // Buffer is not large enough, reallocate to the updated init_sz // and fetch env value again. 
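    // (uv_os_getenv() stores the required size, including the terminating
    // NUL, in init_sz when it fails with UV_ENOBUFS, so one retry with the
    // reported size is guaranteed to fit.)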
val.AllocateSufficientStorage(init_sz); - ret = uv_os_getenv(*key, *val, &init_sz); + ret = uv_os_getenv(key, *val, &init_sz); } if (ret >= 0) { // Env key value fetch success. - MaybeLocal value_string = - String::NewFromUtf8(isolate, *val, NewStringType::kNormal, init_sz); - return value_string; + return v8::Just(std::string(*val, init_sz)); + } + + return v8::Nothing(); +} + +MaybeLocal RealEnvStore::Get(Isolate* isolate, + Local property) const { + node::Utf8Value key(isolate, property); + Maybe value = Get(*key); + + if (value.IsJust()) { + std::string val = value.FromJust(); + return String::NewFromUtf8( + isolate, val.data(), NewStringType::kNormal, val.size()); } return MaybeLocal(); @@ -112,14 +127,12 @@ void RealEnvStore::Set(Isolate* isolate, DateTimeConfigurationChangeNotification(isolate, key); } -int32_t RealEnvStore::Query(Isolate* isolate, Local property) const { +int32_t RealEnvStore::Query(const char* key) const { Mutex::ScopedLock lock(per_process::env_var_mutex); - node::Utf8Value key(isolate, property); - char val[2]; size_t init_sz = sizeof(val); - int ret = uv_os_getenv(*key, val, &init_sz); + int ret = uv_os_getenv(key, val, &init_sz); if (ret == UV_ENOENT) { return -1; @@ -136,6 +149,11 @@ int32_t RealEnvStore::Query(Isolate* isolate, Local property) const { return 0; } +int32_t RealEnvStore::Query(Isolate* isolate, Local property) const { + node::Utf8Value key(isolate, property); + return Query(*key); +} + void RealEnvStore::Delete(Isolate* isolate, Local property) { Mutex::ScopedLock lock(per_process::env_var_mutex); @@ -190,13 +208,19 @@ std::shared_ptr KVStore::Clone(v8::Isolate* isolate) const { return copy; } -MaybeLocal MapKVStore::Get(Isolate* isolate, Local key) const { +Maybe MapKVStore::Get(const char* key) const { Mutex::ScopedLock lock(mutex_); + auto it = map_.find(key); + return it == map_.end() ? v8::Nothing() : v8::Just(it->second); +} + +MaybeLocal MapKVStore::Get(Isolate* isolate, Local key) const { Utf8Value str(isolate, key); - auto it = map_.find(std::string(*str, str.length())); - if (it == map_.end()) return Local(); - return String::NewFromUtf8(isolate, it->second.data(), - NewStringType::kNormal, it->second.size()); + Maybe value = Get(*str); + if (value.IsNothing()) return Local(); + std::string val = value.FromJust(); + return String::NewFromUtf8( + isolate, val.data(), NewStringType::kNormal, val.size()); } void MapKVStore::Set(Isolate* isolate, Local key, Local value) { @@ -209,11 +233,14 @@ void MapKVStore::Set(Isolate* isolate, Local key, Local value) { } } -int32_t MapKVStore::Query(Isolate* isolate, Local key) const { +int32_t MapKVStore::Query(const char* key) const { Mutex::ScopedLock lock(mutex_); + return map_.find(key) == map_.end() ? -1 : 0; +} + +int32_t MapKVStore::Query(Isolate* isolate, Local key) const { Utf8Value str(isolate, key); - auto it = map_.find(std::string(*str, str.length())); - return it == map_.end() ? 
-1 : 0; + return Query(*str); } void MapKVStore::Delete(Isolate* isolate, Local key) { diff --git a/src/node_errors.h b/src/node_errors.h index d56bf7ef5a5..960cb725323 100644 --- a/src/node_errors.h +++ b/src/node_errors.h @@ -39,11 +39,13 @@ void OnFatalError(const char* location, const char* message); V(ERR_CONSTRUCT_CALL_INVALID, TypeError) \ V(ERR_CRYPTO_UNKNOWN_CIPHER, Error) \ V(ERR_CRYPTO_UNKNOWN_DH_GROUP, Error) \ + V(ERR_EXECUTION_ENVIRONMENT_NOT_AVAILABLE, Error) \ V(ERR_INVALID_ARG_VALUE, TypeError) \ V(ERR_OSSL_EVP_INVALID_DIGEST, Error) \ V(ERR_INVALID_ARG_TYPE, TypeError) \ V(ERR_INVALID_MODULE_SPECIFIER, TypeError) \ - V(ERR_INVALID_PACKAGE_CONFIG, SyntaxError) \ + V(ERR_INVALID_PACKAGE_CONFIG, Error) \ + V(ERR_INVALID_PACKAGE_TARGET, Error) \ V(ERR_INVALID_TRANSFER_OBJECT, TypeError) \ V(ERR_MEMORY_ALLOCATION_FAILED, Error) \ V(ERR_MISSING_ARGS, TypeError) \ @@ -53,6 +55,7 @@ void OnFatalError(const char* location, const char* message); V(ERR_NON_CONTEXT_AWARE_DISABLED, Error) \ V(ERR_MODULE_NOT_FOUND, Error) \ V(ERR_OUT_OF_RANGE, RangeError) \ + V(ERR_PACKAGE_PATH_NOT_EXPORTED, Error) \ V(ERR_SCRIPT_EXECUTION_INTERRUPTED, Error) \ V(ERR_SCRIPT_EXECUTION_TIMEOUT, Error) \ V(ERR_STRING_TOO_LONG, Error) \ @@ -84,28 +87,30 @@ void OnFatalError(const char* location, const char* message); // Errors with predefined static messages -#define PREDEFINED_ERROR_MESSAGES(V) \ - V(ERR_BUFFER_CONTEXT_NOT_AVAILABLE, \ - "Buffer is not available for the current Context") \ - V(ERR_CONSTRUCT_CALL_INVALID, "Constructor cannot be called") \ - V(ERR_CONSTRUCT_CALL_REQUIRED, "Cannot call constructor without `new`") \ - V(ERR_CRYPTO_UNKNOWN_CIPHER, "Unknown cipher") \ - V(ERR_CRYPTO_UNKNOWN_DH_GROUP, "Unknown DH group") \ - V(ERR_INVALID_TRANSFER_OBJECT, "Found invalid object in transferList") \ - V(ERR_MEMORY_ALLOCATION_FAILED, "Failed to allocate memory") \ - V(ERR_OSSL_EVP_INVALID_DIGEST, "Invalid digest used") \ - V(ERR_MISSING_MESSAGE_PORT_IN_TRANSFER_LIST, \ - "MessagePort was found in message but not listed in transferList") \ - V(ERR_MISSING_PLATFORM_FOR_WORKER, \ - "The V8 platform used by this instance of Node does not support " \ - "creating Workers") \ - V(ERR_NON_CONTEXT_AWARE_DISABLED, \ - "Loading non context-aware native modules has been disabled") \ - V(ERR_SCRIPT_EXECUTION_INTERRUPTED, \ - "Script execution was interrupted by `SIGINT`") \ - V(ERR_TRANSFERRING_EXTERNALIZED_SHAREDARRAYBUFFER, \ - "Cannot serialize externalized SharedArrayBuffer") \ - V(ERR_TLS_PSK_SET_IDENTIY_HINT_FAILED, "Failed to set PSK identity hint") \ +#define PREDEFINED_ERROR_MESSAGES(V) \ + V(ERR_BUFFER_CONTEXT_NOT_AVAILABLE, \ + "Buffer is not available for the current Context") \ + V(ERR_CONSTRUCT_CALL_INVALID, "Constructor cannot be called") \ + V(ERR_CONSTRUCT_CALL_REQUIRED, "Cannot call constructor without `new`") \ + V(ERR_CRYPTO_UNKNOWN_CIPHER, "Unknown cipher") \ + V(ERR_CRYPTO_UNKNOWN_DH_GROUP, "Unknown DH group") \ + V(ERR_EXECUTION_ENVIRONMENT_NOT_AVAILABLE, \ + "Context not associated with Node.js environment") \ + V(ERR_INVALID_TRANSFER_OBJECT, "Found invalid object in transferList") \ + V(ERR_MEMORY_ALLOCATION_FAILED, "Failed to allocate memory") \ + V(ERR_OSSL_EVP_INVALID_DIGEST, "Invalid digest used") \ + V(ERR_MISSING_MESSAGE_PORT_IN_TRANSFER_LIST, \ + "MessagePort was found in message but not listed in transferList") \ + V(ERR_MISSING_PLATFORM_FOR_WORKER, \ + "The V8 platform used by this instance of Node does not support " \ + "creating Workers") \ + V(ERR_NON_CONTEXT_AWARE_DISABLED, 
\ + "Loading non context-aware native modules has been disabled") \ + V(ERR_SCRIPT_EXECUTION_INTERRUPTED, \ + "Script execution was interrupted by `SIGINT`") \ + V(ERR_TRANSFERRING_EXTERNALIZED_SHAREDARRAYBUFFER, \ + "Cannot serialize externalized SharedArrayBuffer") \ + V(ERR_TLS_PSK_SET_IDENTIY_HINT_FAILED, "Failed to set PSK identity hint") #define V(code, message) \ inline v8::Local code(v8::Isolate* isolate) { \ diff --git a/src/node_file.cc b/src/node_file.cc index 2f66080e5ca..121fd35f573 100644 --- a/src/node_file.cc +++ b/src/node_file.cc @@ -748,16 +748,11 @@ void AfterScanDirWithTypes(uv_fs_t* req) { type_v.emplace_back(Integer::New(isolate, ent.type)); } - Local result = Array::New(isolate, 2); - result->Set(env->context(), - 0, - Array::New(isolate, name_v.data(), - name_v.size())).Check(); - result->Set(env->context(), - 1, - Array::New(isolate, type_v.data(), - type_v.size())).Check(); - req_wrap->Resolve(result); + Local result[] = { + Array::New(isolate, name_v.data(), name_v.size()), + Array::New(isolate, type_v.data(), type_v.size()) + }; + req_wrap->Resolve(Array::New(isolate, result, arraysize(result))); } void Access(const FunctionCallbackInfo& args) { @@ -1611,13 +1606,11 @@ static void ReadDir(const FunctionCallbackInfo& args) { Local names = Array::New(isolate, name_v.data(), name_v.size()); if (with_types) { - Local result = Array::New(isolate, 2); - result->Set(env->context(), 0, names).Check(); - result->Set(env->context(), - 1, - Array::New(isolate, type_v.data(), - type_v.size())).Check(); - args.GetReturnValue().Set(result); + Local result[] = { + names, + Array::New(isolate, type_v.data(), type_v.size()) + }; + args.GetReturnValue().Set(Array::New(isolate, result, arraysize(result))); } else { args.GetReturnValue().Set(names); } @@ -2297,7 +2290,8 @@ void Initialize(Local target, // Create FunctionTemplate for FSReqCallback Local fst = env->NewFunctionTemplate(NewFSReqCallback); - fst->InstanceTemplate()->SetInternalFieldCount(1); + fst->InstanceTemplate()->SetInternalFieldCount( + FSReqBase::kInternalFieldCount); fst->Inherit(AsyncWrap::GetConstructorTemplate(env)); Local wrapString = FIXED_ONE_BYTE_STRING(isolate, "FSReqCallback"); @@ -2310,7 +2304,8 @@ void Initialize(Local target, // Create FunctionTemplate for FileHandleReadWrap. There’s no need // to do anything in the constructor, so we only store the instance template. 
Local fh_rw = FunctionTemplate::New(isolate); - fh_rw->InstanceTemplate()->SetInternalFieldCount(1); + fh_rw->InstanceTemplate()->SetInternalFieldCount( + FSReqBase::kInternalFieldCount); fh_rw->Inherit(AsyncWrap::GetConstructorTemplate(env)); Local fhWrapString = FIXED_ONE_BYTE_STRING(isolate, "FileHandleReqWrap"); @@ -2325,7 +2320,7 @@ void Initialize(Local target, FIXED_ONE_BYTE_STRING(isolate, "FSReqPromise"); fpt->SetClassName(promiseString); Local fpo = fpt->InstanceTemplate(); - fpo->SetInternalFieldCount(1); + fpo->SetInternalFieldCount(FSReqBase::kInternalFieldCount); env->set_fsreqpromise_constructor_template(fpo); // Create FunctionTemplate for FileHandle @@ -2334,7 +2329,7 @@ void Initialize(Local target, env->SetProtoMethod(fd, "close", FileHandle::Close); env->SetProtoMethod(fd, "releaseFD", FileHandle::ReleaseFD); Local fdt = fd->InstanceTemplate(); - fdt->SetInternalFieldCount(StreamBase::kStreamBaseFieldCount); + fdt->SetInternalFieldCount(StreamBase::kInternalFieldCount); Local handleString = FIXED_ONE_BYTE_STRING(isolate, "FileHandle"); fd->SetClassName(handleString); @@ -2351,7 +2346,7 @@ void Initialize(Local target, "FileHandleCloseReq")); fdclose->Inherit(AsyncWrap::GetConstructorTemplate(env)); Local fdcloset = fdclose->InstanceTemplate(); - fdcloset->SetInternalFieldCount(1); + fdcloset->SetInternalFieldCount(FSReqBase::kInternalFieldCount); env->set_fdclose_constructor_template(fdcloset); Local use_promises_symbol = diff --git a/src/node_http2.cc b/src/node_http2.cc index c21adcfeb56..835e15587ff 100644 --- a/src/node_http2.cc +++ b/src/node_http2.cc @@ -5,6 +5,7 @@ #include "node_buffer.h" #include "node_http2.h" #include "node_http2_state.h" +#include "node_http_common-inl.h" #include "node_mem-inl.h" #include "node_perf.h" #include "node_revert.h" @@ -356,66 +357,6 @@ const char* Http2Session::TypeName() const { } } -// The Headers class initializes a proper array of nghttp2_nv structs -// containing the header name value pairs. -Headers::Headers(Isolate* isolate, - Local context, - Local headers) { - Local header_string = headers->Get(context, 0).ToLocalChecked(); - Local header_count = headers->Get(context, 1).ToLocalChecked(); - count_ = header_count.As()->Value(); - int header_string_len = header_string.As()->Length(); - - if (count_ == 0) { - CHECK_EQ(header_string_len, 0); - return; - } - - // Allocate a single buffer with count_ nghttp2_nv structs, followed - // by the raw header data as passed from JS. This looks like: - // | possible padding | nghttp2_nv | nghttp2_nv | ... | header contents | - buf_.AllocateSufficientStorage((alignof(nghttp2_nv) - 1) + - count_ * sizeof(nghttp2_nv) + - header_string_len); - // Make sure the start address is aligned appropriately for an nghttp2_nv*. - char* start = reinterpret_cast( - RoundUp(reinterpret_cast(*buf_), alignof(nghttp2_nv))); - char* header_contents = start + (count_ * sizeof(nghttp2_nv)); - nghttp2_nv* const nva = reinterpret_cast(start); - - CHECK_LE(header_contents + header_string_len, *buf_ + buf_.length()); - CHECK_EQ(header_string.As()->WriteOneByte( - isolate, - reinterpret_cast(header_contents), - 0, - header_string_len, - String::NO_NULL_TERMINATION), - header_string_len); - - size_t n = 0; - char* p; - for (p = header_contents; p < header_contents + header_string_len; n++) { - if (n >= count_) { - // This can happen if a passed header contained a null byte. In that - // case, just provide nghttp2 with an invalid header to make it reject - // the headers list. 
- static uint8_t zero = '\0'; - nva[0].name = nva[0].value = &zero; - nva[0].namelen = nva[0].valuelen = 1; - count_ = 1; - return; - } - - nva[n].flags = NGHTTP2_NV_FLAG_NONE; - nva[n].name = reinterpret_cast(p); - nva[n].namelen = strlen(p); - p += nva[n].namelen + 1; - nva[n].value = reinterpret_cast(p); - nva[n].valuelen = strlen(p); - p += nva[n].valuelen + 1; - } -} - Origins::Origins(Isolate* isolate, Local context, Local origin_string, @@ -538,8 +479,8 @@ Http2Session::Http2Session(Environment* env, uint32_t maxHeaderPairs = opts.GetMaxHeaderPairs(); max_header_pairs_ = type == NGHTTP2_SESSION_SERVER - ? std::max(maxHeaderPairs, 4U) // minimum # of request headers - : std::max(maxHeaderPairs, 1U); // minimum # of response headers + ? GetServerMaxHeaderPairs(maxHeaderPairs) + : GetClientMaxHeaderPairs(maxHeaderPairs); max_outstanding_pings_ = opts.GetMaxOutstandingPings(); max_outstanding_settings_ = opts.GetMaxOutstandingSettings(); @@ -1249,34 +1190,30 @@ void Http2Session::HandleHeadersFrame(const nghttp2_frame* frame) { if (stream->IsDestroyed()) return; - std::vector headers(stream->move_headers()); - DecrementCurrentSessionMemory(stream->current_headers_length_); - stream->current_headers_length_ = 0; - - // The headers are passed in above as a queue of nghttp2_header structs. + // The headers are stored as a vector of Http2Header instances. // The following converts that into a JS array with the structure: // [name1, value1, name2, value2, name3, value3, name3, value4] and so on. // That array is passed up to the JS layer and converted into an Object form // like {name1: value1, name2: value2, name3: [value3, value4]}. We do it // this way for performance reasons (it's faster to generate and pass an // array than it is to generate and pass the object). - size_t headers_size = headers.size(); - std::vector> headers_v(headers_size * 2); - for (size_t i = 0; i < headers_size; ++i) { - const nghttp2_header& item = headers[i]; - // The header name and value are passed as external one-byte strings - headers_v[i * 2] = - ExternalHeader::New(this, item.name).ToLocalChecked(); - headers_v[i * 2 + 1] = - ExternalHeader::New(this, item.value).ToLocalChecked(); - } + + std::vector> headers_v(stream->headers_count() * 2); + stream->TransferHeaders([&](const Http2Header& header, size_t i) { + headers_v[i * 2] = header.GetName(this).ToLocalChecked(); + headers_v[i * 2 + 1] = header.GetValue(this).ToLocalChecked(); + }); + CHECK_EQ(stream->headers_count(), 0); + + DecrementCurrentSessionMemory(stream->current_headers_length_); + stream->current_headers_length_ = 0; Local args[5] = { stream->object(), Integer::New(isolate, id), Integer::New(isolate, stream->headers_category()), Integer::New(isolate, frame->hd.flags), - Array::New(isolate, headers_v.data(), headers_size * 2)}; + Array::New(isolate, headers_v.data(), headers_v.size())}; MakeCallback(env()->http2session_on_headers_function(), arraysize(args), args); } @@ -1761,15 +1698,20 @@ int Http2Session::OnSendData( // Creates a new Http2Stream and submits a new http2 request. 
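The comment above describes the flat `[name1, value1, name2, value2, ...]` layout handed to the JS layer, which then folds repeated names into a single array-valued property. A standalone sketch of that fold with invented header data; a `std::multimap` stands in for the resulting JS object:

```cpp
#include <iostream>
#include <map>
#include <string>
#include <vector>

int main() {
  // Flat layout as passed to the JS callback: name, value, name, value, ...
  std::vector<std::string> flat = {"date", "Tue", "set-cookie", "a=1",
                                   "set-cookie", "b=2"};
  std::multimap<std::string, std::string> headers;
  for (size_t i = 0; i + 1 < flat.size(); i += 2)
    headers.emplace(flat[i], flat[i + 1]);  // adjacent slots form one pair
  for (const auto& entry : headers)
    std::cout << entry.first << ": " << entry.second << "\n";
}
```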
Http2Stream* Http2Session::SubmitRequest( nghttp2_priority_spec* prispec, - nghttp2_nv* nva, - size_t len, + const Http2Headers& headers, int32_t* ret, int options) { Debug(this, "submitting request"); Http2Scope h2scope(this); Http2Stream* stream = nullptr; Http2Stream::Provider::Stream prov(options); - *ret = nghttp2_submit_request(session_, prispec, nva, len, *prov, nullptr); + *ret = nghttp2_submit_request( + session_, + prispec, + headers.data(), + headers.length(), + *prov, + nullptr); CHECK_NE(*ret, NGHTTP2_ERR_NOMEM); if (LIKELY(*ret > 0)) stream = Http2Stream::New(this, *ret, NGHTTP2_HCAT_HEADERS, options); @@ -1918,13 +1860,7 @@ Http2Stream::Http2Stream(Http2Session* session, session->AddStream(this); } - Http2Stream::~Http2Stream() { - for (nghttp2_header& header : current_headers_) { - nghttp2_rcbuf_decref(header.name); - nghttp2_rcbuf_decref(header.value); - } - if (!session_) return; Debug(this, "tearing down stream"); @@ -2026,7 +1962,7 @@ void Http2Stream::Destroy() { // Initiates a response on the Http2Stream using data provided via the // StreamBase Streams API. -int Http2Stream::SubmitResponse(nghttp2_nv* nva, size_t len, int options) { +int Http2Stream::SubmitResponse(const Http2Headers& headers, int options) { CHECK(!this->IsDestroyed()); Http2Scope h2scope(this); Debug(this, "submitting response"); @@ -2037,21 +1973,30 @@ int Http2Stream::SubmitResponse(nghttp2_nv* nva, size_t len, int options) { options |= STREAM_OPTION_EMPTY_PAYLOAD; Http2Stream::Provider::Stream prov(this, options); - int ret = nghttp2_submit_response(**session_, id_, nva, len, *prov); + int ret = nghttp2_submit_response( + **session_, + id_, + headers.data(), + headers.length(), + *prov); CHECK_NE(ret, NGHTTP2_ERR_NOMEM); return ret; } // Submit informational headers for a stream. -int Http2Stream::SubmitInfo(nghttp2_nv* nva, size_t len) { +int Http2Stream::SubmitInfo(const Http2Headers& headers) { CHECK(!this->IsDestroyed()); Http2Scope h2scope(this); - Debug(this, "sending %d informational headers", len); - int ret = nghttp2_submit_headers(**session_, - NGHTTP2_FLAG_NONE, - id_, nullptr, - nva, len, nullptr); + Debug(this, "sending %d informational headers", headers.length()); + int ret = nghttp2_submit_headers( + **session_, + NGHTTP2_FLAG_NONE, + id_, + nullptr, + headers.data(), + headers.length(), + nullptr); CHECK_NE(ret, NGHTTP2_ERR_NOMEM); return ret; } @@ -2068,19 +2013,23 @@ void Http2Stream::OnTrailers() { } // Submit informational headers for a stream. -int Http2Stream::SubmitTrailers(nghttp2_nv* nva, size_t len) { +int Http2Stream::SubmitTrailers(const Http2Headers& headers) { CHECK(!this->IsDestroyed()); Http2Scope h2scope(this); - Debug(this, "sending %d trailers", len); + Debug(this, "sending %d trailers", headers.length()); int ret; // Sending an empty trailers frame poses problems in Safari, Edge & IE. // Instead we can just send an empty data frame with NGHTTP2_FLAG_END_STREAM // to indicate that the stream is ready to be closed. - if (len == 0) { + if (headers.length() == 0) { Http2Stream::Provider::Stream prov(this, 0); ret = nghttp2_submit_data(**session_, NGHTTP2_FLAG_END_STREAM, id_, *prov); } else { - ret = nghttp2_submit_trailer(**session_, id_, nva, len); + ret = nghttp2_submit_trailer( + **session_, + id_, + headers.data(), + headers.length()); } CHECK_NE(ret, NGHTTP2_ERR_NOMEM); return ret; @@ -2129,15 +2078,19 @@ void Http2Stream::FlushRstStream() { // Submit a push promise and create the associated Http2Stream if successful. 
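SubmitTrailers above avoids sending an empty trailing HEADERS frame, which Safari, Edge, and IE mishandle, and instead closes the stream with an empty DATA frame. The same decision isolated against the public nghttp2 API; this is a sketch, with data provider setup omitted:

```cpp
#include "nghttp2/nghttp2.h"

// No trailers: end the stream with an empty DATA frame carrying END_STREAM.
// Otherwise: submit a real trailing HEADERS frame.
int SubmitTrailersSketch(nghttp2_session* session, int32_t stream_id,
                         const nghttp2_nv* nva, size_t len,
                         nghttp2_data_provider* provider) {
  if (len == 0)
    return nghttp2_submit_data(session, NGHTTP2_FLAG_END_STREAM,
                               stream_id, provider);
  return nghttp2_submit_trailer(session, stream_id, nva, len);
}
```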
-Http2Stream* Http2Stream::SubmitPushPromise(nghttp2_nv* nva, - size_t len, +Http2Stream* Http2Stream::SubmitPushPromise(const Http2Headers& headers, int32_t* ret, int options) { CHECK(!this->IsDestroyed()); Http2Scope h2scope(this); Debug(this, "sending push promise"); - *ret = nghttp2_submit_push_promise(**session_, NGHTTP2_FLAG_NONE, - id_, nva, len, nullptr); + *ret = nghttp2_submit_push_promise( + **session_, + NGHTTP2_FLAG_NONE, + id_, + headers.data(), + headers.length(), + nullptr); CHECK_NE(*ret, NGHTTP2_ERR_NOMEM); Http2Stream* stream = nullptr; if (*ret > 0) { @@ -2221,12 +2174,12 @@ bool Http2Stream::AddHeader(nghttp2_rcbuf* name, nghttp2_rcbuf* value, uint8_t flags) { CHECK(!this->IsDestroyed()); - if (this->statistics_.first_header == 0) - this->statistics_.first_header = uv_hrtime(); - size_t name_len = nghttp2_rcbuf_get_buf(name).len; - if (name_len == 0) return true; // Ignore headers with empty names. - size_t value_len = nghttp2_rcbuf_get_buf(value).len; - size_t length = name_len + value_len + 32; + + if (Http2RcBufferPointer::IsZeroLength(name)) + return true; // Ignore empty headers. + + Http2Header header(env(), name, value, flags); + size_t length = header.length() + 32; // A header can only be added if we have not exceeded the maximum number // of headers and the session has memory available for it. if (!session_->IsAvailableSessionMemory(length) || @@ -2234,13 +2187,12 @@ bool Http2Stream::AddHeader(nghttp2_rcbuf* name, current_headers_length_ + length > max_header_length_) { return false; } - nghttp2_header header; - header.name = name; - header.value = value; - header.flags = flags; - current_headers_.push_back(header); - nghttp2_rcbuf_incref(name); - nghttp2_rcbuf_incref(value); + + if (statistics_.first_header == 0) + statistics_.first_header = uv_hrtime(); + + current_headers_.push_back(std::move(header)); + current_headers_length_ += length; session_->IncrementCurrentSessionMemory(length); return true; @@ -2487,21 +2439,20 @@ void Http2Session::Request(const FunctionCallbackInfo& args) { Http2Session* session; ASSIGN_OR_RETURN_UNWRAP(&session, args.Holder()); Environment* env = session->env(); - Local context = env->context(); - Isolate* isolate = env->isolate(); Local headers = args[0].As(); - int options = args[1]->IntegerValue(context).ToChecked(); + int options = args[1]->IntegerValue(env->context()).ToChecked(); Http2Priority priority(env, args[2], args[3], args[4]); - Headers list(isolate, context, headers); - Debug(session, "request submitted"); int32_t ret = 0; Http2Stream* stream = - session->Http2Session::SubmitRequest(*priority, *list, list.length(), - &ret, options); + session->Http2Session::SubmitRequest( + *priority, + Http2Headers(env, headers), + &ret, + options); if (ret <= 0 || stream == nullptr) { Debug(session, "could not submit request: %s", nghttp2_strerror(ret)); @@ -2586,18 +2537,14 @@ void Http2Stream::RstStream(const FunctionCallbackInfo& args) { // outbound DATA frames. 
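AddHeader above prices each incoming header at name length plus value length plus 32 bytes of bookkeeping overhead, and rejects it unless both the session's memory budget and the per-stream cap can absorb it. The guard in isolation, as a sketch; the real check also rejects once `max_header_pairs_` entries are queued:

```cpp
#include <cstddef>

// True if a header of the given sizes fits the remaining budgets.
bool CanAddHeader(size_t name_len, size_t value_len,
                  size_t available_session_memory,
                  size_t current_headers_length,
                  size_t max_header_length) {
  const size_t length = name_len + value_len + 32;  // 32 = overhead estimate
  return length <= available_session_memory &&
         current_headers_length + length <= max_header_length;
}
```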
void Http2Stream::Respond(const FunctionCallbackInfo& args) { Environment* env = Environment::GetCurrent(args); - Local context = env->context(); - Isolate* isolate = env->isolate(); Http2Stream* stream; ASSIGN_OR_RETURN_UNWRAP(&stream, args.Holder()); Local headers = args[0].As(); - int options = args[1]->IntegerValue(context).ToChecked(); - - Headers list(isolate, context, headers); + int options = args[1]->IntegerValue(env->context()).ToChecked(); args.GetReturnValue().Set( - stream->SubmitResponse(*list, list.length(), options)); + stream->SubmitResponse(Http2Headers(env, headers), options)); Debug(stream, "response submitted"); } @@ -2605,31 +2552,24 @@ void Http2Stream::Respond(const FunctionCallbackInfo& args) { // Submits informational headers on the Http2Stream void Http2Stream::Info(const FunctionCallbackInfo& args) { Environment* env = Environment::GetCurrent(args); - Local context = env->context(); - Isolate* isolate = env->isolate(); Http2Stream* stream; ASSIGN_OR_RETURN_UNWRAP(&stream, args.Holder()); Local headers = args[0].As(); - Headers list(isolate, context, headers); - args.GetReturnValue().Set(stream->SubmitInfo(*list, list.length())); - Debug(stream, "%d informational headers sent", list.length()); + args.GetReturnValue().Set(stream->SubmitInfo(Http2Headers(env, headers))); } // Submits trailing headers on the Http2Stream void Http2Stream::Trailers(const FunctionCallbackInfo& args) { Environment* env = Environment::GetCurrent(args); - Local context = env->context(); - Isolate* isolate = env->isolate(); Http2Stream* stream; ASSIGN_OR_RETURN_UNWRAP(&stream, args.Holder()); Local headers = args[0].As(); - Headers list(isolate, context, headers); - args.GetReturnValue().Set(stream->SubmitTrailers(*list, list.length())); - Debug(stream, "%d trailing headers sent", list.length()); + args.GetReturnValue().Set( + stream->SubmitTrailers(Http2Headers(env, headers))); } // Grab the numeric id of the Http2Stream @@ -2650,21 +2590,18 @@ void Http2Stream::Destroy(const FunctionCallbackInfo& args) { // Initiate a Push Promise and create the associated Http2Stream void Http2Stream::PushPromise(const FunctionCallbackInfo& args) { Environment* env = Environment::GetCurrent(args); - Local context = env->context(); - Isolate* isolate = env->isolate(); Http2Stream* parent; ASSIGN_OR_RETURN_UNWRAP(&parent, args.Holder()); Local headers = args[0].As(); - int options = args[1]->IntegerValue(context).ToChecked(); - - Headers list(isolate, context, headers); + int options = args[1]->IntegerValue(env->context()).ToChecked(); Debug(parent, "creating push promise"); int32_t ret = 0; - Http2Stream* stream = parent->SubmitPushPromise(*list, list.length(), - &ret, options); + Http2Stream* stream = + parent->SubmitPushPromise(Http2Headers(env, headers), &ret, options); + if (ret <= 0 || stream == nullptr) { Debug(parent, "failed to create push stream: %d", ret); return args.GetReturnValue().Set(ret); @@ -2940,12 +2877,6 @@ void nghttp2_stream_write::MemoryInfo(MemoryTracker* tracker) const { tracker->TrackField("buf", buf); } - -void nghttp2_header::MemoryInfo(MemoryTracker* tracker) const { - tracker->TrackFieldWithSize("name", nghttp2_rcbuf_get_buf(name).len); - tracker->TrackFieldWithSize("value", nghttp2_rcbuf_get_buf(value).len); -} - void SetCallbackFunctions(const FunctionCallbackInfo& args) { Environment* env = Environment::GetCurrent(args); CHECK_EQ(args.Length(), 11); @@ -3025,14 +2956,14 @@ void Initialize(Local target, ping->SetClassName(FIXED_ONE_BYTE_STRING(env->isolate(), 
"Http2Ping")); ping->Inherit(AsyncWrap::GetConstructorTemplate(env)); Local pingt = ping->InstanceTemplate(); - pingt->SetInternalFieldCount(1); + pingt->SetInternalFieldCount(Http2Session::Http2Ping::kInternalFieldCount); env->set_http2ping_constructor_template(pingt); Local setting = FunctionTemplate::New(env->isolate()); setting->SetClassName(FIXED_ONE_BYTE_STRING(env->isolate(), "Http2Setting")); setting->Inherit(AsyncWrap::GetConstructorTemplate(env)); Local settingt = setting->InstanceTemplate(); - settingt->SetInternalFieldCount(1); + settingt->SetInternalFieldCount(AsyncWrap::kInternalFieldCount); env->set_http2settings_constructor_template(settingt); Local stream = FunctionTemplate::New(env->isolate()); @@ -3049,7 +2980,7 @@ void Initialize(Local target, stream->Inherit(AsyncWrap::GetConstructorTemplate(env)); StreamBase::AddMethods(env, stream); Local streamt = stream->InstanceTemplate(); - streamt->SetInternalFieldCount(StreamBase::kStreamBaseFieldCount); + streamt->SetInternalFieldCount(StreamBase::kInternalFieldCount); env->set_http2stream_constructor_template(streamt); target->Set(context, FIXED_ONE_BYTE_STRING(env->isolate(), "Http2Stream"), @@ -3058,7 +2989,8 @@ void Initialize(Local target, Local session = env->NewFunctionTemplate(Http2Session::New); session->SetClassName(http2SessionClassName); - session->InstanceTemplate()->SetInternalFieldCount(1); + session->InstanceTemplate()->SetInternalFieldCount( + Http2Session::kInternalFieldCount); session->Inherit(AsyncWrap::GetConstructorTemplate(env)); env->SetProtoMethod(session, "origin", Http2Session::Origin); env->SetProtoMethod(session, "altsvc", Http2Session::AltSvc); diff --git a/src/node_http2.h b/src/node_http2.h index b468aac175d..e0a4e404491 100644 --- a/src/node_http2.h +++ b/src/node_http2.h @@ -8,6 +8,7 @@ #include "nghttp2/nghttp2.h" #include "node_http2_state.h" +#include "node_http_common.h" #include "node_mem.h" #include "node_perf.h" #include "stream_base-inl.h" @@ -50,8 +51,36 @@ using performance::PerformanceEntry; #define MIN_MAX_FRAME_SIZE DEFAULT_SETTINGS_MAX_FRAME_SIZE #define MAX_INITIAL_WINDOW_SIZE 2147483647 -#define MAX_MAX_HEADER_LIST_SIZE 16777215u -#define DEFAULT_MAX_HEADER_LIST_PAIRS 128u +struct Http2HeadersTraits { + typedef nghttp2_nv nv_t; + static const uint8_t kNoneFlag = NGHTTP2_NV_FLAG_NONE; +}; + +struct Http2RcBufferPointerTraits { + typedef nghttp2_rcbuf rcbuf_t; + typedef nghttp2_vec vector_t; + + static void inc(rcbuf_t* buf) { + CHECK_NOT_NULL(buf); + nghttp2_rcbuf_incref(buf); + } + static void dec(rcbuf_t* buf) { + CHECK_NOT_NULL(buf); + nghttp2_rcbuf_decref(buf); + } + static vector_t get_vec(rcbuf_t* buf) { + CHECK_NOT_NULL(buf); + return nghttp2_rcbuf_get_buf(buf); + } + static bool is_static(const rcbuf_t* buf) { + CHECK_NOT_NULL(buf); + return nghttp2_rcbuf_is_static(buf); + } +}; + +using Http2Headers = NgHeaders; +using Http2RcBufferPointer = NgRcBufPointer; + enum nghttp2_session_type { NGHTTP2_SESSION_SERVER, @@ -96,224 +125,6 @@ struct nghttp2_stream_write : public MemoryRetainer { SET_SELF_SIZE(nghttp2_stream_write) }; -struct nghttp2_header : public MemoryRetainer { - nghttp2_rcbuf* name = nullptr; - nghttp2_rcbuf* value = nullptr; - uint8_t flags = 0; - - void MemoryInfo(MemoryTracker* tracker) const override; - SET_MEMORY_INFO_NAME(nghttp2_header) - SET_SELF_SIZE(nghttp2_header) -}; - - -// Unlike the HTTP/1 implementation, the HTTP/2 implementation is not limited -// to a fixed number of known supported HTTP methods. 
These constants, therefore -// are provided strictly as a convenience to users and are exposed via the -// require('http2').constants object. -#define HTTP_KNOWN_METHODS(V) \ - V(ACL, "ACL") \ - V(BASELINE_CONTROL, "BASELINE-CONTROL") \ - V(BIND, "BIND") \ - V(CHECKIN, "CHECKIN") \ - V(CHECKOUT, "CHECKOUT") \ - V(CONNECT, "CONNECT") \ - V(COPY, "COPY") \ - V(DELETE, "DELETE") \ - V(GET, "GET") \ - V(HEAD, "HEAD") \ - V(LABEL, "LABEL") \ - V(LINK, "LINK") \ - V(LOCK, "LOCK") \ - V(MERGE, "MERGE") \ - V(MKACTIVITY, "MKACTIVITY") \ - V(MKCALENDAR, "MKCALENDAR") \ - V(MKCOL, "MKCOL") \ - V(MKREDIRECTREF, "MKREDIRECTREF") \ - V(MKWORKSPACE, "MKWORKSPACE") \ - V(MOVE, "MOVE") \ - V(OPTIONS, "OPTIONS") \ - V(ORDERPATCH, "ORDERPATCH") \ - V(PATCH, "PATCH") \ - V(POST, "POST") \ - V(PRI, "PRI") \ - V(PROPFIND, "PROPFIND") \ - V(PROPPATCH, "PROPPATCH") \ - V(PUT, "PUT") \ - V(REBIND, "REBIND") \ - V(REPORT, "REPORT") \ - V(SEARCH, "SEARCH") \ - V(TRACE, "TRACE") \ - V(UNBIND, "UNBIND") \ - V(UNCHECKOUT, "UNCHECKOUT") \ - V(UNLINK, "UNLINK") \ - V(UNLOCK, "UNLOCK") \ - V(UPDATE, "UPDATE") \ - V(UPDATEREDIRECTREF, "UPDATEREDIRECTREF") \ - V(VERSION_CONTROL, "VERSION-CONTROL") - -// These are provided strictly as a convenience to users and are exposed via the -// require('http2').constants objects -#define HTTP_KNOWN_HEADERS(V) \ - V(STATUS, ":status") \ - V(METHOD, ":method") \ - V(AUTHORITY, ":authority") \ - V(SCHEME, ":scheme") \ - V(PATH, ":path") \ - V(PROTOCOL, ":protocol") \ - V(ACCEPT_CHARSET, "accept-charset") \ - V(ACCEPT_ENCODING, "accept-encoding") \ - V(ACCEPT_LANGUAGE, "accept-language") \ - V(ACCEPT_RANGES, "accept-ranges") \ - V(ACCEPT, "accept") \ - V(ACCESS_CONTROL_ALLOW_CREDENTIALS, "access-control-allow-credentials") \ - V(ACCESS_CONTROL_ALLOW_HEADERS, "access-control-allow-headers") \ - V(ACCESS_CONTROL_ALLOW_METHODS, "access-control-allow-methods") \ - V(ACCESS_CONTROL_ALLOW_ORIGIN, "access-control-allow-origin") \ - V(ACCESS_CONTROL_EXPOSE_HEADERS, "access-control-expose-headers") \ - V(ACCESS_CONTROL_MAX_AGE, "access-control-max-age") \ - V(ACCESS_CONTROL_REQUEST_HEADERS, "access-control-request-headers") \ - V(ACCESS_CONTROL_REQUEST_METHOD, "access-control-request-method") \ - V(AGE, "age") \ - V(ALLOW, "allow") \ - V(AUTHORIZATION, "authorization") \ - V(CACHE_CONTROL, "cache-control") \ - V(CONNECTION, "connection") \ - V(CONTENT_DISPOSITION, "content-disposition") \ - V(CONTENT_ENCODING, "content-encoding") \ - V(CONTENT_LANGUAGE, "content-language") \ - V(CONTENT_LENGTH, "content-length") \ - V(CONTENT_LOCATION, "content-location") \ - V(CONTENT_MD5, "content-md5") \ - V(CONTENT_RANGE, "content-range") \ - V(CONTENT_TYPE, "content-type") \ - V(COOKIE, "cookie") \ - V(DATE, "date") \ - V(DNT, "dnt") \ - V(ETAG, "etag") \ - V(EXPECT, "expect") \ - V(EXPIRES, "expires") \ - V(FORWARDED, "forwarded") \ - V(FROM, "from") \ - V(HOST, "host") \ - V(IF_MATCH, "if-match") \ - V(IF_MODIFIED_SINCE, "if-modified-since") \ - V(IF_NONE_MATCH, "if-none-match") \ - V(IF_RANGE, "if-range") \ - V(IF_UNMODIFIED_SINCE, "if-unmodified-since") \ - V(LAST_MODIFIED, "last-modified") \ - V(LINK, "link") \ - V(LOCATION, "location") \ - V(MAX_FORWARDS, "max-forwards") \ - V(PREFER, "prefer") \ - V(PROXY_AUTHENTICATE, "proxy-authenticate") \ - V(PROXY_AUTHORIZATION, "proxy-authorization") \ - V(RANGE, "range") \ - V(REFERER, "referer") \ - V(REFRESH, "refresh") \ - V(RETRY_AFTER, "retry-after") \ - V(SERVER, "server") \ - V(SET_COOKIE, "set-cookie") \ - V(STRICT_TRANSPORT_SECURITY, 
"strict-transport-security") \ - V(TRAILER, "trailer") \ - V(TRANSFER_ENCODING, "transfer-encoding") \ - V(TE, "te") \ - V(TK, "tk") \ - V(UPGRADE_INSECURE_REQUESTS, "upgrade-insecure-requests") \ - V(UPGRADE, "upgrade") \ - V(USER_AGENT, "user-agent") \ - V(VARY, "vary") \ - V(VIA, "via") \ - V(WARNING, "warning") \ - V(WWW_AUTHENTICATE, "www-authenticate") \ - V(X_CONTENT_TYPE_OPTIONS, "x-content-type-options") \ - V(X_FRAME_OPTIONS, "x-frame-options") \ - V(HTTP2_SETTINGS, "http2-settings") \ - V(KEEP_ALIVE, "keep-alive") \ - V(PROXY_CONNECTION, "proxy-connection") - -enum http_known_headers { - HTTP_KNOWN_HEADER_MIN, -#define V(name, value) HTTP_HEADER_##name, - HTTP_KNOWN_HEADERS(V) -#undef V - HTTP_KNOWN_HEADER_MAX -}; - -// While some of these codes are used within the HTTP/2 implementation in -// core, they are provided strictly as a convenience to users and are exposed -// via the require('http2').constants object. -#define HTTP_STATUS_CODES(V) \ - V(CONTINUE, 100) \ - V(SWITCHING_PROTOCOLS, 101) \ - V(PROCESSING, 102) \ - V(EARLY_HINTS, 103) \ - V(OK, 200) \ - V(CREATED, 201) \ - V(ACCEPTED, 202) \ - V(NON_AUTHORITATIVE_INFORMATION, 203) \ - V(NO_CONTENT, 204) \ - V(RESET_CONTENT, 205) \ - V(PARTIAL_CONTENT, 206) \ - V(MULTI_STATUS, 207) \ - V(ALREADY_REPORTED, 208) \ - V(IM_USED, 226) \ - V(MULTIPLE_CHOICES, 300) \ - V(MOVED_PERMANENTLY, 301) \ - V(FOUND, 302) \ - V(SEE_OTHER, 303) \ - V(NOT_MODIFIED, 304) \ - V(USE_PROXY, 305) \ - V(TEMPORARY_REDIRECT, 307) \ - V(PERMANENT_REDIRECT, 308) \ - V(BAD_REQUEST, 400) \ - V(UNAUTHORIZED, 401) \ - V(PAYMENT_REQUIRED, 402) \ - V(FORBIDDEN, 403) \ - V(NOT_FOUND, 404) \ - V(METHOD_NOT_ALLOWED, 405) \ - V(NOT_ACCEPTABLE, 406) \ - V(PROXY_AUTHENTICATION_REQUIRED, 407) \ - V(REQUEST_TIMEOUT, 408) \ - V(CONFLICT, 409) \ - V(GONE, 410) \ - V(LENGTH_REQUIRED, 411) \ - V(PRECONDITION_FAILED, 412) \ - V(PAYLOAD_TOO_LARGE, 413) \ - V(URI_TOO_LONG, 414) \ - V(UNSUPPORTED_MEDIA_TYPE, 415) \ - V(RANGE_NOT_SATISFIABLE, 416) \ - V(EXPECTATION_FAILED, 417) \ - V(TEAPOT, 418) \ - V(MISDIRECTED_REQUEST, 421) \ - V(UNPROCESSABLE_ENTITY, 422) \ - V(LOCKED, 423) \ - V(FAILED_DEPENDENCY, 424) \ - V(TOO_EARLY, 425) \ - V(UPGRADE_REQUIRED, 426) \ - V(PRECONDITION_REQUIRED, 428) \ - V(TOO_MANY_REQUESTS, 429) \ - V(REQUEST_HEADER_FIELDS_TOO_LARGE, 431) \ - V(UNAVAILABLE_FOR_LEGAL_REASONS, 451) \ - V(INTERNAL_SERVER_ERROR, 500) \ - V(NOT_IMPLEMENTED, 501) \ - V(BAD_GATEWAY, 502) \ - V(SERVICE_UNAVAILABLE, 503) \ - V(GATEWAY_TIMEOUT, 504) \ - V(HTTP_VERSION_NOT_SUPPORTED, 505) \ - V(VARIANT_ALSO_NEGOTIATES, 506) \ - V(INSUFFICIENT_STORAGE, 507) \ - V(LOOP_DETECTED, 508) \ - V(BANDWIDTH_LIMIT_EXCEEDED, 509) \ - V(NOT_EXTENDED, 510) \ - V(NETWORK_AUTHENTICATION_REQUIRED, 511) - -enum http_status_codes { -#define V(name, code) HTTP_STATUS_##name = code, - HTTP_STATUS_CODES(V) -#undef V -}; - // The Padding Strategy determines the method by which extra padding is // selected for HEADERS and DATA frames. These are configurable via the // options passed in to a Http2Session object. @@ -446,6 +257,17 @@ class Http2StreamListener : public StreamListener { void OnStreamRead(ssize_t nread, const uv_buf_t& buf) override; }; +struct Http2HeaderTraits { + typedef Http2RcBufferPointer rcbufferpointer_t; + typedef Http2Session allocator_t; + + // HTTP/2 does not support identifying header names by token id. + // HTTP/3 will, however, so we prepare for that now. 
+ static const char* ToHttpHeaderName(int32_t token) { return nullptr; } +}; + +using Http2Header = NgHeader; + class Http2Stream : public AsyncWrap, public StreamBase { public: @@ -476,13 +298,13 @@ class Http2Stream : public AsyncWrap, bool HasWantsWrite() const override { return true; } // Initiate a response on this stream. - int SubmitResponse(nghttp2_nv* nva, size_t len, int options); + int SubmitResponse(const Http2Headers& headers, int options); // Submit informational headers for this stream - int SubmitInfo(nghttp2_nv* nva, size_t len); + int SubmitInfo(const Http2Headers& headers); // Submit trailing headers for this stream - int SubmitTrailers(nghttp2_nv* nva, size_t len); + int SubmitTrailers(const Http2Headers& headers); void OnTrailers(); // Submit a PRIORITY frame for this stream @@ -495,8 +317,7 @@ class Http2Stream : public AsyncWrap, // Submits a PUSH_PROMISE frame with this stream as the parent. Http2Stream* SubmitPushPromise( - nghttp2_nv* nva, - size_t len, + const Http2Headers& headers, int32_t* ret, int options = 0); @@ -545,8 +366,16 @@ class Http2Stream : public AsyncWrap, bool AddHeader(nghttp2_rcbuf* name, nghttp2_rcbuf* value, uint8_t flags); - inline std::vector move_headers() { - return std::move(current_headers_); + template + void TransferHeaders(Fn&& fn) { + size_t i = 0; + for (const auto& header : current_headers_ ) + fn(header, i++); + current_headers_.clear(); + } + + size_t headers_count() const { + return current_headers_.size(); } inline nghttp2_headers_category headers_category() const { @@ -625,7 +454,7 @@ class Http2Stream : public AsyncWrap, // signalling the end of the HEADERS frame nghttp2_headers_category current_headers_category_ = NGHTTP2_HCAT_HEADERS; uint32_t current_headers_length_ = 0; // total number of octets - std::vector current_headers_; + std::vector current_headers_; // This keeps track of the amount of data read from the socket while the // socket was in paused mode. When `ReadStart()` is called (and not before @@ -676,13 +505,13 @@ class Http2Stream::Provider::Stream : public Http2Stream::Provider { void* user_data); }; -typedef struct { +struct SessionJSFields { uint8_t bitfield; uint8_t priority_listener_count; uint8_t frame_error_listener_count; uint32_t max_invalid_frames = 1000; uint32_t max_rejected_streams = 100; -} SessionJSFields; +}; // Indices for js_fields_, which serves as a way to communicate data with JS // land fast. In particular, we store information about the number/presence @@ -743,8 +572,7 @@ class Http2Session : public AsyncWrap, // This only works if the session is a client session. 
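`TransferHeaders()` above visits each queued header together with its index and then clears the queue, so the callback must copy or convert anything it still needs; `HandleHeadersFrame()` in the node_http2.cc hunk is the one real caller. The contract restated over a stand-in header type:

```cpp
#include <string>
#include <vector>

struct HeaderSketch { std::string name, value; };

// Visit each queued header with its index, then drop the whole queue
// (in the real code, clearing releases the rcbuf references).
template <typename Fn>
void TransferHeadersSketch(std::vector<HeaderSketch>* queue, Fn&& fn) {
  size_t i = 0;
  for (const auto& header : *queue)
    fn(header, i++);
  queue->clear();
}
```

A caller copies values out inside the lambda, e.g. `TransferHeadersSketch(&q, [&](const HeaderSketch& h, size_t i) { names[i] = h.name; });`, and can assert the queue is empty afterwards, as the `CHECK_EQ` in `HandleHeadersFrame` does.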
Http2Stream* SubmitRequest( nghttp2_priority_spec* prispec, - nghttp2_nv* nva, - size_t len, + const Http2Headers& headers, int32_t* ret, int options = 0); @@ -1189,96 +1017,6 @@ class Http2Session::Http2Settings : public AsyncWrap { nghttp2_settings_entry entries_[IDX_SETTINGS_COUNT]; }; -class ExternalHeader : - public String::ExternalOneByteStringResource { - public: - explicit ExternalHeader(nghttp2_rcbuf* buf) - : buf_(buf), vec_(nghttp2_rcbuf_get_buf(buf)) { - } - - ~ExternalHeader() override { - nghttp2_rcbuf_decref(buf_); - buf_ = nullptr; - } - - const char* data() const override { - return const_cast(reinterpret_cast(vec_.base)); - } - - size_t length() const override { - return vec_.len; - } - - static inline - MaybeLocal GetInternalizedString(Environment* env, - const nghttp2_vec& vec) { - return String::NewFromOneByte(env->isolate(), - vec.base, - v8::NewStringType::kInternalized, - vec.len); - } - - template - static MaybeLocal New(Http2Session* session, nghttp2_rcbuf* buf) { - Environment* env = session->env(); - if (nghttp2_rcbuf_is_static(buf)) { - auto& static_str_map = env->isolate_data()->http2_static_strs; - v8::Eternal& eternal = static_str_map[buf]; - if (eternal.IsEmpty()) { - Local str = - GetInternalizedString(env, nghttp2_rcbuf_get_buf(buf)) - .ToLocalChecked(); - eternal.Set(env->isolate(), str); - return str; - } - return eternal.Get(env->isolate()); - } - - nghttp2_vec vec = nghttp2_rcbuf_get_buf(buf); - if (vec.len == 0) { - nghttp2_rcbuf_decref(buf); - return String::Empty(env->isolate()); - } - - if (may_internalize && vec.len < 64) { - nghttp2_rcbuf_decref(buf); - // This is a short header name, so there is a good chance V8 already has - // it internalized. - return GetInternalizedString(env, vec); - } - - session->StopTrackingRcbuf(buf); - ExternalHeader* h_str = new ExternalHeader(buf); - MaybeLocal str = String::NewExternalOneByte(env->isolate(), h_str); - if (str.IsEmpty()) - delete h_str; - - return str; - } - - private: - nghttp2_rcbuf* buf_; - nghttp2_vec vec_; -}; - -class Headers { - public: - Headers(Isolate* isolate, Local context, Local headers); - ~Headers() = default; - - nghttp2_nv* operator*() { - return reinterpret_cast(*buf_); - } - - size_t length() const { - return count_; - } - - private: - size_t count_; - MaybeStackBuffer buf_; -}; - class Origins { public: Origins(Isolate* isolate, diff --git a/src/node_http_common-inl.h b/src/node_http_common-inl.h new file mode 100644 index 00000000000..d63cdf79a4b --- /dev/null +++ b/src/node_http_common-inl.h @@ -0,0 +1,181 @@ +#ifndef SRC_NODE_HTTP_COMMON_INL_H_ +#define SRC_NODE_HTTP_COMMON_INL_H_ + +#include "node_http_common.h" +#include "node.h" +#include "node_mem-inl.h" +#include "env-inl.h" +#include "v8.h" + +#include + +namespace node { + +template +NgHeaders::NgHeaders(Environment* env, v8::Local headers) { + v8::Local header_string = + headers->Get(env->context(), 0).ToLocalChecked(); + v8::Local header_count = + headers->Get(env->context(), 1).ToLocalChecked(); + CHECK(header_count->IsUint32()); + CHECK(header_string->IsString()); + count_ = header_count.As()->Value(); + int header_string_len = header_string.As()->Length(); + + if (count_ == 0) { + CHECK_EQ(header_string_len, 0); + return; + } + + buf_.AllocateSufficientStorage((alignof(nv_t) - 1) + + count_ * sizeof(nv_t) + + header_string_len); + + char* start = reinterpret_cast( + RoundUp(reinterpret_cast(*buf_), alignof(nv_t))); + char* header_contents = start + (count_ * sizeof(nv_t)); + nv_t* const nva = 
+      reinterpret_cast<nv_t*>(start);
+
+  CHECK_LE(header_contents + header_string_len, *buf_ + buf_.length());
+  CHECK_EQ(header_string.As<v8::String>()->WriteOneByte(
+               env->isolate(),
+               reinterpret_cast<uint8_t*>(header_contents),
+               0,
+               header_string_len,
+               v8::String::NO_NULL_TERMINATION),
+           header_string_len);
+
+  size_t n = 0;
+  char* p;
+  for (p = header_contents; p < header_contents + header_string_len; n++) {
+    if (n >= count_) {
+      static uint8_t zero = '\0';
+      nva[0].name = nva[0].value = &zero;
+      nva[0].namelen = nva[0].valuelen = 1;
+      count_ = 1;
+      return;
+    }
+
+    nva[n].flags = T::kNoneFlag;
+    nva[n].name = reinterpret_cast<uint8_t*>(p);
+    nva[n].namelen = strlen(p);
+    p += nva[n].namelen + 1;
+    nva[n].value = reinterpret_cast<uint8_t*>(p);
+    nva[n].valuelen = strlen(p);
+    p += nva[n].valuelen + 1;
+  }
+}
+
+size_t GetClientMaxHeaderPairs(size_t max_header_pairs) {
+  static constexpr size_t min_header_pairs = 1;
+  return std::max(max_header_pairs, min_header_pairs);
+}
+
+size_t GetServerMaxHeaderPairs(size_t max_header_pairs) {
+  static constexpr size_t min_header_pairs = 4;
+  return std::max(max_header_pairs, min_header_pairs);
+}
+
+template <typename T>
+bool NgHeader<T>::IsZeroLength(
+    NgHeader<T>::rcbuf_t* name,
+    NgHeader<T>::rcbuf_t* value) {
+  return IsZeroLength(-1, name, value);
+}
+
+template <typename T>
+bool NgHeader<T>::IsZeroLength(
+    int32_t token,
+    NgHeader<T>::rcbuf_t* name,
+    NgHeader<T>::rcbuf_t* value) {
+
+  if (NgHeader<T>::rcbufferpointer_t::IsZeroLength(value))
+    return true;
+
+  const char* header_name = T::ToHttpHeaderName(token);
+  return header_name != nullptr ||
+      NgHeader<T>::rcbufferpointer_t::IsZeroLength(name);
+}
+
+template <typename T>
+NgHeader<T>::NgHeader(
+    Environment* env,
+    NgHeader<T>::rcbuf_t* name,
+    NgHeader<T>::rcbuf_t* value,
+    uint8_t flags)
+    : NgHeader<T>(env, -1, name, value, flags) {}
+
+template <typename T>
+NgHeader<T>::NgHeader(
+    Environment* env,
+    int32_t token,
+    NgHeader<T>::rcbuf_t* name,
+    NgHeader<T>::rcbuf_t* value,
+    uint8_t flags) : env_(env), token_(token), flags_(flags) {
+  if (token == -1) {
+    CHECK_NOT_NULL(name);
+    name_.reset(name, true);  // Internalizable
+  }
+  CHECK_NOT_NULL(value);
+  value_.reset(value);
+}
+
+template <typename T>
+NgHeader<T>::NgHeader(NgHeader<T>&& other) noexcept
+    : env_(other.env_),
+      name_(std::move(other.name_)),
+      value_(std::move(other.value_)),
+      token_(other.token_),
+      flags_(other.flags_) {
+  other.token_ = -1;
+  other.flags_ = 0;
+  other.env_ = nullptr;
+}
+
+template <typename T>
+v8::MaybeLocal<v8::String> NgHeader<T>::GetName(
+    NgHeader<T>::allocator_t* allocator) const {
+
+  // Not all instances will support using token ids for header names.
+  // HTTP/2 specifically does not support it.
+  const char* header_name = T::ToHttpHeaderName(token_);
+
+  // If header_name is not nullptr, then it is a known header with
+  // a statically defined name. We can safely internalize it here.
+ if (header_name != nullptr) { + auto& static_str_map = env_->isolate_data()->http_static_strs; + v8::Eternal eternal = static_str_map[header_name]; + if (eternal.IsEmpty()) { + v8::Local str = OneByteString(env_->isolate(), header_name); + eternal.Set(env_->isolate(), str); + return str; + } + return eternal.Get(env_->isolate()); + } + return rcbufferpointer_t::External::New(allocator, name_); +} + +template +v8::MaybeLocal NgHeader::GetValue( + NgHeader::allocator_t* allocator) const { + return rcbufferpointer_t::External::New(allocator, value_); +} + +template +std::string NgHeader::name() const { + return name_.str(); +} + +template +std::string NgHeader::value() const { + return value_.str(); +} + +template +size_t NgHeader::length() const { + return name_.len() + value_.len(); +} + +} // namespace node + +#endif // SRC_NODE_HTTP_COMMON_INL_H_ diff --git a/src/node_http_common.h b/src/node_http_common.h new file mode 100644 index 00000000000..41b5f419d94 --- /dev/null +++ b/src/node_http_common.h @@ -0,0 +1,527 @@ +#ifndef SRC_NODE_HTTP_COMMON_H_ +#define SRC_NODE_HTTP_COMMON_H_ + +#if defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS + +#include "v8.h" +#include "node_mem.h" + +#include + +namespace node { + +class Environment; + +#define MAX_MAX_HEADER_LIST_SIZE 16777215u +#define DEFAULT_MAX_HEADER_LIST_PAIRS 128u +#define DEFAULT_MAX_HEADER_LENGTH 8192 + +#define HTTP_SPECIAL_HEADERS(V) \ + V(STATUS, ":status") \ + V(METHOD, ":method") \ + V(AUTHORITY, ":authority") \ + V(SCHEME, ":scheme") \ + V(PATH, ":path") \ + V(PROTOCOL, ":protocol") + +#define HTTP_REGULAR_HEADERS(V) \ + V(ACCEPT_ENCODING, "accept-encoding") \ + V(ACCEPT_LANGUAGE, "accept-language") \ + V(ACCEPT_RANGES, "accept-ranges") \ + V(ACCEPT, "accept") \ + V(ACCESS_CONTROL_ALLOW_CREDENTIALS, "access-control-allow-credentials") \ + V(ACCESS_CONTROL_ALLOW_HEADERS, "access-control-allow-headers") \ + V(ACCESS_CONTROL_ALLOW_METHODS, "access-control-allow-methods") \ + V(ACCESS_CONTROL_ALLOW_ORIGIN, "access-control-allow-origin") \ + V(ACCESS_CONTROL_EXPOSE_HEADERS, "access-control-expose-headers") \ + V(ACCESS_CONTROL_REQUEST_HEADERS, "access-control-request-headers") \ + V(ACCESS_CONTROL_REQUEST_METHOD, "access-control-request-method") \ + V(AGE, "age") \ + V(AUTHORIZATION, "authorization") \ + V(CACHE_CONTROL, "cache-control") \ + V(CONNECTION, "connection") \ + V(CONTENT_DISPOSITION, "content-disposition") \ + V(CONTENT_ENCODING, "content-encoding") \ + V(CONTENT_LENGTH, "content-length") \ + V(CONTENT_TYPE, "content-type") \ + V(COOKIE, "cookie") \ + V(DATE, "date") \ + V(ETAG, "etag") \ + V(FORWARDED, "forwarded") \ + V(HOST, "host") \ + V(IF_MODIFIED_SINCE, "if-modified-since") \ + V(IF_NONE_MATCH, "if-none-match") \ + V(IF_RANGE, "if-range") \ + V(LAST_MODIFIED, "last-modified") \ + V(LINK, "link") \ + V(LOCATION, "location") \ + V(RANGE, "range") \ + V(REFERER, "referer") \ + V(SERVER, "server") \ + V(SET_COOKIE, "set-cookie") \ + V(STRICT_TRANSPORT_SECURITY, "strict-transport-security") \ + V(TRANSFER_ENCODING, "transfer-encoding") \ + V(TE, "te") \ + V(UPGRADE_INSECURE_REQUESTS, "upgrade-insecure-requests") \ + V(UPGRADE, "upgrade") \ + V(USER_AGENT, "user-agent") \ + V(VARY, "vary") \ + V(X_CONTENT_TYPE_OPTIONS, "x-content-type-options") \ + V(X_FRAME_OPTIONS, "x-frame-options") \ + V(KEEP_ALIVE, "keep-alive") \ + V(PROXY_CONNECTION, "proxy-connection") \ + V(X_XSS_PROTECTION, "x-xss-protection") \ + V(ALT_SVC, "alt-svc") \ + V(CONTENT_SECURITY_POLICY, "content-security-policy") \ + V(EARLY_DATA, 
"early-data") \ + V(EXPECT_CT, "expect-ct") \ + V(ORIGIN, "origin") \ + V(PURPOSE, "purpose") \ + V(TIMING_ALLOW_ORIGIN, "timing-allow-origin") \ + V(X_FORWARDED_FOR, "x-forwarded-for") + +#define HTTP_ADDITIONAL_HEADERS(V) \ + V(ACCEPT_CHARSET, "accept-charset") \ + V(ACCESS_CONTROL_MAX_AGE, "access-control-max-age") \ + V(ALLOW, "allow") \ + V(CONTENT_LANGUAGE, "content-language") \ + V(CONTENT_LOCATION, "content-location") \ + V(CONTENT_MD5, "content-md5") \ + V(CONTENT_RANGE, "content-range") \ + V(DNT, "dnt") \ + V(EXPECT, "expect") \ + V(EXPIRES, "expires") \ + V(FROM, "from") \ + V(IF_MATCH, "if-match") \ + V(IF_UNMODIFIED_SINCE, "if-unmodified-since") \ + V(MAX_FORWARDS, "max-forwards") \ + V(PREFER, "prefer") \ + V(PROXY_AUTHENTICATE, "proxy-authenticate") \ + V(PROXY_AUTHORIZATION, "proxy-authorization") \ + V(REFRESH, "refresh") \ + V(RETRY_AFTER, "retry-after") \ + V(TRAILER, "trailer") \ + V(TK, "tk") \ + V(VIA, "via") \ + V(WARNING, "warning") \ + V(WWW_AUTHENTICATE, "www-authenticate") \ + V(HTTP2_SETTINGS, "http2-settings") + +// Special and regular headers are handled specifically by the HTTP/2 (and +// later HTTP/3) implementation. +#define HTTP_KNOWN_HEADERS(V) \ + HTTP_SPECIAL_HEADERS(V) \ + HTTP_REGULAR_HEADERS(V) \ + HTTP_ADDITIONAL_HEADERS(V) + +enum http_known_headers { + HTTP_KNOWN_HEADER_MIN, +#define V(name, value) HTTP_HEADER_##name, + HTTP_KNOWN_HEADERS(V) +#undef V + HTTP_KNOWN_HEADER_MAX +}; + +#define HTTP_STATUS_CODES(V) \ + V(CONTINUE, 100) \ + V(SWITCHING_PROTOCOLS, 101) \ + V(PROCESSING, 102) \ + V(EARLY_HINTS, 103) \ + V(OK, 200) \ + V(CREATED, 201) \ + V(ACCEPTED, 202) \ + V(NON_AUTHORITATIVE_INFORMATION, 203) \ + V(NO_CONTENT, 204) \ + V(RESET_CONTENT, 205) \ + V(PARTIAL_CONTENT, 206) \ + V(MULTI_STATUS, 207) \ + V(ALREADY_REPORTED, 208) \ + V(IM_USED, 226) \ + V(MULTIPLE_CHOICES, 300) \ + V(MOVED_PERMANENTLY, 301) \ + V(FOUND, 302) \ + V(SEE_OTHER, 303) \ + V(NOT_MODIFIED, 304) \ + V(USE_PROXY, 305) \ + V(TEMPORARY_REDIRECT, 307) \ + V(PERMANENT_REDIRECT, 308) \ + V(BAD_REQUEST, 400) \ + V(UNAUTHORIZED, 401) \ + V(PAYMENT_REQUIRED, 402) \ + V(FORBIDDEN, 403) \ + V(NOT_FOUND, 404) \ + V(METHOD_NOT_ALLOWED, 405) \ + V(NOT_ACCEPTABLE, 406) \ + V(PROXY_AUTHENTICATION_REQUIRED, 407) \ + V(REQUEST_TIMEOUT, 408) \ + V(CONFLICT, 409) \ + V(GONE, 410) \ + V(LENGTH_REQUIRED, 411) \ + V(PRECONDITION_FAILED, 412) \ + V(PAYLOAD_TOO_LARGE, 413) \ + V(URI_TOO_LONG, 414) \ + V(UNSUPPORTED_MEDIA_TYPE, 415) \ + V(RANGE_NOT_SATISFIABLE, 416) \ + V(EXPECTATION_FAILED, 417) \ + V(TEAPOT, 418) \ + V(MISDIRECTED_REQUEST, 421) \ + V(UNPROCESSABLE_ENTITY, 422) \ + V(LOCKED, 423) \ + V(FAILED_DEPENDENCY, 424) \ + V(TOO_EARLY, 425) \ + V(UPGRADE_REQUIRED, 426) \ + V(PRECONDITION_REQUIRED, 428) \ + V(TOO_MANY_REQUESTS, 429) \ + V(REQUEST_HEADER_FIELDS_TOO_LARGE, 431) \ + V(UNAVAILABLE_FOR_LEGAL_REASONS, 451) \ + V(INTERNAL_SERVER_ERROR, 500) \ + V(NOT_IMPLEMENTED, 501) \ + V(BAD_GATEWAY, 502) \ + V(SERVICE_UNAVAILABLE, 503) \ + V(GATEWAY_TIMEOUT, 504) \ + V(HTTP_VERSION_NOT_SUPPORTED, 505) \ + V(VARIANT_ALSO_NEGOTIATES, 506) \ + V(INSUFFICIENT_STORAGE, 507) \ + V(LOOP_DETECTED, 508) \ + V(BANDWIDTH_LIMIT_EXCEEDED, 509) \ + V(NOT_EXTENDED, 510) \ + V(NETWORK_AUTHENTICATION_REQUIRED, 511) + +enum http_status_codes { +#define V(name, code) HTTP_STATUS_##name = code, + HTTP_STATUS_CODES(V) +#undef V +}; + +// Unlike the HTTP/1 implementation, the HTTP/2 implementation is not limited +// to a fixed number of known supported HTTP methods. 
These constants, therefore +// are provided strictly as a convenience to users and are exposed via the +// require('http2').constants object. +#define HTTP_KNOWN_METHODS(V) \ + V(ACL, "ACL") \ + V(BASELINE_CONTROL, "BASELINE-CONTROL") \ + V(BIND, "BIND") \ + V(CHECKIN, "CHECKIN") \ + V(CHECKOUT, "CHECKOUT") \ + V(CONNECT, "CONNECT") \ + V(COPY, "COPY") \ + V(DELETE, "DELETE") \ + V(GET, "GET") \ + V(HEAD, "HEAD") \ + V(LABEL, "LABEL") \ + V(LINK, "LINK") \ + V(LOCK, "LOCK") \ + V(MERGE, "MERGE") \ + V(MKACTIVITY, "MKACTIVITY") \ + V(MKCALENDAR, "MKCALENDAR") \ + V(MKCOL, "MKCOL") \ + V(MKREDIRECTREF, "MKREDIRECTREF") \ + V(MKWORKSPACE, "MKWORKSPACE") \ + V(MOVE, "MOVE") \ + V(OPTIONS, "OPTIONS") \ + V(ORDERPATCH, "ORDERPATCH") \ + V(PATCH, "PATCH") \ + V(POST, "POST") \ + V(PRI, "PRI") \ + V(PROPFIND, "PROPFIND") \ + V(PROPPATCH, "PROPPATCH") \ + V(PUT, "PUT") \ + V(REBIND, "REBIND") \ + V(REPORT, "REPORT") \ + V(SEARCH, "SEARCH") \ + V(TRACE, "TRACE") \ + V(UNBIND, "UNBIND") \ + V(UNCHECKOUT, "UNCHECKOUT") \ + V(UNLINK, "UNLINK") \ + V(UNLOCK, "UNLOCK") \ + V(UPDATE, "UPDATE") \ + V(UPDATEREDIRECTREF, "UPDATEREDIRECTREF") \ + V(VERSION_CONTROL, "VERSION-CONTROL") + +// NgHeaders takes as input a block of headers provided by the +// JavaScript side (see http2's mapToHeaders function) and +// converts it into a array of ng header structs. This is done +// generically to handle both http/2 and (in the future) http/3, +// which use nearly identical structs. The template parameter +// takes a Traits type that defines the ng header struct and +// the kNoneFlag value. See Http2HeaderTraits in node_http2.h +// for an example. +template +class NgHeaders { + public: + typedef typename T::nv_t nv_t; + inline NgHeaders(Environment* env, v8::Local headers); + ~NgHeaders() = default; + + const nv_t* operator*() const { + return reinterpret_cast(*buf_); + } + + const nv_t* data() const { + return reinterpret_cast(*buf_); + } + + size_t length() const { + return count_; + } + + private: + size_t count_; + MaybeStackBuffer buf_; +}; + +// The ng libraries (nghttp2 and nghttp3) each use nearly identical +// reference counted structures for retaining header name and value +// information in memory until the application is done with it. +// The NgRcBufPointer is an intelligent pointer capable of working +// with either type, handling the ref counting increment and +// decrement as appropriate. The Template takes a single Traits +// type that provides the rc buffer and vec type, as well as +// implementations for multiple static functions. +// See Http2RcBufferPointerTraits in node_http2.h for an example. 
+template +class NgRcBufPointer : public MemoryRetainer { + public: + typedef typename T::rcbuf_t rcbuf_t; + typedef typename T::vector_t vector_t; + + NgRcBufPointer() = default; + + explicit NgRcBufPointer(rcbuf_t* buf) { + reset(buf); + } + + template + NgRcBufPointer(const NgRcBufPointer& other) { + reset(other.get()); + } + + NgRcBufPointer(const NgRcBufPointer& other) { + reset(other.get()); + } + + template + NgRcBufPointer& operator=(const NgRcBufPointer& other) { + if (other.get() == get()) return *this; + this->~NgRcBufPointer(); + return *new (this) NgRcBufPointer(other); + } + + NgRcBufPointer& operator=(const NgRcBufPointer& other) { + if (other.get() == get()) return *this; + this->~NgRcBufPointer(); + return *new (this) NgRcBufPointer(other); + } + + NgRcBufPointer(NgRcBufPointer&& other) { + this->~NgRcBufPointer(); + buf_ = other.buf_; + other.buf_ = nullptr; + } + + NgRcBufPointer& operator=(NgRcBufPointer&& other) { + this->~NgRcBufPointer(); + return *new (this) NgRcBufPointer(std::move(other)); + } + + ~NgRcBufPointer() { + reset(); + } + + // Returns the underlying ngvec for this rcbuf + uint8_t* data() const { + vector_t v = T::get_vec(buf_); + return v.base; + } + + size_t len() const { + vector_t v = T::get_vec(buf_); + return v.len; + } + + std::string str() const { + return std::string(reinterpret_cast(data()), len()); + } + + void reset(rcbuf_t* ptr = nullptr, bool internalizable = false) { + if (buf_ == ptr) + return; + + if (buf_ != nullptr) + T::dec(buf_); + + buf_ = ptr; + + if (ptr != nullptr) { + T::inc(ptr); + internalizable_ = internalizable; + } + } + + rcbuf_t* get() const { return buf_; } + rcbuf_t& operator*() const { return *get(); } + rcbuf_t* operator->() const { return buf_; } + operator bool() const { return buf_ != nullptr; } + bool IsStatic() const { return T::is_static(buf_) != 0; } + void SetInternalizable() { internalizable_ = true; } + bool IsInternalizable() const { return internalizable_; } + + static inline bool IsZeroLength(rcbuf_t* buf) { + if (buf == nullptr) + return true; + vector_t b = T::get_vec(buf); + return b.len == 0; + } + + void MemoryInfo(MemoryTracker* tracker) const override { + tracker->TrackFieldWithSize("buf", len(), "buf"); + } + + SET_MEMORY_INFO_NAME(NgRcBufPointer) + SET_SELF_SIZE(NgRcBufPointer) + + class External : public v8::String::ExternalOneByteStringResource { + public: + explicit External(const NgRcBufPointer& ptr) : ptr_(ptr) {} + + const char* data() const override { + return const_cast(reinterpret_cast(ptr_.data())); + } + + size_t length() const override { + return ptr_.len(); + } + + static inline + v8::MaybeLocal GetInternalizedString( + Environment* env, + const NgRcBufPointer& ptr) { + return v8::String::NewFromOneByte( + env->isolate(), + ptr.data(), + v8::NewStringType::kInternalized, + ptr.len()); + } + + template + static v8::MaybeLocal New( + Allocator* allocator, + NgRcBufPointer ptr) { + Environment* env = allocator->env(); + if (ptr.IsStatic()) { + auto& static_str_map = env->isolate_data()->http_static_strs; + const char* header_name = reinterpret_cast(ptr.data()); + v8::Eternal& eternal = static_str_map[header_name]; + if (eternal.IsEmpty()) { + v8::Local str = + GetInternalizedString(env, ptr).ToLocalChecked(); + eternal.Set(env->isolate(), str); + return str; + } + return eternal.Get(env->isolate()); + } + + size_t len = ptr.len(); + + if (len == 0) { + ptr.reset(); + return v8::String::Empty(env->isolate()); + } + + if (ptr.IsInternalizable() && len < 64) { + v8::MaybeLocal ret = 
GetInternalizedString(env, ptr); + ptr.reset(); + return ret; + } + + allocator->StopTrackingMemory(ptr.get()); + External* h_str = new External(std::move(ptr)); + v8::MaybeLocal str = + v8::String::NewExternalOneByte(env->isolate(), h_str); + if (str.IsEmpty()) + delete h_str; + + return str; + } + + private: + NgRcBufPointer ptr_; + }; + + private: + rcbuf_t* buf_ = nullptr; + bool internalizable_ = false; +}; + +// The ng libraries use nearly identical structs to represent +// received http headers. The NgHeader class wraps those in a +// consistent way and allows converting the name and value to +// v8 strings. The template is given a Traits type that provides +// the NgRcBufPointer type, the NgLibMemoryManager to use for +// memory tracking, and implementation of static utility functions. +// See Http2HeaderTraits in node_http2.h for an example. +template +class NgHeader : public MemoryRetainer { + public: + typedef typename T::rcbufferpointer_t rcbufferpointer_t; + typedef typename T::rcbufferpointer_t::rcbuf_t rcbuf_t; + typedef typename T::allocator_t allocator_t; + + inline static bool IsZeroLength(rcbuf_t* name, rcbuf_t* value); + inline static bool IsZeroLength(int32_t token, rcbuf_t* name, rcbuf_t* value); + inline NgHeader( + Environment* env, + rcbuf_t* name, + rcbuf_t* value, + uint8_t flags); + inline NgHeader( + Environment* env, + int32_t token, + rcbuf_t* name, + rcbuf_t* value, + uint8_t flags); + inline NgHeader(NgHeader&& other) noexcept; + + // Calling GetName and GetValue will have the effect of releasing + // control over the reference counted buffer from this NgHeader + // object to the v8 string. Once the v8 string is garbage collected, + // the reference counter will be decremented. + + inline v8::MaybeLocal GetName(allocator_t* allocator) const; + inline v8::MaybeLocal GetValue(allocator_t* allocator) const; + + inline std::string name() const; + inline std::string value() const; + inline size_t length() const; + + void MemoryInfo(MemoryTracker* tracker) const override { + tracker->TrackField("name", name_); + tracker->TrackField("value", value_); + } + + SET_MEMORY_INFO_NAME(NgHeader) + SET_SELF_SIZE(NgHeader) + + std::string ToString() const { + std::string ret = name(); + ret += " = "; + ret += value(); + return ret; + } + + private: + Environment* env_; + rcbufferpointer_t name_; + rcbufferpointer_t value_; + int32_t token_ = -1; + uint8_t flags_ = 0; +}; + +inline size_t GetServerMaxHeaderPairs(size_t max_header_pairs); +inline size_t GetClientMaxHeaderPairs(size_t max_header_pairs); + +} // namespace node + +#endif // defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS + +#endif // SRC_NODE_HTTP_COMMON_H_ diff --git a/src/node_http_parser.cc b/src/node_http_parser.cc index a8c48999c57..75d7e89a91c 100644 --- a/src/node_http_parser.cc +++ b/src/node_http_parser.cc @@ -330,6 +330,7 @@ class Parser : public AsyncWrap, public StreamListener { this, InternalCallbackScope::kSkipTaskQueues); head_response = cb.As()->Call( env()->context(), object(), arraysize(argv), argv); + if (head_response.IsEmpty()) callback_scope.MarkAsFailed(); } int64_t val; @@ -401,6 +402,7 @@ class Parser : public AsyncWrap, public StreamListener { InternalCallbackScope callback_scope( this, InternalCallbackScope::kSkipTaskQueues); r = cb.As()->Call(env()->context(), object(), 0, nullptr); + if (r.IsEmpty()) callback_scope.MarkAsFailed(); } if (r.IsEmpty()) { @@ -871,7 +873,7 @@ void InitializeHttpParser(Local target, void* priv) { Environment* env = Environment::GetCurrent(context); 
Local t = env->NewFunctionTemplate(Parser::New); - t->InstanceTemplate()->SetInternalFieldCount(1); + t->InstanceTemplate()->SetInternalFieldCount(Parser::kInternalFieldCount); t->SetClassName(FIXED_ONE_BYTE_STRING(env->isolate(), "HTTPParser")); t->Set(FIXED_ONE_BYTE_STRING(env->isolate(), "REQUEST"), diff --git a/src/node_i18n.cc b/src/node_i18n.cc index 46c6ef39f86..a32ddf2066b 100644 --- a/src/node_i18n.cc +++ b/src/node_i18n.cc @@ -172,7 +172,7 @@ class ConverterObject : public BaseObject, Converter { HandleScope scope(env->isolate()); Local t = ObjectTemplate::New(env->isolate()); - t->SetInternalFieldCount(1); + t->SetInternalFieldCount(ConverterObject::kInternalFieldCount); Local obj; if (!t->NewInstance(env->context()).ToLocal(&obj)) return; diff --git a/src/node_internals.h b/src/node_internals.h index 8d63f023c1f..7dcbf65f8e6 100644 --- a/src/node_internals.h +++ b/src/node_internals.h @@ -99,6 +99,7 @@ std::string GetProcessTitle(const char* default_title); std::string GetHumanReadableProcessName(); void InitializeContextRuntime(v8::Local); +bool InitializePrimordials(v8::Local context); namespace task_queue { void PromiseRejectCallback(v8::PromiseRejectMessage message); @@ -198,6 +199,7 @@ static v8::MaybeLocal New(Environment* env, v8::MaybeLocal InternalMakeCallback( Environment* env, + v8::Local resource, v8::Local recv, const v8::Local callback, int argc, diff --git a/src/node_main.cc b/src/node_main.cc index e92c0df9429..00f3f2a4836 100644 --- a/src/node_main.cc +++ b/src/node_main.cc @@ -28,9 +28,12 @@ #include int wmain(int argc, wchar_t* wargv[]) { - if (!IsWindows7OrGreater()) { - fprintf(stderr, "This application is only supported on Windows 7, " - "Windows Server 2008 R2, or higher."); + // Windows Server 2012 (not R2) is supported until 10/10/2023, so we allow it + // to run in the experimental support tier. 
+ if (!IsWindows8Point1OrGreater() && + !(IsWindowsServer() && IsWindows8OrGreater())) { + fprintf(stderr, "This application is only supported on Windows 8.1, " + "Windows Server 2012 R2, or higher."); exit(ERROR_EXE_MACHINE_TYPE_MISMATCH); } diff --git a/src/node_main_instance.cc b/src/node_main_instance.cc index d53eaa7329b..6f240d7e809 100644 --- a/src/node_main_instance.cc +++ b/src/node_main_instance.cc @@ -122,14 +122,7 @@ int NodeMainInstance::Run() { Context::Scope context_scope(env->context()); if (exit_code == 0) { - { - InternalCallbackScope callback_scope( - env.get(), - Object::New(isolate_), - { 1, 0 }, - InternalCallbackScope::kSkipAsyncHooks); - LoadEnvironment(env.get()); - } + LoadEnvironment(env.get()); env->set_trace_sync_io(env->options()->trace_sync_io); diff --git a/src/node_messaging.cc b/src/node_messaging.cc index 248e0f041de..c52a0698744 100644 --- a/src/node_messaging.cc +++ b/src/node_messaging.cc @@ -935,7 +935,8 @@ Local GetMessagePortConstructorTemplate(Environment* env) { { Local m = env->NewFunctionTemplate(MessagePort::New); m->SetClassName(env->message_port_constructor_string()); - m->InstanceTemplate()->SetInternalFieldCount(1); + m->InstanceTemplate()->SetInternalFieldCount( + MessagePort::kInternalFieldCount); m->Inherit(HandleWrap::GetConstructorTemplate(env)); env->SetProtoMethod(m, "postMessage", MessagePort::PostMessage); diff --git a/src/node_options.cc b/src/node_options.cc index 93f1e465e26..3bf1031f166 100644 --- a/src/node_options.cc +++ b/src/node_options.cc @@ -616,6 +616,10 @@ PerIsolateOptionsParser::PerIsolateOptionsParser( "disallow eval and friends", V8Option{}, kAllowedInEnvironment); + AddOption("--jitless", + "disable runtime allocation of executable memory", + V8Option{}, + kAllowedInEnvironment); #ifdef NODE_REPORT AddOption("--report-uncaught-exception", diff --git a/src/node_os.cc b/src/node_os.cc index 12a4ec3551a..b64b75fa6b9 100644 --- a/src/node_os.cc +++ b/src/node_os.cc @@ -75,8 +75,7 @@ static void GetHostname(const FunctionCallbackInfo& args) { .ToLocalChecked()); } - -static void GetOSType(const FunctionCallbackInfo& args) { +static void GetOSInformation(const FunctionCallbackInfo& args) { Environment* env = Environment::GetCurrent(args); uv_utsname_t info; int err = uv_os_uname(&info); @@ -87,29 +86,18 @@ static void GetOSType(const FunctionCallbackInfo& args) { return args.GetReturnValue().SetUndefined(); } - args.GetReturnValue().Set( - String::NewFromUtf8(env->isolate(), info.sysname, NewStringType::kNormal) - .ToLocalChecked()); -} - - -static void GetOSRelease(const FunctionCallbackInfo& args) { - Environment* env = Environment::GetCurrent(args); - uv_utsname_t info; - int err = uv_os_uname(&info); - - if (err != 0) { - CHECK_GE(args.Length(), 1); - env->CollectUVExceptionInfo(args[args.Length() - 1], err, "uv_os_uname"); - return args.GetReturnValue().SetUndefined(); - } + // [sysname, version, release] + Local osInformation[] = { + String::NewFromUtf8(env->isolate(), info.sysname).ToLocalChecked(), + String::NewFromUtf8(env->isolate(), info.version).ToLocalChecked(), + String::NewFromUtf8(env->isolate(), info.release).ToLocalChecked() + }; - args.GetReturnValue().Set( - String::NewFromUtf8(env->isolate(), info.release, NewStringType::kNormal) - .ToLocalChecked()); + args.GetReturnValue().Set(Array::New(env->isolate(), + osInformation, + arraysize(osInformation))); } - static void GetCPUInfo(const FunctionCallbackInfo& args) { Environment* env = Environment::GetCurrent(args); Isolate* isolate = env->isolate(); @@ 
-398,13 +386,12 @@ void Initialize(Local target, env->SetMethod(target, "getTotalMem", GetTotalMemory); env->SetMethod(target, "getFreeMem", GetFreeMemory); env->SetMethod(target, "getCPUs", GetCPUInfo); - env->SetMethod(target, "getOSType", GetOSType); - env->SetMethod(target, "getOSRelease", GetOSRelease); env->SetMethod(target, "getInterfaceAddresses", GetInterfaceAddresses); env->SetMethod(target, "getHomeDirectory", GetHomeDirectory); env->SetMethod(target, "getUserInfo", GetUserInfo); env->SetMethod(target, "setPriority", SetPriority); env->SetMethod(target, "getPriority", GetPriority); + env->SetMethod(target, "getOSInformation", GetOSInformation); target->Set(env->context(), FIXED_ONE_BYTE_STRING(env->isolate(), "isBigEndian"), Boolean::New(env->isolate(), IsBigEndian())).Check(); diff --git a/src/node_perf.cc b/src/node_perf.cc index 68b015f33ed..21766d3b89a 100644 --- a/src/node_perf.cc +++ b/src/node_perf.cc @@ -640,7 +640,8 @@ void Initialize(Local target, Local eldh = env->NewFunctionTemplate(ELDHistogramNew); eldh->SetClassName(eldh_classname); - eldh->InstanceTemplate()->SetInternalFieldCount(1); + eldh->InstanceTemplate()->SetInternalFieldCount( + ELDHistogram::kInternalFieldCount); env->SetProtoMethod(eldh, "exceeds", ELDHistogramExceeds); env->SetProtoMethod(eldh, "min", ELDHistogramMin); env->SetProtoMethod(eldh, "max", ELDHistogramMax); diff --git a/src/node_perf.h b/src/node_perf.h index 4f5ca93f223..ac65533a772 100644 --- a/src/node_perf.h +++ b/src/node_perf.h @@ -161,7 +161,7 @@ class ELDHistogram : public HandleWrap, public Histogram { exceeds_ = 0; prev_ = 0; } - int64_t Exceeds() { return exceeds_; } + int64_t Exceeds() const { return exceeds_; } void MemoryInfo(MemoryTracker* tracker) const override { tracker->TrackFieldWithSize("histogram", GetMemorySize()); diff --git a/src/node_serdes.cc b/src/node_serdes.cc index a2d185c4167..bcdcd19b261 100644 --- a/src/node_serdes.cc +++ b/src/node_serdes.cc @@ -451,7 +451,8 @@ void Initialize(Local target, Local ser = env->NewFunctionTemplate(SerializerContext::New); - ser->InstanceTemplate()->SetInternalFieldCount(1); + ser->InstanceTemplate()->SetInternalFieldCount( + SerializerContext::kInternalFieldCount); env->SetProtoMethod(ser, "writeHeader", SerializerContext::WriteHeader); env->SetProtoMethod(ser, "writeValue", SerializerContext::WriteValue); @@ -477,7 +478,8 @@ void Initialize(Local target, Local des = env->NewFunctionTemplate(DeserializerContext::New); - des->InstanceTemplate()->SetInternalFieldCount(1); + des->InstanceTemplate()->SetInternalFieldCount( + DeserializerContext::kInternalFieldCount); env->SetProtoMethod(des, "readHeader", DeserializerContext::ReadHeader); env->SetProtoMethod(des, "readValue", DeserializerContext::ReadValue); diff --git a/src/node_sockaddr-inl.h b/src/node_sockaddr-inl.h new file mode 100644 index 00000000000..a9d0ed061a1 --- /dev/null +++ b/src/node_sockaddr-inl.h @@ -0,0 +1,170 @@ +#ifndef SRC_NODE_SOCKADDR_INL_H_ +#define SRC_NODE_SOCKADDR_INL_H_ + +#if defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS + +#include "node.h" +#include "node_internals.h" +#include "node_sockaddr.h" +#include "util-inl.h" + +#include + +namespace node { + +static constexpr uint32_t kLabelMask = 0xFFFFF; + +inline void hash_combine(size_t* seed) { } + +template +inline void hash_combine(size_t* seed, const T& value, Args... 
rest) { + *seed ^= std::hash{}(value) + 0x9e3779b9 + (*seed << 6) + (*seed >> 2); + hash_combine(seed, rest...); +} + +bool SocketAddress::is_numeric_host(const char* hostname) { + return is_numeric_host(hostname, AF_INET) || + is_numeric_host(hostname, AF_INET6); +} + +bool SocketAddress::is_numeric_host(const char* hostname, int family) { + in6_addr dst; + return inet_pton(family, hostname, &dst) == 1; +} + +int SocketAddress::GetPort(const sockaddr* addr) { + CHECK(addr->sa_family == AF_INET || addr->sa_family == AF_INET6); + return ntohs(addr->sa_family == AF_INET ? + reinterpret_cast(addr)->sin_port : + reinterpret_cast(addr)->sin6_port); +} + +int SocketAddress::GetPort(const sockaddr_storage* addr) { + return GetPort(reinterpret_cast(addr)); +} + +std::string SocketAddress::GetAddress(const sockaddr* addr) { + CHECK(addr->sa_family == AF_INET || addr->sa_family == AF_INET6); + char host[INET6_ADDRSTRLEN]; + const void* src = addr->sa_family == AF_INET ? + static_cast( + &(reinterpret_cast(addr)->sin_addr)) : + static_cast( + &(reinterpret_cast(addr)->sin6_addr)); + uv_inet_ntop(addr->sa_family, src, host, INET6_ADDRSTRLEN); + return std::string(host); +} + +std::string SocketAddress::GetAddress(const sockaddr_storage* addr) { + return GetAddress(reinterpret_cast(addr)); +} + +size_t SocketAddress::GetLength(const sockaddr* addr) { + return addr->sa_family == AF_INET ? + sizeof(sockaddr_in) : sizeof(sockaddr_in6); +} + +size_t SocketAddress::GetLength(const sockaddr_storage* addr) { + return GetLength(reinterpret_cast(addr)); +} + +SocketAddress::SocketAddress(const sockaddr* addr) { + memcpy(&address_, addr, GetLength(addr)); +} + +SocketAddress::SocketAddress(const SocketAddress& addr) { + memcpy(&address_, &addr.address_, addr.length()); +} + +SocketAddress& SocketAddress::operator=(const sockaddr* addr) { + memcpy(&address_, addr, GetLength(addr)); + return *this; +} + +SocketAddress& SocketAddress::operator=(const SocketAddress& addr) { + memcpy(&address_, &addr.address_, addr.length()); + return *this; +} + +const sockaddr& SocketAddress::operator*() const { + return *this->data(); +} + +const sockaddr* SocketAddress::operator->() const { + return this->data(); +} + +size_t SocketAddress::length() const { + return GetLength(&address_); +} + +const sockaddr* SocketAddress::data() const { + return reinterpret_cast(&address_); +} + +const uint8_t* SocketAddress::raw() const { + return reinterpret_cast(&address_); +} + +sockaddr* SocketAddress::storage() { + return reinterpret_cast(&address_); +} + +int SocketAddress::family() const { + return address_.ss_family; +} + +std::string SocketAddress::address() const { + return GetAddress(&address_); +} + +int SocketAddress::port() const { + return GetPort(&address_); +} + +uint32_t SocketAddress::flow_label() const { + if (family() != AF_INET6) + return 0; + const sockaddr_in6* in = reinterpret_cast(data()); + return in->sin6_flowinfo; +} + +void SocketAddress::set_flow_label(uint32_t label) { + if (family() != AF_INET6) + return; + CHECK_LE(label, kLabelMask); + sockaddr_in6* in = reinterpret_cast(&address_); + in->sin6_flowinfo = label; +} + +std::string SocketAddress::ToString() const { + if (family() != AF_INET && family() != AF_INET6) return ""; + return (family() == AF_INET6 ? 
+ std::string("[") + address() + "]:" : + address() + ":") + + std::to_string(port()); +} + +void SocketAddress::Update(uint8_t* data, size_t len) { + CHECK_LE(len, sizeof(address_)); + memcpy(&address_, data, len); +} + +v8::Local SocketAddress::ToJS( + Environment* env, + v8::Local info) const { + return AddressToJS(env, data(), info); +} + +bool SocketAddress::operator==(const SocketAddress& other) const { + if (family() != other.family()) return false; + return memcmp(raw(), other.raw(), length()) == 0; +} + +bool SocketAddress::operator!=(const SocketAddress& other) const { + return !(*this == other); +} +} // namespace node + +#endif // NODE_WANT_INTERNALS +#endif // SRC_NODE_SOCKADDR_INL_H_ diff --git a/src/node_sockaddr.cc b/src/node_sockaddr.cc new file mode 100644 index 00000000000..74fe123529a --- /dev/null +++ b/src/node_sockaddr.cc @@ -0,0 +1,95 @@ +#include "node_sockaddr-inl.h" // NOLINT(build/include) +#include "uv.h" + +namespace node { + +namespace { +template +SocketAddress FromUVHandle(F fn, const T& handle) { + SocketAddress addr; + int len = sizeof(sockaddr_storage); + if (fn(&handle, addr.storage(), &len) == 0) + CHECK_EQ(static_cast(len), addr.length()); + else + addr.storage()->sa_family = 0; + return addr; +} +} // namespace + +bool SocketAddress::ToSockAddr( + int32_t family, + const char* host, + uint32_t port, + sockaddr_storage* addr) { + switch (family) { + case AF_INET: + return uv_ip4_addr( + host, + port, + reinterpret_cast(addr)) == 0; + case AF_INET6: + return uv_ip6_addr( + host, + port, + reinterpret_cast(addr)) == 0; + default: + UNREACHABLE(); + } +} + +bool SocketAddress::New( + const char* host, + uint32_t port, + SocketAddress* addr) { + return New(AF_INET, host, port, addr) || New(AF_INET6, host, port, addr); +} + +bool SocketAddress::New( + int32_t family, + const char* host, + uint32_t port, + SocketAddress* addr) { + return ToSockAddr(family, host, port, + reinterpret_cast(addr->storage())); +} + +size_t SocketAddress::Hash::operator()(const SocketAddress& addr) const { + size_t hash = 0; + switch (addr.family()) { + case AF_INET: { + const sockaddr_in* ipv4 = + reinterpret_cast(addr.raw()); + hash_combine(&hash, ipv4->sin_port, ipv4->sin_addr.s_addr); + break; + } + case AF_INET6: { + const sockaddr_in6* ipv6 = + reinterpret_cast(addr.raw()); + const uint64_t* a = + reinterpret_cast(&ipv6->sin6_addr); + hash_combine(&hash, ipv6->sin6_port, a[0], a[1]); + break; + } + default: + UNREACHABLE(); + } + return hash; +} + +SocketAddress SocketAddress::FromSockName(const uv_tcp_t& handle) { + return FromUVHandle(uv_tcp_getsockname, handle); +} + +SocketAddress SocketAddress::FromSockName(const uv_udp_t& handle) { + return FromUVHandle(uv_udp_getsockname, handle); +} + +SocketAddress SocketAddress::FromPeerName(const uv_tcp_t& handle) { + return FromUVHandle(uv_tcp_getpeername, handle); +} + +SocketAddress SocketAddress::FromPeerName(const uv_udp_t& handle) { + return FromUVHandle(uv_udp_getpeername, handle); +} + +} // namespace node diff --git a/src/node_sockaddr.h b/src/node_sockaddr.h new file mode 100644 index 00000000000..2e3ae09ce3b --- /dev/null +++ b/src/node_sockaddr.h @@ -0,0 +1,122 @@ +#ifndef SRC_NODE_SOCKADDR_H_ +#define SRC_NODE_SOCKADDR_H_ + +#if defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS + +#include "env.h" +#include "memory_tracker.h" +#include "node.h" +#include "uv.h" +#include "v8.h" + +#include +#include + +namespace node { + +class SocketAddress : public MemoryRetainer { + public: + struct Hash { + size_t 
operator()(const SocketAddress& addr) const; + }; + + inline bool operator==(const SocketAddress& other) const; + inline bool operator!=(const SocketAddress& other) const; + + inline static bool is_numeric_host(const char* hostname); + inline static bool is_numeric_host(const char* hostname, int family); + + // Returns true if converting {family, host, port} to *addr succeeded. + static bool ToSockAddr( + int32_t family, + const char* host, + uint32_t port, + sockaddr_storage* addr); + + // Returns true if converting {family, host, port} to *addr succeeded. + static bool New( + int32_t family, + const char* host, + uint32_t port, + SocketAddress* addr); + + static bool New( + const char* host, + uint32_t port, + SocketAddress* addr); + + // Returns the port for an IPv4 or IPv6 address. + inline static int GetPort(const sockaddr* addr); + inline static int GetPort(const sockaddr_storage* addr); + + // Returns the numeric host as a string for an IPv4 or IPv6 address. + inline static std::string GetAddress(const sockaddr* addr); + inline static std::string GetAddress(const sockaddr_storage* addr); + + // Returns the struct length for an IPv4, IPv6 or UNIX domain. + inline static size_t GetLength(const sockaddr* addr); + inline static size_t GetLength(const sockaddr_storage* addr); + + SocketAddress() = default; + + inline explicit SocketAddress(const sockaddr* addr); + inline SocketAddress(const SocketAddress& addr); + inline SocketAddress& operator=(const sockaddr* other); + inline SocketAddress& operator=(const SocketAddress& other); + + inline const sockaddr& operator*() const; + inline const sockaddr* operator->() const; + + inline const sockaddr* data() const; + inline const uint8_t* raw() const; + inline sockaddr* storage(); + inline size_t length() const; + + inline int family() const; + inline std::string address() const; + inline int port() const; + + // If the SocketAddress is an IPv6 address, returns the + // current value of the IPv6 flow label, if set. Otherwise + // returns 0. + inline uint32_t flow_label() const; + + // If the SocketAddress is an IPv6 address, sets the + // current value of the IPv6 flow label. If not an + // IPv6 address, set_flow_label is a no-op. Note that + // while the flow label is represented as a uint32_t, + // it is strictly limited to 20 bits; this will assert + // if any value larger than 20 bits is specified. + // (A usage sketch follows below.)
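A minimal, self-contained sketch of the 20-bit constraint described above, assuming a POSIX socket API. `kLabelMask` mirrors the constant defined in node_sockaddr-inl.h, and the direct store into `sin6_flowinfo` mirrors what `set_flow_label()` does; the `main()` harness and the sample label value are invented for illustration:

```cpp
#include <arpa/inet.h>
#include <netinet/in.h>
#include <cassert>
#include <cstdint>
#include <cstdio>

// Mirrors the 20-bit flow-label mask defined in node_sockaddr-inl.h.
static constexpr uint32_t kLabelMask = 0xFFFFF;

int main() {
  sockaddr_in6 addr{};
  addr.sin6_family = AF_INET6;
  inet_pton(AF_INET6, "::1", &addr.sin6_addr);

  uint32_t label = 0x12345;     // Sample value; anything above 0xFFFFF is invalid.
  assert(label <= kLabelMask);  // set_flow_label() CHECKs the same bound.
  addr.sin6_flowinfo = label;   // Direct store, as in set_flow_label().

  printf("flow label: 0x%x\n", static_cast<unsigned>(addr.sin6_flowinfo));
  return 0;
}
```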
+ inline void set_flow_label(uint32_t label = 0); + + inline void Update(uint8_t* data, size_t len); + + static SocketAddress FromSockName(const uv_udp_t& handle); + static SocketAddress FromSockName(const uv_tcp_t& handle); + static SocketAddress FromPeerName(const uv_udp_t& handle); + static SocketAddress FromPeerName(const uv_tcp_t& handle); + + inline v8::Local ToJS( + Environment* env, + v8::Local obj = v8::Local()) const; + + inline std::string ToString() const; + + SET_NO_MEMORY_INFO() + SET_MEMORY_INFO_NAME(SocketAddress) + SET_SELF_SIZE(SocketAddress) + + template + using Map = std::unordered_map; + + private: + sockaddr_storage address_; +}; + +} // namespace node + +#endif // NODE_WANT_INTERNALS + +#endif // SRC_NODE_SOCKADDR_H_ diff --git a/src/node_stat_watcher.cc index 0d67eceed54..05c540bbff1 100644 --- a/src/node_stat_watcher.cc +++ b/src/node_stat_watcher.cc @@ -47,7 +47,8 @@ void StatWatcher::Initialize(Environment* env, Local target) { HandleScope scope(env->isolate()); Local t = env->NewFunctionTemplate(StatWatcher::New); - t->InstanceTemplate()->SetInternalFieldCount(1); + t->InstanceTemplate()->SetInternalFieldCount( + StatWatcher::kInternalFieldCount); Local statWatcherString = FIXED_ONE_BYTE_STRING(env->isolate(), "StatWatcher"); t->SetClassName(statWatcherString); diff --git a/src/node_trace_events.cc index f5852076b4e..9adee9e458c 100644 --- a/src/node_trace_events.cc +++ b/src/node_trace_events.cc @@ -129,7 +129,8 @@ void NodeCategorySet::Initialize(Local target, Local category_set = env->NewFunctionTemplate(NodeCategorySet::New); - category_set->InstanceTemplate()->SetInternalFieldCount(1); + category_set->InstanceTemplate()->SetInternalFieldCount( + NodeCategorySet::kInternalFieldCount); env->SetProtoMethod(category_set, "enable", NodeCategorySet::Enable); env->SetProtoMethod(category_set, "disable", NodeCategorySet::Disable); diff --git a/src/node_util.cc index e0ef7d421ff..db9b8ec8d65 100644 --- a/src/node_util.cc +++ b/src/node_util.cc @@ -323,7 +323,8 @@ void Initialize(Local target, FIXED_ONE_BYTE_STRING(env->isolate(), "WeakReference"); Local weak_ref = env->NewFunctionTemplate(WeakReference::New); - weak_ref->InstanceTemplate()->SetInternalFieldCount(1); + weak_ref->InstanceTemplate()->SetInternalFieldCount( + WeakReference::kInternalFieldCount); weak_ref->SetClassName(weak_ref_string); env->SetProtoMethod(weak_ref, "get", WeakReference::Get); env->SetProtoMethod(weak_ref, "incRef", WeakReference::IncRef); diff --git a/src/node_v8.cc index d1fb3666d1a..00bfaf796b1 100644 --- a/src/node_v8.cc +++ b/src/node_v8.cc @@ -218,19 +218,19 @@ void Initialize(Local target, // Heap space names are extracted once and exposed to JavaScript to // avoid excessive creation of heap space name Strings.
HeapSpaceStatistics s; - const Local heap_spaces = Array::New(env->isolate(), - number_of_heap_spaces); + MaybeStackBuffer, 16> heap_spaces(number_of_heap_spaces); for (size_t i = 0; i < number_of_heap_spaces; i++) { env->isolate()->GetHeapSpaceStatistics(&s, i); - Local heap_space_name = String::NewFromUtf8(env->isolate(), - s.space_name(), - NewStringType::kNormal) - .ToLocalChecked(); - heap_spaces->Set(env->context(), i, heap_space_name).Check(); + heap_spaces[i] = String::NewFromUtf8(env->isolate(), + s.space_name(), + NewStringType::kNormal) + .ToLocalChecked(); } target->Set(env->context(), FIXED_ONE_BYTE_STRING(env->isolate(), "kHeapSpaces"), - heap_spaces).Check(); + Array::New(env->isolate(), + heap_spaces.out(), + number_of_heap_spaces)).Check(); env->SetMethod(target, "updateHeapSpaceStatisticsArrayBuffer", diff --git a/src/node_version.h b/src/node_version.h index ebd3cff606f..8f14c6dbfae 100644 --- a/src/node_version.h +++ b/src/node_version.h @@ -93,6 +93,6 @@ // The NAPI_VERSION provided by this version of the runtime. This is the version // which the Node binary being built supports. -#define NAPI_VERSION 5 +#define NAPI_VERSION 6 #endif // SRC_NODE_VERSION_H_ diff --git a/src/node_wasi.cc b/src/node_wasi.cc index 20872c58d60..ed8f6c4fa4c 100644 --- a/src/node_wasi.cc +++ b/src/node_wasi.cc @@ -1810,7 +1810,7 @@ static void Initialize(Local target, Local tmpl = env->NewFunctionTemplate(WASI::New); auto wasi_wrap_string = FIXED_ONE_BYTE_STRING(env->isolate(), "WASI"); - tmpl->InstanceTemplate()->SetInternalFieldCount(1); + tmpl->InstanceTemplate()->SetInternalFieldCount(WASI::kInternalFieldCount); tmpl->SetClassName(wasi_wrap_string); env->SetProtoMethod(tmpl, "args_get", WASI::ArgsGet); diff --git a/src/node_watchdog.cc b/src/node_watchdog.cc index 4cc75c31604..22b09e83b79 100644 --- a/src/node_watchdog.cc +++ b/src/node_watchdog.cc @@ -34,6 +34,7 @@ namespace node { using v8::Context; using v8::FunctionCallbackInfo; using v8::FunctionTemplate; +using v8::Local; using v8::Object; using v8::Value; @@ -121,7 +122,8 @@ SignalPropagation SigintWatchdog::HandleSigint() { void TraceSigintWatchdog::Init(Environment* env, Local target) { Local constructor = env->NewFunctionTemplate(New); - constructor->InstanceTemplate()->SetInternalFieldCount(1); + constructor->InstanceTemplate()->SetInternalFieldCount( + TraceSigintWatchdog::kInternalFieldCount); Local js_sigint_watch_dog = FIXED_ONE_BYTE_STRING(env->isolate(), "TraceSigintWatchdog"); constructor->SetClassName(js_sigint_watch_dog); diff --git a/src/node_watchdog.h b/src/node_watchdog.h index ec44c09f517..8bccfceef3c 100644 --- a/src/node_watchdog.h +++ b/src/node_watchdog.h @@ -83,10 +83,10 @@ class SigintWatchdog : public SigintWatchdogBase { class TraceSigintWatchdog : public HandleWrap, public SigintWatchdogBase { public: - static void Init(Environment* env, Local target); + static void Init(Environment* env, v8::Local target); static void New(const v8::FunctionCallbackInfo& args); - static void Start(const v8::FunctionCallbackInfo& args); - static void Stop(const v8::FunctionCallbackInfo& args); + static void Start(const v8::FunctionCallbackInfo& args); + static void Stop(const v8::FunctionCallbackInfo& args); SignalPropagation HandleSigint() override; @@ -99,7 +99,7 @@ class TraceSigintWatchdog : public HandleWrap, public SigintWatchdogBase { private: enum class SignalFlags { None, FromIdle, FromInterrupt }; - TraceSigintWatchdog(Environment* env, Local object); + TraceSigintWatchdog(Environment* env, v8::Local object); 
void HandleInterrupt(); bool interrupting = false; diff --git a/src/node_worker.cc b/src/node_worker.cc index 22c69ea6fb0..6ff1a0afe99 100644 --- a/src/node_worker.cc +++ b/src/node_worker.cc @@ -31,6 +31,7 @@ using v8::Integer; using v8::Isolate; using v8::Local; using v8::Locker; +using v8::Maybe; using v8::MaybeLocal; using v8::Null; using v8::Number; @@ -134,7 +135,16 @@ class WorkerThreadData { public: explicit WorkerThreadData(Worker* w) : w_(w) { - CHECK_EQ(uv_loop_init(&loop_), 0); + int ret = uv_loop_init(&loop_); + if (ret != 0) { + char err_buf[128]; + uv_err_name_r(ret, err_buf, sizeof(err_buf)); + w->custom_error_ = "ERR_WORKER_INIT_FAILED"; + w->custom_error_str_ = err_buf; + w->loop_init_failed_ = true; + w->stopped_ = true; + return; + } std::shared_ptr allocator = ArrayBufferAllocator::Create(); @@ -147,6 +157,8 @@ class WorkerThreadData { Isolate* isolate = Isolate::Allocate(); if (isolate == nullptr) { w->custom_error_ = "ERR_WORKER_OUT_OF_MEMORY"; + w->custom_error_str_ = "Failed to create new Isolate"; + w->stopped_ = true; return; } @@ -204,11 +216,14 @@ class WorkerThreadData { isolate->Dispose(); // Wait until the platform has cleaned up all relevant resources. - while (!platform_finished) + while (!platform_finished) { + CHECK(!w_->loop_init_failed_); uv_run(&loop_, UV_RUN_ONCE); + } + } + if (!w_->loop_init_failed_) { + CheckedUvLoopClose(&loop_); } - - CheckedUvLoopClose(&loop_); } private: @@ -223,6 +238,7 @@ size_t Worker::NearHeapLimit(void* data, size_t current_heap_limit, size_t initial_heap_limit) { Worker* worker = static_cast(data); worker->custom_error_ = "ERR_WORKER_OUT_OF_MEMORY"; + worker->custom_error_str_ = "JS heap out of memory"; worker->Exit(1); // Give the current GC some extra leeway to let it finish rather than // crash hard. We are not going to perform further allocations anyway. @@ -242,6 +258,7 @@ void Worker::Run() { WorkerThreadData data(this); if (isolate_ == nullptr) return; + CHECK(!data.w_->loop_init_failed_); Debug(this, "Starting worker with id %llu", thread_id_); { @@ -287,9 +304,8 @@ void Worker::Run() { TryCatch try_catch(isolate_); context = NewContext(isolate_); if (context.IsEmpty()) { - // TODO(addaleax): Inform the target about the actual underlying - // failure. custom_error_ = "ERR_WORKER_OUT_OF_MEMORY"; + custom_error_str_ = "Failed to create new Context"; return; } } @@ -326,11 +342,6 @@ void Worker::Run() { env_->InitializeInspector(std::move(inspector_parent_handle_)); #endif HandleScope handle_scope(isolate_); - InternalCallbackScope callback_scope( - env_.get(), - Object::New(isolate_), - { 1, 0 }, - InternalCallbackScope::kSkipAsyncHooks); if (!env_->RunBootstrapping().IsEmpty()) { CreateEnvMessagePort(env_.get()); @@ -417,10 +428,14 @@ void Worker::JoinThread() { Undefined(env()->isolate())).Check(); Local args[] = { - Integer::New(env()->isolate(), exit_code_), - custom_error_ != nullptr ? - OneByteString(env()->isolate(), custom_error_).As() : - Null(env()->isolate()).As(), + Integer::New(env()->isolate(), exit_code_), + custom_error_ != nullptr + ? OneByteString(env()->isolate(), custom_error_).As() + : Null(env()->isolate()).As(), + !custom_error_str_.empty() + ? 
OneByteString(env()->isolate(), custom_error_str_.c_str()) + .As() + : Null(env()->isolate()).As(), }; MakeCallback(env()->onexit_string(), arraysize(args), args); @@ -482,14 +497,9 @@ void Worker::New(const FunctionCallbackInfo& args) { if (args[1]->IsObject() || args[2]->IsArray()) { per_isolate_opts.reset(new PerIsolateOptions()); - HandleEnvOptions( - per_isolate_opts->per_env, [isolate, &env_vars](const char* name) { - MaybeLocal value = - env_vars->Get(isolate, OneByteString(isolate, name)); - return value.IsEmpty() ? std::string{} - : std::string(*String::Utf8Value( - isolate, value.ToLocalChecked())); - }); + HandleEnvOptions(per_isolate_opts->per_env, [&env_vars](const char* name) { + return env_vars->Get(name).FromMaybe(""); + }); #ifndef NODE_WITHOUT_NODE_OPTIONS MaybeLocal maybe_node_opts = @@ -762,7 +772,8 @@ void InitWorker(Local target, { Local w = env->NewFunctionTemplate(Worker::New); - w->InstanceTemplate()->SetInternalFieldCount(1); + w->InstanceTemplate()->SetInternalFieldCount( + Worker::kInternalFieldCount); w->Inherit(AsyncWrap::GetConstructorTemplate(env)); env->SetProtoMethod(w, "startThread", Worker::StartThread); @@ -783,7 +794,8 @@ void InitWorker(Local target, { Local wst = FunctionTemplate::New(env->isolate()); - wst->InstanceTemplate()->SetInternalFieldCount(1); + wst->InstanceTemplate()->SetInternalFieldCount( + WorkerHeapSnapshotTaker::kInternalFieldCount); wst->Inherit(AsyncWrap::GetConstructorTemplate(env)); Local wst_string = diff --git a/src/node_worker.h b/src/node_worker.h index 0c6fd35c0ab..dbd28610994 100644 --- a/src/node_worker.h +++ b/src/node_worker.h @@ -85,6 +85,8 @@ class Worker : public AsyncWrap { bool thread_joined_ = true; const char* custom_error_ = nullptr; + std::string custom_error_str_; + bool loop_init_failed_ = false; int exit_code_ = 0; uint64_t thread_id_ = -1; uintptr_t stack_base_ = 0; diff --git a/src/node_zlib.cc b/src/node_zlib.cc index 1b7fd788b95..eacd710143a 100644 --- a/src/node_zlib.cc +++ b/src/node_zlib.cc @@ -1216,7 +1216,8 @@ struct MakeClass { static void Make(Environment* env, Local target, const char* name) { Local z = env->NewFunctionTemplate(Stream::New); - z->InstanceTemplate()->SetInternalFieldCount(1); + z->InstanceTemplate()->SetInternalFieldCount( + Stream::kInternalFieldCount); z->Inherit(AsyncWrap::GetConstructorTemplate(env)); env->SetProtoMethod(z, "write", Stream::template Write); diff --git a/src/pipe_wrap.cc b/src/pipe_wrap.cc index c2be4320387..c4a5b7cd62e 100644 --- a/src/pipe_wrap.cc +++ b/src/pipe_wrap.cc @@ -74,7 +74,7 @@ void PipeWrap::Initialize(Local target, Local pipeString = FIXED_ONE_BYTE_STRING(env->isolate(), "Pipe"); t->SetClassName(pipeString); t->InstanceTemplate() - ->SetInternalFieldCount(StreamBase::kStreamBaseFieldCount); + ->SetInternalFieldCount(StreamBase::kInternalFieldCount); t->Inherit(LibuvStreamWrap::GetConstructorTemplate(env)); diff --git a/src/process_wrap.cc b/src/process_wrap.cc index a75f271d1c7..1e7de56c6d1 100644 --- a/src/process_wrap.cc +++ b/src/process_wrap.cc @@ -52,7 +52,8 @@ class ProcessWrap : public HandleWrap { void* priv) { Environment* env = Environment::GetCurrent(context); Local constructor = env->NewFunctionTemplate(New); - constructor->InstanceTemplate()->SetInternalFieldCount(1); + constructor->InstanceTemplate()->SetInternalFieldCount( + ProcessWrap::kInternalFieldCount); Local processString = FIXED_ONE_BYTE_STRING(env->isolate(), "Process"); constructor->SetClassName(processString); diff --git a/src/signal_wrap.cc b/src/signal_wrap.cc index 
bc2d9f1e355..2be7ac98341 100644 --- a/src/signal_wrap.cc +++ b/src/signal_wrap.cc @@ -53,7 +53,8 @@ class SignalWrap : public HandleWrap { void* priv) { Environment* env = Environment::GetCurrent(context); Local constructor = env->NewFunctionTemplate(New); - constructor->InstanceTemplate()->SetInternalFieldCount(1); + constructor->InstanceTemplate()->SetInternalFieldCount( + SignalWrap::kInternalFieldCount); Local signalString = FIXED_ONE_BYTE_STRING(env->isolate(), "Signal"); constructor->SetClassName(signalString); diff --git a/src/spawn_sync.cc b/src/spawn_sync.cc index 3b277ad70ad..589b77f6c1e 100644 --- a/src/spawn_sync.cc +++ b/src/spawn_sync.cc @@ -721,18 +721,18 @@ Local SyncProcessRunner::BuildOutputArray() { CHECK(!stdio_pipes_.empty()); EscapableHandleScope scope(env()->isolate()); - Local context = env()->context(); - Local js_output = Array::New(env()->isolate(), stdio_count_); + MaybeStackBuffer, 8> js_output(stdio_pipes_.size()); for (uint32_t i = 0; i < stdio_pipes_.size(); i++) { SyncProcessStdioPipe* h = stdio_pipes_[i].get(); if (h != nullptr && h->writable()) - js_output->Set(context, i, h->GetOutputAsBuffer(env())).Check(); + js_output[i] = h->GetOutputAsBuffer(env()); else - js_output->Set(context, i, Null(env()->isolate())).Check(); + js_output[i] = Null(env()->isolate()); } - return scope.Escape(js_output); + return scope.Escape( + Array::New(env()->isolate(), js_output.out(), js_output.length())); } Maybe SyncProcessRunner::ParseOptions(Local js_value) { diff --git a/src/stream_base-inl.h b/src/stream_base-inl.h index f89eb3a5287..27a9a01c7c2 100644 --- a/src/stream_base-inl.h +++ b/src/stream_base-inl.h @@ -23,18 +23,22 @@ using v8::String; using v8::Value; inline void StreamReq::AttachToObject(v8::Local req_wrap_obj) { - CHECK_EQ(req_wrap_obj->GetAlignedPointerFromInternalField(kStreamReqField), + CHECK_EQ(req_wrap_obj->GetAlignedPointerFromInternalField( + StreamReq::kStreamReqField), nullptr); - req_wrap_obj->SetAlignedPointerInInternalField(kStreamReqField, this); + req_wrap_obj->SetAlignedPointerInInternalField( + StreamReq::kStreamReqField, this); } inline StreamReq* StreamReq::FromObject(v8::Local req_wrap_obj) { return static_cast( - req_wrap_obj->GetAlignedPointerFromInternalField(kStreamReqField)); + req_wrap_obj->GetAlignedPointerFromInternalField( + StreamReq::kStreamReqField)); } inline void StreamReq::Dispose() { - object()->SetAlignedPointerInInternalField(kStreamReqField, nullptr); + object()->SetAlignedPointerInInternalField( + StreamReq::kStreamReqField, nullptr); delete this; } @@ -261,15 +265,17 @@ inline WriteWrap* StreamBase::CreateWriteWrap( } inline void StreamBase::AttachToObject(v8::Local obj) { - obj->SetAlignedPointerInInternalField(kStreamBaseField, this); + obj->SetAlignedPointerInInternalField( + StreamBase::kStreamBaseField, this); } inline StreamBase* StreamBase::FromObject(v8::Local obj) { - if (obj->GetAlignedPointerFromInternalField(0) == nullptr) + if (obj->GetAlignedPointerFromInternalField(StreamBase::kSlot) == nullptr) return nullptr; return static_cast( - obj->GetAlignedPointerFromInternalField(kStreamBaseField)); + obj->GetAlignedPointerFromInternalField( + StreamBase::kStreamBaseField)); } @@ -304,7 +310,7 @@ inline void StreamReq::Done(int status, const char* error_str) { inline void StreamReq::ResetObject(v8::Local obj) { DCHECK_GT(obj->InternalFieldCount(), StreamReq::kStreamReqField); - obj->SetAlignedPointerInInternalField(0, nullptr); // BaseObject field. 
+ obj->SetAlignedPointerInInternalField(StreamReq::kSlot, nullptr); obj->SetAlignedPointerInInternalField(StreamReq::kStreamReqField, nullptr); } diff --git a/src/stream_base.cc index eaccfc995c7..28a3bf65fc0 100644 --- a/src/stream_base.cc +++ b/src/stream_base.cc @@ -340,7 +340,8 @@ MaybeLocal StreamBase::CallJSOnreadMethod(ssize_t nread, AsyncWrap* wrap = GetAsyncWrap(); CHECK_NOT_NULL(wrap); - Local onread = wrap->object()->GetInternalField(kOnReadFunctionField); + Local onread = wrap->object()->GetInternalField( + StreamBase::kOnReadFunctionField); CHECK(onread->IsFunction()); return wrap->MakeCallback(onread.As(), arraysize(argv), argv); } @@ -409,8 +410,11 @@ void StreamBase::AddMethods(Environment* env, Local t) { True(env->isolate())); t->PrototypeTemplate()->SetAccessor( FIXED_ONE_BYTE_STRING(env->isolate(), "onread"), - BaseObject::InternalFieldGet, - BaseObject::InternalFieldSet); + BaseObject::InternalFieldGet< + StreamBase::kOnReadFunctionField>, + BaseObject::InternalFieldSet< + StreamBase::kOnReadFunctionField, + &Value::IsFunction>); } void StreamBase::GetFD(const FunctionCallbackInfo& args) { diff --git a/src/stream_base.h index 3df9e99f6e4..15b83ec91f6 100644 --- a/src/stream_base.h +++ b/src/stream_base.h @@ -29,7 +29,14 @@ using JSMethodFunction = void(const v8::FunctionCallbackInfo& args); class StreamReq { public: - static constexpr int kStreamReqField = 1; + // The kSlot internal field here mirrors BaseObject::InternalFields::kSlot + // because instances derived from StreamReq will also derive from + // BaseObject, and the slots are used for the identical purpose. + enum InternalFields { + kSlot = BaseObject::kSlot, + kStreamReqField = BaseObject::kInternalFieldCount, + kInternalFieldCount + }; explicit StreamReq(StreamBase* stream, v8::Local req_wrap_obj) : stream_(stream) { @@ -275,10 +282,15 @@ class StreamResource { class StreamBase : public StreamResource { public: - // 0 is reserved for the BaseObject pointer. - static constexpr int kStreamBaseField = 1; - static constexpr int kOnReadFunctionField = 2; - static constexpr int kStreamBaseFieldCount = 3; + // The kSlot field here mirrors that of BaseObject::InternalFields::kSlot + // because instances deriving from StreamBase generally also derive from + // BaseObject (it's possible for it not to, however). + enum InternalFields { + kSlot = BaseObject::kSlot, + kStreamBaseField = BaseObject::kInternalFieldCount, + kOnReadFunctionField, + kInternalFieldCount + }; static void AddMethods(Environment* env, v8::Local target); diff --git a/src/stream_pipe.cc index d405c4d5cbe..40b094ab593 100644 --- a/src/stream_pipe.cc +++ b/src/stream_pipe.cc @@ -25,7 +25,7 @@ StreamPipe::StreamPipe(StreamBase* source, source->PushStreamListener(&readable_listener_); sink->PushStreamListener(&writable_listener_); - CHECK(sink->HasWantsWrite()); + uses_wants_write_ = sink->HasWantsWrite(); // Set up links between this object and the source/sink objects.
// In particular, this makes sure that they are garbage collected as a group, @@ -66,7 +66,8 @@ void StreamPipe::Unpipe(bool is_in_deletion) { is_closed_ = true; is_reading_ = false; source()->RemoveStreamListener(&readable_listener_); - sink()->RemoveStreamListener(&writable_listener_); + if (pending_writes_ == 0) + sink()->RemoveStreamListener(&writable_listener_); if (is_in_deletion) return; @@ -126,13 +127,16 @@ void StreamPipe::ReadableListener::OnStreamRead(ssize_t nread, // EOF or error; stop reading and pass the error to the previous listener // (which might end up in JS). pipe->is_eof_ = true; + // Cache `sink()` here because the previous listener might do things + // that eventually lead to an `Unpipe()` call. + StreamBase* sink = pipe->sink(); stream()->ReadStop(); CHECK_NOT_NULL(previous_listener_); previous_listener_->OnStreamRead(nread, uv_buf_init(nullptr, 0)); // If we’re not writing, close now. Otherwise, we’ll do that in // `OnStreamAfterWrite()`. - if (!pipe->is_writing_) { - pipe->ShutdownWritable(); + if (pipe->pending_writes_ == 0) { + sink->Shutdown(); pipe->Unpipe(); } return; @@ -142,12 +146,13 @@ void StreamPipe::ReadableListener::OnStreamRead(ssize_t nread, } void StreamPipe::ProcessData(size_t nread, AllocatedBuffer&& buf) { + CHECK(uses_wants_write_ || pending_writes_ == 0); uv_buf_t buffer = uv_buf_init(buf.data(), nread); StreamWriteResult res = sink()->Write(&buffer, 1); + pending_writes_++; if (!res.async) { writable_listener_.OnStreamAfterWrite(nullptr, res.err); } else { - is_writing_ = true; is_reading_ = false; res.wrap->SetAllocatedStorage(std::move(buf)); if (source() != nullptr) @@ -155,19 +160,26 @@ void StreamPipe::ProcessData(size_t nread, AllocatedBuffer&& buf) { } } -void StreamPipe::ShutdownWritable() { - sink()->Shutdown(); -} - void StreamPipe::WritableListener::OnStreamAfterWrite(WriteWrap* w, int status) { StreamPipe* pipe = ContainerOf(&StreamPipe::writable_listener_, this); - pipe->is_writing_ = false; + pipe->pending_writes_--; + if (pipe->is_closed_) { + if (pipe->pending_writes_ == 0) { + Environment* env = pipe->env(); + HandleScope handle_scope(env->isolate()); + Context::Scope context_scope(env->context()); + pipe->MakeCallback(env->oncomplete_string(), 0, nullptr).ToLocalChecked(); + stream()->RemoveStreamListener(this); + } + return; + } + if (pipe->is_eof_) { HandleScope handle_scope(pipe->env()->isolate()); InternalCallbackScope callback_scope(pipe, InternalCallbackScope::kSkipTaskQueues); - pipe->ShutdownWritable(); + pipe->sink()->Shutdown(); pipe->Unpipe(); return; } @@ -179,6 +191,10 @@ void StreamPipe::WritableListener::OnStreamAfterWrite(WriteWrap* w, prev->OnStreamAfterWrite(w, status); return; } + + if (!pipe->uses_wants_write_) { + OnStreamWantsWrite(65536); + } } void StreamPipe::WritableListener::OnStreamAfterShutdown(ShutdownWrap* w, @@ -202,6 +218,7 @@ void StreamPipe::WritableListener::OnStreamDestroy() { StreamPipe* pipe = ContainerOf(&StreamPipe::writable_listener_, this); pipe->sink_destroyed_ = true; pipe->is_eof_ = true; + pipe->pending_writes_ = 0; pipe->Unpipe(); } @@ -242,8 +259,7 @@ void StreamPipe::Start(const FunctionCallbackInfo& args) { StreamPipe* pipe; ASSIGN_OR_RETURN_UNWRAP(&pipe, args.Holder()); pipe->is_closed_ = false; - if (pipe->wanted_data_ > 0) - pipe->writable_listener_.OnStreamWantsWrite(pipe->wanted_data_); + pipe->writable_listener_.OnStreamWantsWrite(65536); } void StreamPipe::Unpipe(const FunctionCallbackInfo& args) { @@ -252,6 +268,18 @@ void StreamPipe::Unpipe(const 
FunctionCallbackInfo& args) { pipe->Unpipe(); } +void StreamPipe::IsClosed(const FunctionCallbackInfo& args) { + StreamPipe* pipe; + ASSIGN_OR_RETURN_UNWRAP(&pipe, args.Holder()); + args.GetReturnValue().Set(pipe->is_closed_); +} + +void StreamPipe::PendingWrites(const FunctionCallbackInfo& args) { + StreamPipe* pipe; + ASSIGN_OR_RETURN_UNWRAP(&pipe, args.Holder()); + args.GetReturnValue().Set(pipe->pending_writes_); +} + namespace { void InitializeStreamPipe(Local target, @@ -266,9 +294,12 @@ void InitializeStreamPipe(Local target, FIXED_ONE_BYTE_STRING(env->isolate(), "StreamPipe"); env->SetProtoMethod(pipe, "unpipe", StreamPipe::Unpipe); env->SetProtoMethod(pipe, "start", StreamPipe::Start); + env->SetProtoMethod(pipe, "isClosed", StreamPipe::IsClosed); + env->SetProtoMethod(pipe, "pendingWrites", StreamPipe::PendingWrites); pipe->Inherit(AsyncWrap::GetConstructorTemplate(env)); pipe->SetClassName(stream_pipe_string); - pipe->InstanceTemplate()->SetInternalFieldCount(1); + pipe->InstanceTemplate()->SetInternalFieldCount( + StreamPipe::kInternalFieldCount); target ->Set(context, stream_pipe_string, pipe->GetFunction(context).ToLocalChecked()) diff --git a/src/stream_pipe.h b/src/stream_pipe.h index 0e155006102..e22abab0115 100644 --- a/src/stream_pipe.h +++ b/src/stream_pipe.h @@ -17,6 +17,8 @@ class StreamPipe : public AsyncWrap { static void New(const v8::FunctionCallbackInfo& args); static void Start(const v8::FunctionCallbackInfo& args); static void Unpipe(const v8::FunctionCallbackInfo& args); + static void IsClosed(const v8::FunctionCallbackInfo& args); + static void PendingWrites(const v8::FunctionCallbackInfo& args); SET_NO_MEMORY_INFO() SET_MEMORY_INFO_NAME(StreamPipe) @@ -26,14 +28,13 @@ class StreamPipe : public AsyncWrap { inline StreamBase* source(); inline StreamBase* sink(); - inline void ShutdownWritable(); - + int pending_writes_ = 0; bool is_reading_ = false; - bool is_writing_ = false; bool is_eof_ = false; bool is_closed_ = true; bool sink_destroyed_ = false; bool source_destroyed_ = false; + bool uses_wants_write_ = false; // Set a default value so that when we’re coming from Start(), we know // that we don’t want to read just yet. 
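One observation on the stream_pipe changes above: replacing the old `is_writing_` boolean with a `pending_writes_` counter lets the pipe represent several in-flight writes at once and defer teardown until all of them have completed. A minimal sketch of that counting pattern, with invented names (`PipeLike`, `StartWrite`) purely for illustration:

```cpp
#include <cassert>
#include <cstdio>

// Hypothetical stand-in for StreamPipe's bookkeeping: a boolean "is_writing"
// cannot represent two overlapping writes, but a counter can.
class PipeLike {
 public:
  void StartWrite() { pending_writes_++; }
  void OnAfterWrite() {
    assert(pending_writes_ > 0);
    pending_writes_--;
    // Only tear down once *all* outstanding writes have completed,
    // mirroring the `pending_writes_ == 0` checks in stream_pipe.cc.
    if (closed_ && pending_writes_ == 0)
      printf("all writes drained; safe to detach listener\n");
  }
  void Close() { closed_ = true; }

 private:
  int pending_writes_ = 0;
  bool closed_ = false;
};

int main() {
  PipeLike pipe;
  pipe.StartWrite();
  pipe.StartWrite();   // two writes in flight at once
  pipe.Close();
  pipe.OnAfterWrite(); // not yet drained
  pipe.OnAfterWrite(); // drained; teardown happens here
}
```

The same drain-before-detach idea is what the `pending_writes_ == 0` checks in `Unpipe()` and `OnStreamAfterWrite()` implement in the hunks above.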
diff --git a/src/stream_wrap.cc b/src/stream_wrap.cc index 21b775401e4..7548516e477 100644 --- a/src/stream_wrap.cc +++ b/src/stream_wrap.cc @@ -64,8 +64,7 @@ void LibuvStreamWrap::Initialize(Local target, }; Local sw = FunctionTemplate::New(env->isolate(), is_construct_call_callback); - sw->InstanceTemplate()->SetInternalFieldCount( - StreamReq::kStreamReqField + 1 + 3); + sw->InstanceTemplate()->SetInternalFieldCount(StreamReq::kInternalFieldCount); Local wrapString = FIXED_ONE_BYTE_STRING(env->isolate(), "ShutdownWrap"); sw->SetClassName(wrapString); @@ -94,7 +93,8 @@ void LibuvStreamWrap::Initialize(Local target, Local ww = FunctionTemplate::New(env->isolate(), is_construct_call_callback); - ww->InstanceTemplate()->SetInternalFieldCount(StreamReq::kStreamReqField + 1); + ww->InstanceTemplate()->SetInternalFieldCount( + StreamReq::kInternalFieldCount); Local writeWrapString = FIXED_ONE_BYTE_STRING(env->isolate(), "WriteWrap"); ww->SetClassName(writeWrapString); @@ -136,7 +136,7 @@ Local LibuvStreamWrap::GetConstructorTemplate( FIXED_ONE_BYTE_STRING(env->isolate(), "LibuvStreamWrap")); tmpl->Inherit(HandleWrap::GetConstructorTemplate(env)); tmpl->InstanceTemplate()->SetInternalFieldCount( - StreamBase::kStreamBaseFieldCount); + StreamBase::kInternalFieldCount); Local get_write_queue_size = FunctionTemplate::New(env->isolate(), GetWriteQueueSize, diff --git a/src/string_bytes.cc b/src/string_bytes.cc index f8d7243e5d6..7ee87a8ebe8 100644 --- a/src/string_bytes.cc +++ b/src/string_bytes.cc @@ -587,7 +587,11 @@ static void force_ascii(const char* src, char* dst, size_t len) { } -static size_t hex_encode(const char* src, size_t slen, char* dst, size_t dlen) { +size_t StringBytes::hex_encode( + const char* src, + size_t slen, + char* dst, + size_t dlen) { // We know how much we'll write, just make sure that there's space. 
CHECK(dlen >= slen * 2 && "not enough space provided for hex encode"); @@ -603,6 +607,12 @@ static size_t hex_encode(const char* src, size_t slen, char* dst, size_t dlen) { return dlen; } +std::string StringBytes::hex_encode(const char* src, size_t slen) { + size_t dlen = slen * 2; + std::string dst(dlen, '\0'); + hex_encode(src, slen, &dst[0], dlen); + return dst; +} #define CHECK_BUFLEN_IN_RANGE(len) \ do { \ diff --git a/src/string_bytes.h index 5ef05fc48cd..69bb828e018 100644 --- a/src/string_bytes.h +++ b/src/string_bytes.h @@ -29,6 +29,8 @@ #include "v8.h" #include "env-inl.h" +#include + namespace node { class StringBytes { @@ -97,6 +99,13 @@ class StringBytes { enum encoding encoding, v8::Local* error); + static size_t hex_encode(const char* src, + size_t slen, + char* dst, + size_t dlen); + + static std::string hex_encode(const char* src, size_t slen); + private: static size_t WriteUCS2(v8::Isolate* isolate, char* buf, diff --git a/src/tcp_wrap.cc index 89c4e215bbe..1aca3a5e6ae 100644 --- a/src/tcp_wrap.cc +++ b/src/tcp_wrap.cc @@ -77,8 +77,7 @@ void TCPWrap::Initialize(Local target, Local t = env->NewFunctionTemplate(New); Local tcpString = FIXED_ONE_BYTE_STRING(env->isolate(), "TCP"); t->SetClassName(tcpString); - t->InstanceTemplate() - ->SetInternalFieldCount(StreamBase::kStreamBaseFieldCount); + t->InstanceTemplate()->SetInternalFieldCount(StreamBase::kInternalFieldCount); // Init properties t->InstanceTemplate()->Set(FIXED_ONE_BYTE_STRING(env->isolate(), "reading"), diff --git a/src/tls_wrap.cc index 82274fde6db..39dcf532a9f 100644 --- a/src/tls_wrap.cc +++ b/src/tls_wrap.cc @@ -587,6 +587,7 @@ void TLSWrap::ClearIn() { AllocatedBuffer data = std::move(pending_cleartext_input_); crypto::MarkPopErrorOnReturn mark_pop_error_on_return; + crypto::NodeBIO::FromBIO(enc_out_)->set_allocate_tls_hint(data.size()); int written = SSL_write(ssl_.get(), data.data(), data.size()); Debug(this, "Writing %zu bytes, written = %d", data.size(), written); CHECK(written == -1 || written == static_cast(data.size())); @@ -701,8 +702,15 @@ int TLSWrap::DoWrite(WriteWrap* w, size_t length = 0; size_t i; - for (i = 0; i < count; i++) + size_t nonempty_i = 0; + size_t nonempty_count = 0; + for (i = 0; i < count; i++) { length += bufs[i].len; + if (bufs[i].len > 0) { + nonempty_i = i; + nonempty_count += 1; + } + } // We want to trigger a Write() on the underlying stream to drive the stream // system, but don't want to encrypt empty buffers into a TLS frame, so see @@ -747,20 +755,34 @@ int TLSWrap::DoWrite(WriteWrap* w, crypto::MarkPopErrorOnReturn mark_pop_error_on_return; int written = 0; - if (count != 1) { + + // It is common for zero-length buffers to be written; don't copy data if + // there is one buffer with data and one or more zero-length buffers. + // _http_outgoing.js writes a zero-length buffer in + // OutgoingMessage.prototype.end. If a large amount of data was supplied + // to end(), there is no sense in allocating and copying it when it could + // just be used. (A sketch of the non-empty-buffer scan follows below.)
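As referenced in the comment above, here is a self-contained sketch of the non-empty-buffer scan: count the buffers that carry data and remember the index of the last one, so that a lone data-carrying buffer can be handed to SSL_write() directly instead of being copied. `Buf` is a stand-in for libuv's `uv_buf_t`, and the helper name is invented; the scan mirrors the `nonempty_i`/`nonempty_count` bookkeeping added in this hunk:

```cpp
#include <cstddef>
#include <cstdio>

// Minimal stand-in for libuv's uv_buf_t.
struct Buf { char* base; size_t len; };

// Returns the index of the single non-empty buffer, or -1 if there are
// zero or several non-empty buffers (in which case a copy is needed).
static int FindSingleNonEmpty(const Buf* bufs, size_t count) {
  size_t nonempty_i = 0;
  size_t nonempty_count = 0;
  for (size_t i = 0; i < count; i++) {
    if (bufs[i].len > 0) {
      nonempty_i = i;
      nonempty_count++;
    }
  }
  return nonempty_count == 1 ? static_cast<int>(nonempty_i) : -1;
}

int main() {
  char payload[] = "hello";
  Buf bufs[] = { { nullptr, 0 }, { payload, 5 }, { nullptr, 0 } };
  // The zero-length buffers (e.g. from OutgoingMessage.prototype.end())
  // are skipped; the payload can be written without an extra copy.
  printf("direct-write index: %d\n", FindSingleNonEmpty(bufs, 3));
}
```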
+ + if (nonempty_count != 1) { data = env()->AllocateManaged(length); size_t offset = 0; for (i = 0; i < count; i++) { memcpy(data.data() + offset, bufs[i].base, bufs[i].len); offset += bufs[i].len; } + + crypto::NodeBIO::FromBIO(enc_out_)->set_allocate_tls_hint(length); written = SSL_write(ssl_.get(), data.data(), length); } else { // Only one buffer: try to write directly, only store if it fails - written = SSL_write(ssl_.get(), bufs[0].base, bufs[0].len); + uv_buf_t* buf = &bufs[nonempty_i]; + crypto::NodeBIO::FromBIO(enc_out_)->set_allocate_tls_hint(buf->len); + written = SSL_write(ssl_.get(), buf->base, buf->len); + if (written == -1) { data = env()->AllocateManaged(length); - memcpy(data.data(), bufs[0].base, bufs[0].len); + memcpy(data.data(), buf->base, buf->len); } } @@ -1247,8 +1269,7 @@ void TLSWrap::Initialize(Local target, Local tlsWrapString = FIXED_ONE_BYTE_STRING(env->isolate(), "TLSWrap"); t->SetClassName(tlsWrapString); - t->InstanceTemplate() - ->SetInternalFieldCount(StreamBase::kStreamBaseFieldCount); + t->InstanceTemplate()->SetInternalFieldCount(StreamBase::kInternalFieldCount); Local get_write_queue_size = FunctionTemplate::New(env->isolate(), diff --git a/src/tty_wrap.cc b/src/tty_wrap.cc index 7dface926e4..8536fae3ed7 100644 --- a/src/tty_wrap.cc +++ b/src/tty_wrap.cc @@ -51,8 +51,7 @@ void TTYWrap::Initialize(Local target, Local t = env->NewFunctionTemplate(New); t->SetClassName(ttyString); - t->InstanceTemplate() - ->SetInternalFieldCount(StreamBase::kStreamBaseFieldCount); + t->InstanceTemplate()->SetInternalFieldCount(StreamBase::kInternalFieldCount); t->Inherit(LibuvStreamWrap::GetConstructorTemplate(env)); env->SetProtoMethodNoSideEffect(t, "getWindowSize", TTYWrap::GetWindowSize); diff --git a/src/udp_wrap.cc b/src/udp_wrap.cc index 4a66ce0a1f1..277eb6b81ba 100644 --- a/src/udp_wrap.cc +++ b/src/udp_wrap.cc @@ -22,6 +22,7 @@ #include "udp_wrap.h" #include "env-inl.h" #include "node_buffer.h" +#include "node_sockaddr-inl.h" #include "handle_wrap.h" #include "req_wrap-inl.h" #include "util-inl.h" @@ -69,18 +70,57 @@ SendWrap::SendWrap(Environment* env, } -inline bool SendWrap::have_callback() const { +bool SendWrap::have_callback() const { return have_callback_; } +UDPListener::~UDPListener() { + if (wrap_ != nullptr) + wrap_->set_listener(nullptr); +} + +UDPWrapBase::~UDPWrapBase() { + set_listener(nullptr); +} + +UDPListener* UDPWrapBase::listener() const { + CHECK_NOT_NULL(listener_); + return listener_; +} + +void UDPWrapBase::set_listener(UDPListener* listener) { + if (listener_ != nullptr) + listener_->wrap_ = nullptr; + listener_ = listener; + if (listener_ != nullptr) { + CHECK_NULL(listener_->wrap_); + listener_->wrap_ = this; + } +} + +UDPWrapBase* UDPWrapBase::FromObject(Local obj) { + CHECK_GT(obj->InternalFieldCount(), UDPWrapBase::kUDPWrapBaseField); + return static_cast( + obj->GetAlignedPointerFromInternalField(UDPWrapBase::kUDPWrapBaseField)); +} + +void UDPWrapBase::AddMethods(Environment* env, Local t) { + env->SetProtoMethod(t, "recvStart", RecvStart); + env->SetProtoMethod(t, "recvStop", RecvStop); +} UDPWrap::UDPWrap(Environment* env, Local object) : HandleWrap(env, object, reinterpret_cast(&handle_), AsyncWrap::PROVIDER_UDPWRAP) { + object->SetAlignedPointerInInternalField( + UDPWrapBase::kUDPWrapBaseField, static_cast(this)); + int r = uv_udp_init(env->event_loop(), &handle_); CHECK_EQ(r, 0); // can't fail anyway + + set_listener(this); } @@ -91,7 +131,8 @@ void UDPWrap::Initialize(Local target, Environment* env = 
Environment::GetCurrent(context); Local t = env->NewFunctionTemplate(New); - t->InstanceTemplate()->SetInternalFieldCount(1); + t->InstanceTemplate()->SetInternalFieldCount( + UDPWrapBase::kInternalFieldCount); Local udpString = FIXED_ONE_BYTE_STRING(env->isolate(), "UDP"); t->SetClassName(udpString); @@ -112,6 +153,7 @@ void UDPWrap::Initialize(Local target, Local(), attributes); + UDPWrapBase::AddMethods(env, t); env->SetProtoMethod(t, "open", Open); env->SetProtoMethod(t, "bind", Bind); env->SetProtoMethod(t, "connect", Connect); @@ -120,8 +162,6 @@ void UDPWrap::Initialize(Local target, env->SetProtoMethod(t, "connect6", Connect6); env->SetProtoMethod(t, "send6", Send6); env->SetProtoMethod(t, "disconnect", Disconnect); - env->SetProtoMethod(t, "recvStart", RecvStart); - env->SetProtoMethod(t, "recvStop", RecvStop); env->SetProtoMethod(t, "getpeername", GetSockOrPeerName); env->SetProtoMethod(t, "getsockname", @@ -220,6 +260,9 @@ void UDPWrap::DoBind(const FunctionCallbackInfo& args, int family) { flags); } + if (err == 0) + wrap->listener()->OnAfterBind(); + args.GetReturnValue().Set(err); } @@ -464,14 +507,10 @@ void UDPWrap::DoSend(const FunctionCallbackInfo& args, int family) { CHECK(args[3]->IsBoolean()); } - Local req_wrap_obj = args[0].As(); Local chunks = args[1].As(); // it is faster to fetch the length of the // array in js-land size_t count = args[2].As()->Value(); - const bool have_callback = sendto ? args[5]->IsTrue() : args[3]->IsTrue(); - - size_t msg_size = 0; MaybeStackBuffer bufs(count); @@ -482,7 +521,6 @@ void UDPWrap::DoSend(const FunctionCallbackInfo& args, int family) { size_t length = Buffer::Length(chunk); bufs[i] = uv_buf_init(Buffer::Data(chunk), length); - msg_size += length; } int err = 0; @@ -492,14 +530,36 @@ void UDPWrap::DoSend(const FunctionCallbackInfo& args, int family) { const unsigned short port = args[3].As()->Value(); node::Utf8Value address(env->isolate(), args[4]); err = sockaddr_for_family(family, address.out(), port, &addr_storage); - if (err == 0) { + if (err == 0) addr = reinterpret_cast(&addr_storage); - } } - uv_buf_t* bufs_ptr = *bufs; - if (err == 0 && !UNLIKELY(env->options()->test_udp_no_try_send)) { - err = uv_udp_try_send(&wrap->handle_, bufs_ptr, count, addr); + if (err == 0) { + wrap->current_send_req_wrap_ = args[0].As(); + wrap->current_send_has_callback_ = + sendto ? args[5]->IsTrue() : args[3]->IsTrue(); + + err = wrap->Send(*bufs, count, addr); + + wrap->current_send_req_wrap_.Clear(); + wrap->current_send_has_callback_ = false; + } + + args.GetReturnValue().Set(err); +} + +ssize_t UDPWrap::Send(uv_buf_t* bufs_ptr, + size_t count, + const sockaddr* addr) { + if (IsHandleClosing()) return UV_EBADF; + + size_t msg_size = 0; + for (size_t i = 0; i < count; i++) + msg_size += bufs_ptr[i].len; + + int err = 0; + if (!UNLIKELY(env()->options()->test_udp_no_try_send)) { + err = uv_udp_try_send(&handle_, bufs_ptr, count, addr); if (err == UV_ENOSYS || err == UV_EAGAIN) { err = 0; } else if (err >= 0) { @@ -517,28 +577,41 @@ void UDPWrap::DoSend(const FunctionCallbackInfo& args, int family) { CHECK_EQ(static_cast(err), msg_size); // + 1 so that the JS side can distinguish 0-length async sends from // 0-length sync sends. 
- args.GetReturnValue().Set(static_cast(msg_size) + 1); - return; + return msg_size + 1; } } } if (err == 0) { - AsyncHooks::DefaultTriggerAsyncIdScope trigger_scope(wrap); - SendWrap* req_wrap = new SendWrap(env, req_wrap_obj, have_callback); - req_wrap->msg_size = msg_size; - - err = req_wrap->Dispatch(uv_udp_send, - &wrap->handle_, - bufs_ptr, - count, - addr, - OnSend); + AsyncHooks::DefaultTriggerAsyncIdScope trigger_scope(this); + ReqWrap* req_wrap = listener()->CreateSendWrap(msg_size); + if (req_wrap == nullptr) return UV_ENOSYS; + + err = req_wrap->Dispatch( + uv_udp_send, + &handle_, + bufs_ptr, + count, + addr, + uv_udp_send_cb{[](uv_udp_send_t* req, int status) { + UDPWrap* self = ContainerOf(&UDPWrap::handle_, req->handle); + self->listener()->OnSendDone( + ReqWrap::from_req(req), status); + }}); if (err) delete req_wrap; } - args.GetReturnValue().Set(err); + return err; +} + + +ReqWrap* UDPWrap::CreateSendWrap(size_t msg_size) { + SendWrap* req_wrap = new SendWrap(env(), + current_send_req_wrap_, + current_send_has_callback_); + req_wrap->msg_size = msg_size; + return req_wrap; } @@ -552,31 +625,46 @@ void UDPWrap::Send6(const FunctionCallbackInfo& args) { } -void UDPWrap::RecvStart(const FunctionCallbackInfo& args) { - UDPWrap* wrap; - ASSIGN_OR_RETURN_UNWRAP(&wrap, - args.Holder(), - args.GetReturnValue().Set(UV_EBADF)); - int err = uv_udp_recv_start(&wrap->handle_, OnAlloc, OnRecv); +AsyncWrap* UDPWrap::GetAsyncWrap() { + return this; +} + +SocketAddress UDPWrap::GetPeerName() { + return SocketAddress::FromPeerName(handle_); +} + +SocketAddress UDPWrap::GetSockName() { + return SocketAddress::FromSockName(handle_); +} + +void UDPWrapBase::RecvStart(const FunctionCallbackInfo& args) { + UDPWrapBase* wrap = UDPWrapBase::FromObject(args.Holder()); + args.GetReturnValue().Set(wrap == nullptr ? UV_EBADF : wrap->RecvStart()); +} + +int UDPWrap::RecvStart() { + if (IsHandleClosing()) return UV_EBADF; + int err = uv_udp_recv_start(&handle_, OnAlloc, OnRecv); // UV_EALREADY means that the socket is already bound but that's okay if (err == UV_EALREADY) err = 0; - args.GetReturnValue().Set(err); + return err; } -void UDPWrap::RecvStop(const FunctionCallbackInfo& args) { - UDPWrap* wrap; - ASSIGN_OR_RETURN_UNWRAP(&wrap, - args.Holder(), - args.GetReturnValue().Set(UV_EBADF)); - int r = uv_udp_recv_stop(&wrap->handle_); - args.GetReturnValue().Set(r); +void UDPWrapBase::RecvStop(const FunctionCallbackInfo& args) { + UDPWrapBase* wrap = UDPWrapBase::FromObject(args.Holder()); + args.GetReturnValue().Set(wrap == nullptr ? 
UV_EBADF : wrap->RecvStop()); +} + +int UDPWrap::RecvStop() { + if (IsHandleClosing()) return UV_EBADF; + return uv_udp_recv_stop(&handle_); } -void UDPWrap::OnSend(uv_udp_send_t* req, int status) { - std::unique_ptr req_wrap{static_cast(req->data)}; +void UDPWrap::OnSendDone(ReqWrap* req, int status) { + std::unique_ptr req_wrap{static_cast(req)}; if (req_wrap->have_callback()) { Environment* env = req_wrap->env(); HandleScope handle_scope(env->isolate()); @@ -593,19 +681,30 @@ void UDPWrap::OnSend(uv_udp_send_t* req, int status) { void UDPWrap::OnAlloc(uv_handle_t* handle, size_t suggested_size, uv_buf_t* buf) { - UDPWrap* wrap = static_cast(handle->data); - *buf = wrap->env()->AllocateManaged(suggested_size).release(); + UDPWrap* wrap = ContainerOf(&UDPWrap::handle_, + reinterpret_cast(handle)); + *buf = wrap->listener()->OnAlloc(suggested_size); +} + +uv_buf_t UDPWrap::OnAlloc(size_t suggested_size) { + return env()->AllocateManaged(suggested_size).release(); } void UDPWrap::OnRecv(uv_udp_t* handle, ssize_t nread, - const uv_buf_t* buf_, - const struct sockaddr* addr, + const uv_buf_t* buf, + const sockaddr* addr, unsigned int flags) { - UDPWrap* wrap = static_cast(handle->data); - Environment* env = wrap->env(); + UDPWrap* wrap = ContainerOf(&UDPWrap::handle_, handle); + wrap->listener()->OnRecv(nread, *buf, addr, flags); +} - AllocatedBuffer buf(env, *buf_); +void UDPWrap::OnRecv(ssize_t nread, + const uv_buf_t& buf_, + const sockaddr* addr, + unsigned int flags) { + Environment* env = this->env(); + AllocatedBuffer buf(env, buf_); if (nread == 0 && addr == nullptr) { return; } @@ -613,23 +712,22 @@ void UDPWrap::OnRecv(uv_udp_t* handle, HandleScope handle_scope(env->isolate()); Context::Scope context_scope(env->context()); - Local wrap_obj = wrap->object(); Local argv[] = { Integer::New(env->isolate(), nread), - wrap_obj, + object(), Undefined(env->isolate()), Undefined(env->isolate()) }; if (nread < 0) { - wrap->MakeCallback(env->onmessage_string(), arraysize(argv), argv); + MakeCallback(env->onmessage_string(), arraysize(argv), argv); return; } buf.Resize(nread); argv[2] = buf.ToBuffer().ToLocalChecked(); argv[3] = AddressToJS(env, addr); - wrap->MakeCallback(env->onmessage_string(), arraysize(argv), argv); + MakeCallback(env->onmessage_string(), arraysize(argv), argv); } MaybeLocal UDPWrap::Instantiate(Environment* env, diff --git a/src/udp_wrap.h b/src/udp_wrap.h index 2026dd1dee1..6fed1d2dfea 100644 --- a/src/udp_wrap.h +++ b/src/udp_wrap.h @@ -25,14 +25,99 @@ #if defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS #include "handle_wrap.h" +#include "req_wrap.h" +#include "node_sockaddr.h" #include "uv.h" #include "v8.h" namespace node { -class Environment; +class UDPWrapBase; -class UDPWrap: public HandleWrap { +// A listener that can be attached to an `UDPWrapBase` object and generally +// manages its I/O activity. This is similar to `StreamListener`. +class UDPListener { + public: + virtual ~UDPListener(); + + // Called right before data is received from the socket. Must return a + // buffer suitable for reading data into, that is then passed to OnRecv. + virtual uv_buf_t OnAlloc(size_t suggested_size) = 0; + + // Called right after data is received from the socket, and includes + // information about the source address. If `nread` is negative, an error + // has occurred, and it represents a libuv error code. 
+ virtual void OnRecv(ssize_t nread, + const uv_buf_t& buf, + const sockaddr* addr, + unsigned int flags) = 0; + + // Called when an asynchronous request for writing data is created. + // The `msg_size` value contains the total size of the data to be sent, + // but may be ignored by the implementation of this method. + // The return value is later passed to OnSendDone. + virtual ReqWrap* CreateSendWrap(size_t msg_size) = 0; + + // Called when an asynchronous request for writing data has finished. + // If status is negative, an error has occurred, and it represents a libuv + // error code. + virtual void OnSendDone(ReqWrap* wrap, int status) = 0; + + // Optional callback that is called after the socket has been bound. + virtual void OnAfterBind() {} + + inline UDPWrapBase* udp() const { return wrap_; } + + protected: + UDPWrapBase* wrap_ = nullptr; + + friend class UDPWrapBase; +}; + +class UDPWrapBase { + public: + // While UDPWrapBase itself does not extend from HandleWrap, classes + // derived from it will (like UDPWrap). + enum InternalFields { + kUDPWrapBaseField = HandleWrap::kInternalFieldCount, + kInternalFieldCount + }; + virtual ~UDPWrapBase(); + + // Start emitting OnAlloc() + OnRecv() events on the listener. + virtual int RecvStart() = 0; + + // Stop emitting OnAlloc() + OnRecv() events on the listener. + virtual int RecvStop() = 0; + + // Send a chunk of data over this socket. This may call CreateSendWrap() + // on the listener if an async transmission is necessary. + virtual ssize_t Send(uv_buf_t* bufs, + size_t nbufs, + const sockaddr* addr) = 0; + + virtual SocketAddress GetPeerName() = 0; + virtual SocketAddress GetSockName() = 0; + + // Returns an AsyncWrap object with the same lifetime as this object. + virtual AsyncWrap* GetAsyncWrap() = 0; + + void set_listener(UDPListener* listener); + UDPListener* listener() const; + + static UDPWrapBase* FromObject(v8::Local obj); + + static void RecvStart(const v8::FunctionCallbackInfo& args); + static void RecvStop(const v8::FunctionCallbackInfo& args); + static void AddMethods(Environment* env, v8::Local t); + + private: + UDPListener* listener_ = nullptr; +}; + +class UDPWrap final : public HandleWrap, + public UDPWrapBase, + public UDPListener { public: enum SocketType { SOCKET @@ -51,8 +136,6 @@ class UDPWrap: public HandleWrap { static void Connect6(const v8::FunctionCallbackInfo& args); static void Send6(const v8::FunctionCallbackInfo& args); static void Disconnect(const v8::FunctionCallbackInfo& args); - static void RecvStart(const v8::FunctionCallbackInfo& args); - static void RecvStop(const v8::FunctionCallbackInfo& args); static void AddMembership(const v8::FunctionCallbackInfo& args); static void DropMembership(const v8::FunctionCallbackInfo& args); static void AddSourceSpecificMembership( @@ -68,6 +151,27 @@ class UDPWrap: public HandleWrap { static void SetTTL(const v8::FunctionCallbackInfo& args); static void BufferSize(const v8::FunctionCallbackInfo& args); + // UDPListener implementation + uv_buf_t OnAlloc(size_t suggested_size) override; + void OnRecv(ssize_t nread, + const uv_buf_t& buf, + const sockaddr* addr, + unsigned int flags) override; + ReqWrap* CreateSendWrap(size_t msg_size) override; + void OnSendDone(ReqWrap* wrap, int status) override; + + // UDPWrapBase implementation + int RecvStart() override; + int RecvStop() override; + ssize_t Send(uv_buf_t* bufs, + size_t nbufs, + const sockaddr* addr) override; + + SocketAddress GetPeerName() override; + SocketAddress GetSockName() override; + + AsyncWrap*
GetAsyncWrap() override; + static v8::MaybeLocal Instantiate(Environment* env, AsyncWrap* parent, SocketType type); @@ -99,7 +203,6 @@ class UDPWrap: public HandleWrap { static void OnAlloc(uv_handle_t* handle, size_t suggested_size, uv_buf_t* buf); - static void OnSend(uv_udp_send_t* req, int status); static void OnRecv(uv_udp_t* handle, ssize_t nread, const uv_buf_t* buf, @@ -107,6 +210,9 @@ class UDPWrap: public HandleWrap { unsigned int flags); uv_udp_t handle_; + + bool current_send_has_callback_; + v8::Local current_send_req_wrap_; }; } // namespace node diff --git a/test/async-hooks/test-async-exec-resource-http-32060.js b/test/async-hooks/test-async-exec-resource-http-32060.js new file mode 100644 index 00000000000..0ff68aa1070 --- /dev/null +++ b/test/async-hooks/test-async-exec-resource-http-32060.js @@ -0,0 +1,37 @@ +'use strict'; +require('../common'); +const assert = require('assert'); +const { + executionAsyncResource, + executionAsyncId, + createHook, +} = require('async_hooks'); +const http = require('http'); + +const hooked = {}; +createHook({ + init(asyncId, type, triggerAsyncId, resource) { + hooked[asyncId] = resource; + } +}).enable(); + +const server = http.createServer((req, res) => { + res.write('hello'); + setTimeout(() => { + res.end(' world!'); + }, 1000); +}); + +server.listen(0, () => { + assert.strictEqual(executionAsyncResource(), hooked[executionAsyncId()]); + http.get({ port: server.address().port }, (res) => { + assert.strictEqual(executionAsyncResource(), hooked[executionAsyncId()]); + res.on('data', () => { + assert.strictEqual(executionAsyncResource(), hooked[executionAsyncId()]); + }); + res.on('end', () => { + assert.strictEqual(executionAsyncResource(), hooked[executionAsyncId()]); + server.close(); + }); + }); +}); diff --git a/test/async-hooks/test-async-exec-resource-match.js b/test/async-hooks/test-async-exec-resource-match.js new file mode 100644 index 00000000000..f5ea2c2b139 --- /dev/null +++ b/test/async-hooks/test-async-exec-resource-match.js @@ -0,0 +1,62 @@ +'use strict'; + +const common = require('../common'); +const assert = require('assert'); +const { readFile } = require('fs'); +const { + createHook, + executionAsyncResource, + AsyncResource +} = require('async_hooks'); + +// Ignore any asyncIds created before our hook is active. 
+let firstSeenAsyncId = -1; +const idResMap = new Map(); +const numExpectedCalls = 5; + +createHook({ + init: common.mustCallAtLeast( + (asyncId, type, triggerId, resource) => { + if (firstSeenAsyncId === -1) { + firstSeenAsyncId = asyncId; + } + assert.ok(idResMap.get(asyncId) === undefined); + idResMap.set(asyncId, resource); + }, numExpectedCalls), + before(asyncId) { + if (asyncId >= firstSeenAsyncId) { + beforeHook(asyncId); + } + }, + after(asyncId) { + if (asyncId >= firstSeenAsyncId) { + afterHook(asyncId); + } + } +}).enable(); + +const beforeHook = common.mustCallAtLeast( + (asyncId) => { + const res = idResMap.get(asyncId); + assert.ok(res !== undefined); + const execRes = executionAsyncResource(); + assert.ok(execRes === res, 'resource mismatch in before'); + }, numExpectedCalls); + +const afterHook = common.mustCallAtLeast( + (asyncId) => { + const res = idResMap.get(asyncId); + assert.ok(res !== undefined); + const execRes = executionAsyncResource(); + assert.ok(execRes === res, 'resource mismatch in after'); + }, numExpectedCalls); + +const res = new AsyncResource('TheResource'); +const initRes = idResMap.get(res.asyncId()); +assert.ok(initRes === res, 'resource mismatch in init'); +res.runInAsyncScope(common.mustCall(() => { + const execRes = executionAsyncResource(); + assert.ok(execRes === res, 'resource mismatch in cb'); +})); + +readFile(__filename, common.mustCall()); diff --git a/test/async-hooks/test-async-local-storage-args.js b/test/async-hooks/test-async-local-storage-args.js new file mode 100644 index 00000000000..04316dff59d --- /dev/null +++ b/test/async-hooks/test-async-local-storage-args.js @@ -0,0 +1,20 @@ +'use strict'; +require('../common'); +const assert = require('assert'); +const { AsyncLocalStorage } = require('async_hooks'); + +const asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.run({}, (runArg) => { + assert.strictEqual(runArg, 1); + asyncLocalStorage.exit((exitArg) => { + assert.strictEqual(exitArg, 2); + }, 2); +}, 1); + +asyncLocalStorage.runSyncAndReturn({}, (runArg) => { + assert.strictEqual(runArg, 'foo'); + asyncLocalStorage.exitSyncAndReturn((exitArg) => { + assert.strictEqual(exitArg, 'bar'); + }, 'bar'); +}, 'foo'); diff --git a/test/async-hooks/test-async-local-storage-async-await.js b/test/async-hooks/test-async-local-storage-async-await.js new file mode 100644 index 00000000000..a03f803186b --- /dev/null +++ b/test/async-hooks/test-async-local-storage-async-await.js @@ -0,0 +1,19 @@ +'use strict'; +require('../common'); +const assert = require('assert'); +const { AsyncLocalStorage } = require('async_hooks'); + +const asyncLocalStorage = new AsyncLocalStorage(); + +async function test() { + asyncLocalStorage.getStore().set('foo', 'bar'); + await Promise.resolve(); + assert.strictEqual(asyncLocalStorage.getStore().get('foo'), 'bar'); +} + +async function main() { + await asyncLocalStorage.runSyncAndReturn(new Map(), test); + assert.strictEqual(asyncLocalStorage.getStore(), undefined); +} + +main(); diff --git a/test/async-hooks/test-async-local-storage-async-functions.js b/test/async-hooks/test-async-local-storage-async-functions.js new file mode 100644 index 00000000000..a0852bc1098 --- /dev/null +++ b/test/async-hooks/test-async-local-storage-async-functions.js @@ -0,0 +1,27 @@ +'use strict'; +require('../common'); +const assert = require('assert'); +const { AsyncLocalStorage } = require('async_hooks'); + +async function foo() {} + +const asyncLocalStorage = new AsyncLocalStorage(); + +async function testOut() { + 
await foo(); + assert.strictEqual(asyncLocalStorage.getStore(), undefined); +} + +async function testAwait() { + await foo(); + assert.notStrictEqual(asyncLocalStorage.getStore(), undefined); + assert.strictEqual(asyncLocalStorage.getStore().get('key'), 'value'); + await asyncLocalStorage.exitSyncAndReturn(testOut); +} + +asyncLocalStorage.run(new Map(), () => { + const store = asyncLocalStorage.getStore(); + store.set('key', 'value'); + testAwait(); // should not reject +}); +assert.strictEqual(asyncLocalStorage.getStore(), undefined); diff --git a/test/async-hooks/test-async-local-storage-enable-disable.js b/test/async-hooks/test-async-local-storage-enable-disable.js new file mode 100644 index 00000000000..93132079827 --- /dev/null +++ b/test/async-hooks/test-async-local-storage-enable-disable.js @@ -0,0 +1,32 @@ +'use strict'; +require('../common'); +const assert = require('assert'); +const { AsyncLocalStorage } = require('async_hooks'); + +const asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.runSyncAndReturn(new Map(), () => { + asyncLocalStorage.getStore().set('foo', 'bar'); + process.nextTick(() => { + assert.strictEqual(asyncLocalStorage.getStore().get('foo'), 'bar'); + process.nextTick(() => { + assert.strictEqual(asyncLocalStorage.getStore(), undefined); + }); + + asyncLocalStorage.disable(); + assert.strictEqual(asyncLocalStorage.getStore(), undefined); + + // Calls to exit() should not mess with enabled status + asyncLocalStorage.exit(() => { + assert.strictEqual(asyncLocalStorage.getStore(), undefined); + }); + assert.strictEqual(asyncLocalStorage.getStore(), undefined); + + process.nextTick(() => { + assert.strictEqual(asyncLocalStorage.getStore(), undefined); + asyncLocalStorage.runSyncAndReturn(new Map(), () => { + assert.notStrictEqual(asyncLocalStorage.getStore(), undefined); + }); + }); + }); +}); diff --git a/test/async-hooks/test-async-local-storage-enter-with.js b/test/async-hooks/test-async-local-storage-enter-with.js new file mode 100644 index 00000000000..736dd83f853 --- /dev/null +++ b/test/async-hooks/test-async-local-storage-enter-with.js @@ -0,0 +1,20 @@ +'use strict'; +require('../common'); +const assert = require('assert'); +const { AsyncLocalStorage } = require('async_hooks'); + +const asyncLocalStorage = new AsyncLocalStorage(); + +setImmediate(() => { + const store = { foo: 'bar' }; + asyncLocalStorage.enterWith(store); + + assert.strictEqual(asyncLocalStorage.getStore(), store); + setTimeout(() => { + assert.strictEqual(asyncLocalStorage.getStore(), store); + }, 10); +}); + +setTimeout(() => { + assert.strictEqual(asyncLocalStorage.getStore(), undefined); +}, 10);
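The enter-with test above pins down the scoping rule for enterWith(): the store bound inside the setImmediate callback follows that callback's subsequent asynchronous operations, while the sibling timer scheduled beforehand still observes no store at all. A standalone sketch of the same rule (public API only; the reqId store is invented for illustration):

'use strict';
const { AsyncLocalStorage } = require('async_hooks');
const als = new AsyncLocalStorage();

setImmediate(() => {
  als.enterWith({ reqId: 1 });
  // The store persists into async work started after enterWith()...
  setTimeout(() => console.log(als.getStore()), 10); // { reqId: 1 }
});

// ...but a callback created before enterWith() ran keeps the (empty) context
// it captured at creation time.
setTimeout(() => console.log(als.getStore()), 10); // undefined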
diff --git a/test/async-hooks/test-async-local-storage-errors-async.js b/test/async-hooks/test-async-local-storage-errors-async.js new file mode 100644 index 00000000000..b6f0b4fa742 --- /dev/null +++ b/test/async-hooks/test-async-local-storage-errors-async.js @@ -0,0 +1,26 @@ +'use strict'; +require('../common'); +const assert = require('assert'); +const { AsyncLocalStorage } = require('async_hooks'); + +// Case 1: fully async APIs (safe) +const asyncLocalStorage = new AsyncLocalStorage(); + +let i = 0; +process.setUncaughtExceptionCaptureCallback((err) => { + ++i; + assert.strictEqual(err.message, 'err' + i); + assert.strictEqual(asyncLocalStorage.getStore().get('hello'), 'node'); +}); + +asyncLocalStorage.run(new Map(), () => { + const store = asyncLocalStorage.getStore(); + store.set('hello', 'node'); + setTimeout(() => { + process.nextTick(() => { + assert.strictEqual(i, 2); + }); + throw new Error('err2'); + }, 0); + throw new Error('err1'); +}); diff --git a/test/async-hooks/test-async-local-storage-errors-sync-ret.js b/test/async-hooks/test-async-local-storage-errors-sync-ret.js new file mode 100644 index 00000000000..3b5c57a7347 --- /dev/null +++ b/test/async-hooks/test-async-local-storage-errors-sync-ret.js @@ -0,0 +1,31 @@ +'use strict'; +require('../common'); +const assert = require('assert'); +const { AsyncLocalStorage } = require('async_hooks'); + +// Case 2: using the *SyncAndReturn calls (dual behavior) +const asyncLocalStorage = new AsyncLocalStorage(); + +let i = 0; +process.setUncaughtExceptionCaptureCallback((err) => { + ++i; + assert.strictEqual(err.message, 'err2'); + assert.strictEqual(asyncLocalStorage.getStore().get('hello'), 'node'); +}); + +try { + asyncLocalStorage.runSyncAndReturn(new Map(), () => { + const store = asyncLocalStorage.getStore(); + store.set('hello', 'node'); + setTimeout(() => { + process.nextTick(() => { + assert.strictEqual(i, 1); + }); + throw new Error('err2'); + }, 0); + throw new Error('err1'); + }); +} catch (e) { + assert.strictEqual(e.message, 'err1'); + assert.strictEqual(asyncLocalStorage.getStore(), undefined); +} diff --git a/test/async-hooks/test-async-local-storage-gcable.js b/test/async-hooks/test-async-local-storage-gcable.js new file mode 100644 index 00000000000..37b04b38d14 --- /dev/null +++ b/test/async-hooks/test-async-local-storage-gcable.js @@ -0,0 +1,20 @@ +'use strict'; +// Flags: --expose_gc + +// This test ensures that AsyncLocalStorage gets GCed once it has been +// disabled and no strong references remain in userland. + +const common = require('../common'); +const { AsyncLocalStorage } = require('async_hooks'); +const onGC = require('../common/ongc'); + +let asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.runSyncAndReturn({}, () => { + asyncLocalStorage.disable(); + + onGC(asyncLocalStorage, { ongc: common.mustCall() }); +}); + +asyncLocalStorage = null; +global.gc(); diff --git a/test/async-hooks/test-async-local-storage-http.js b/test/async-hooks/test-async-local-storage-http.js new file mode 100644 index 00000000000..c7514d8280d --- /dev/null +++ b/test/async-hooks/test-async-local-storage-http.js @@ -0,0 +1,21 @@ +'use strict'; +require('../common'); +const assert = require('assert'); +const { AsyncLocalStorage } = require('async_hooks'); +const http = require('http'); + +const asyncLocalStorage = new AsyncLocalStorage(); +const server = http.createServer((req, res) => { + res.end('ok'); +}); + +server.listen(0, () => { + asyncLocalStorage.run(new Map(), () => { + const store = asyncLocalStorage.getStore(); + store.set('hello', 'world'); + http.get({ host: 'localhost', port: server.address().port }, () => { + assert.strictEqual(asyncLocalStorage.getStore().get('hello'), 'world'); + server.close(); + }); + }); +}); diff --git a/test/async-hooks/test-async-local-storage-misc-stores.js b/test/async-hooks/test-async-local-storage-misc-stores.js new file mode 100644 index 00000000000..56873008dd6 --- /dev/null +++ b/test/async-hooks/test-async-local-storage-misc-stores.js @@ -0,0 +1,24 @@ +'use strict'; +require('../common'); +const assert = require('assert'); +const { AsyncLocalStorage } = require('async_hooks'); + +const asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.run(42, () => { + assert.strictEqual(asyncLocalStorage.getStore(), 42); +}); + +const runStore = { foo: 'bar' }; +asyncLocalStorage.run(runStore, () => { +
assert.strictEqual(asyncLocalStorage.getStore(), runStore); +}); + +asyncLocalStorage.runSyncAndReturn('hello node', () => { + assert.strictEqual(asyncLocalStorage.getStore(), 'hello node'); +}); + +const runSyncStore = { hello: 'node' }; +asyncLocalStorage.runSyncAndReturn(runSyncStore, () => { + assert.strictEqual(asyncLocalStorage.getStore(), runSyncStore); +}); diff --git a/test/async-hooks/test-async-local-storage-nested.js b/test/async-hooks/test-async-local-storage-nested.js new file mode 100644 index 00000000000..143d5d45de9 --- /dev/null +++ b/test/async-hooks/test-async-local-storage-nested.js @@ -0,0 +1,38 @@ +'use strict'; +require('../common'); +const assert = require('assert'); +const { AsyncLocalStorage } = require('async_hooks'); + +const asyncLocalStorage = new AsyncLocalStorage(); +const outer = {}; +const inner = {}; + +function testInner() { + assert.strictEqual(asyncLocalStorage.getStore(), outer); + + asyncLocalStorage.run(inner, () => { + assert.strictEqual(asyncLocalStorage.getStore(), inner); + }); + assert.strictEqual(asyncLocalStorage.getStore(), outer); + + asyncLocalStorage.exit(() => { + assert.strictEqual(asyncLocalStorage.getStore(), undefined); + }); + assert.strictEqual(asyncLocalStorage.getStore(), outer); + + asyncLocalStorage.runSyncAndReturn(inner, () => { + assert.strictEqual(asyncLocalStorage.getStore(), inner); + }); + assert.strictEqual(asyncLocalStorage.getStore(), outer); + + asyncLocalStorage.exitSyncAndReturn(() => { + assert.strictEqual(asyncLocalStorage.getStore(), undefined); + }); + assert.strictEqual(asyncLocalStorage.getStore(), outer); +} + +asyncLocalStorage.run(outer, testInner); +assert.strictEqual(asyncLocalStorage.getStore(), undefined); + +asyncLocalStorage.runSyncAndReturn(outer, testInner); +assert.strictEqual(asyncLocalStorage.getStore(), undefined); diff --git a/test/async-hooks/test-async-local-storage-no-mix-contexts.js b/test/async-hooks/test-async-local-storage-no-mix-contexts.js new file mode 100644 index 00000000000..3a6b352c94c --- /dev/null +++ b/test/async-hooks/test-async-local-storage-no-mix-contexts.js @@ -0,0 +1,38 @@ +'use strict'; +require('../common'); +const assert = require('assert'); +const { AsyncLocalStorage } = require('async_hooks'); + +const asyncLocalStorage = new AsyncLocalStorage(); +const asyncLocalStorage2 = new AsyncLocalStorage(); + +setTimeout(() => { + asyncLocalStorage.run(new Map(), () => { + asyncLocalStorage2.run(new Map(), () => { + const store = asyncLocalStorage.getStore(); + const store2 = asyncLocalStorage2.getStore(); + store.set('hello', 'world'); + store2.set('hello', 'foo'); + setTimeout(() => { + assert.strictEqual(asyncLocalStorage.getStore().get('hello'), 'world'); + assert.strictEqual(asyncLocalStorage2.getStore().get('hello'), 'foo'); + asyncLocalStorage.exit(() => { + assert.strictEqual(asyncLocalStorage.getStore(), undefined); + assert.strictEqual(asyncLocalStorage2.getStore().get('hello'), 'foo'); + }); + assert.strictEqual(asyncLocalStorage.getStore().get('hello'), 'world'); + assert.strictEqual(asyncLocalStorage2.getStore().get('hello'), 'foo'); + }, 200); + }); + }); +}, 100); + +setTimeout(() => { + asyncLocalStorage.run(new Map(), () => { + const store = asyncLocalStorage.getStore(); + store.set('hello', 'earth'); + setTimeout(() => { + assert.strictEqual(asyncLocalStorage.getStore().get('hello'), 'earth'); + }, 100); + }); +}, 100); diff --git a/test/async-hooks/test-async-local-storage-promises.js b/test/async-hooks/test-async-local-storage-promises.js new file mode
100644 index 00000000000..0e4968534bc --- /dev/null +++ b/test/async-hooks/test-async-local-storage-promises.js @@ -0,0 +1,28 @@ +'use strict'; +require('../common'); +const assert = require('assert'); +const { AsyncLocalStorage } = require('async_hooks'); + +async function main() { + const asyncLocalStorage = new AsyncLocalStorage(); + const err = new Error(); + const next = () => Promise.resolve() + .then(() => { + assert.strictEqual(asyncLocalStorage.getStore().get('a'), 1); + throw err; + }); + await new Promise((resolve, reject) => { + asyncLocalStorage.run(new Map(), () => { + const store = asyncLocalStorage.getStore(); + store.set('a', 1); + next().then(resolve, reject); + }); + }) + .catch((e) => { + assert.strictEqual(asyncLocalStorage.getStore(), undefined); + assert.strictEqual(e, err); + }); + assert.strictEqual(asyncLocalStorage.getStore(), undefined); +} + +main(); diff --git a/test/benchmark/test-benchmark-assert.js b/test/benchmark/test-benchmark-assert.js index 8a8ba024448..5ec2319c28a 100644 --- a/test/benchmark/test-benchmark-assert.js +++ b/test/benchmark/test-benchmark-assert.js @@ -7,15 +7,4 @@ require('../common'); const runBenchmark = require('../common/benchmark'); -runBenchmark( - 'assert', - [ - 'strict=1', - 'len=1', - 'method=', - 'n=1', - 'primitive=null', - 'size=1', - 'type=Int8Array' - ] -); +runBenchmark('assert'); diff --git a/test/benchmark/test-benchmark-async-hooks.js b/test/benchmark/test-benchmark-async-hooks.js index 662a6a07f31..c9ea2c1e86d 100644 --- a/test/benchmark/test-benchmark-async-hooks.js +++ b/test/benchmark/test-benchmark-async-hooks.js @@ -10,13 +10,4 @@ if (!common.enoughTestMem) const runBenchmark = require('../common/benchmark'); -runBenchmark('async_hooks', - [ - 'asyncHooks=all', - 'connections=50', - 'method=trackingDisabled', - 'n=10', - 'type=async-resource', - 'asyncMethod=async' - ], - {}); +runBenchmark('async_hooks'); diff --git a/test/benchmark/test-benchmark-buffer.js b/test/benchmark/test-benchmark-buffer.js index 57130ffb08b..af93842b0b9 100644 --- a/test/benchmark/test-benchmark-buffer.js +++ b/test/benchmark/test-benchmark-buffer.js @@ -4,30 +4,4 @@ require('../common'); const runBenchmark = require('../common/benchmark'); -runBenchmark('buffers', - [ - 'aligned=true', - 'args=1', - 'buffer=fast', - 'bytes=0', - 'byteLength=1', - 'charsPerLine=6', - 'difflen=false', - 'encoding=utf8', - 'endian=BE', - 'extraSize=1', - 'len=256', - 'linesCount=1', - 'method=', - 'n=1', - 'partial=true', - 'pieces=1', - 'pieceSize=1', - 'search=@', - 'size=1', - 'source=array', - 'type=', - 'value=0', - 'withTotalLength=0' - ], - { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); +runBenchmark('buffers', { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); diff --git a/test/benchmark/test-benchmark-child-process.js b/test/benchmark/test-benchmark-child-process.js index 365777069bc..043620de12f 100644 --- a/test/benchmark/test-benchmark-child-process.js +++ b/test/benchmark/test-benchmark-child-process.js @@ -4,12 +4,4 @@ require('../common'); const runBenchmark = require('../common/benchmark'); -runBenchmark('child_process', - [ - 'dur=0', - 'n=1', - 'len=1', - 'params=1', - 'methodName=execSync', - ], - { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); +runBenchmark('child_process', { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); diff --git a/test/benchmark/test-benchmark-cluster.js b/test/benchmark/test-benchmark-cluster.js index 26df7ec239a..b24aced5d58 100644 --- a/test/benchmark/test-benchmark-cluster.js +++ b/test/benchmark/test-benchmark-cluster.js @@ -4,9 +4,4 @@ 
require('../common'); const runBenchmark = require('../common/benchmark'); -runBenchmark('cluster', [ - 'n=1', - 'payload=string', - 'sendsPerBroadcast=1', - 'serialization=json', -]); +runBenchmark('cluster'); diff --git a/test/benchmark/test-benchmark-crypto.js b/test/benchmark/test-benchmark-crypto.js index 6ba71471cf4..7f6988acf23 100644 --- a/test/benchmark/test-benchmark-crypto.js +++ b/test/benchmark/test-benchmark-crypto.js @@ -10,18 +10,4 @@ if (common.hasFipsCrypto) const runBenchmark = require('../common/benchmark'); -runBenchmark('crypto', - [ - 'algo=sha256', - 'api=stream', - 'cipher=', - 'keylen=1024', - 'len=1', - 'n=1', - 'out=buffer', - 'size=1', - 'type=buf', - 'v=crypto', - 'writes=1', - ], - { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); +runBenchmark('crypto', { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); diff --git a/test/benchmark/test-benchmark-dgram.js b/test/benchmark/test-benchmark-dgram.js index 8c93cd5a0c9..ceafdd77a2a 100644 --- a/test/benchmark/test-benchmark-dgram.js +++ b/test/benchmark/test-benchmark-dgram.js @@ -4,14 +4,7 @@ require('../common'); const runBenchmark = require('../common/benchmark'); -// Because the dgram benchmarks use hardcoded ports, this should be in -// sequential rather than parallel to make sure it does not conflict with -// tests that choose random available ports. +// Dgram benchmarks use hardcoded ports. Thus, this test cannot be run in +// parallel with tests that choose random ports. -runBenchmark('dgram', ['address=true', - 'chunks=2', - 'dur=0.1', - 'len=1', - 'n=1', - 'num=1', - 'type=send']); +runBenchmark('dgram'); diff --git a/test/benchmark/test-benchmark-dns.js b/test/benchmark/test-benchmark-dns.js index 811e9a44b9e..331a4c8ff0d 100644 --- a/test/benchmark/test-benchmark-dns.js +++ b/test/benchmark/test-benchmark-dns.js @@ -4,6 +4,4 @@ require('../common'); const runBenchmark = require('../common/benchmark'); -const env = { ...process.env, NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }; - -runBenchmark('dns', ['n=1', 'all=false', 'name=127.0.0.1'], env); +runBenchmark('dns', { ...process.env, NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); diff --git a/test/benchmark/test-benchmark-domain.js b/test/benchmark/test-benchmark-domain.js index e7d8b60b716..5ebbfc5ea3b 100644 --- a/test/benchmark/test-benchmark-domain.js +++ b/test/benchmark/test-benchmark-domain.js @@ -4,4 +4,4 @@ require('../common'); const runBenchmark = require('../common/benchmark'); -runBenchmark('domain', ['n=1', 'args=0']); +runBenchmark('domain'); diff --git a/test/benchmark/test-benchmark-es.js b/test/benchmark/test-benchmark-es.js index 47fe4ab3f91..6886b3ce925 100644 --- a/test/benchmark/test-benchmark-es.js +++ b/test/benchmark/test-benchmark-es.js @@ -4,15 +4,4 @@ require('../common'); const runBenchmark = require('../common/benchmark'); -runBenchmark('es', - [ - 'method=', - 'count=1', - 'context=null', - 'rest=0', - 'mode=', - 'n=1', - 'encoding=ascii', - 'size=1e1' - ], - { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); +runBenchmark('es', { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); diff --git a/test/benchmark/test-benchmark-events.js b/test/benchmark/test-benchmark-events.js index 06be60a84ab..53de4897dd7 100644 --- a/test/benchmark/test-benchmark-events.js +++ b/test/benchmark/test-benchmark-events.js @@ -4,6 +4,4 @@ require('../common'); const runBenchmark = require('../common/benchmark'); -runBenchmark('events', - ['argc=0', 'listeners=1', 'n=1'], - { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); +runBenchmark('events', { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 });
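Every test-benchmark-* change in this diff follows the same recipe: the hand-maintained '--set key=value' lists are dropped, and only the suite name (plus any extra environment) is forwarded, because the helper now runs benchmark/run.js in its 'test' mode, which picks minimal settings itself (see the test/common/benchmark.js hunk later in this diff). A rough sketch of the reduced helper; only the ['test', name] argv shape comes from that hunk, the fork-and-exit-check plumbing here is an assumption:

'use strict';
const { fork } = require('child_process');
const path = require('path');

const runjs = path.join(__dirname, '..', '..', 'benchmark', 'run.js');

function runBenchmark(name, env) {
  // 'test' asks run.js to substitute tiny built-in parameter sets, so
  // callers no longer enumerate every benchmark option by hand.
  const argv = ['test', name];
  const child = fork(runjs, argv, { env: { ...process.env, ...env }, silent: true });
  child.on('exit', (code, signal) => {
    if (code !== 0 || signal !== null)
      throw new Error(`benchmark '${name}' failed (code ${code}, signal ${signal})`);
  });
}

diff --git 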
a/test/benchmark/test-benchmark-fs.js b/test/benchmark/test-benchmark-fs.js index cf382407235..3ef6be2b7eb 100644 --- a/test/benchmark/test-benchmark-fs.js +++ b/test/benchmark/test-benchmark-fs.js @@ -6,19 +6,4 @@ const runBenchmark = require('../common/benchmark'); const tmpdir = require('../common/tmpdir'); tmpdir.refresh(); -runBenchmark('fs', [ - 'bufferSize=32', - 'concurrent=1', - 'dir=.github', - 'dur=0.1', - 'encodingType=buf', - 'filesize=1024', - 'len=1024', - 'mode=callback', - 'n=1', - 'pathType=relative', - 'size=1', - 'statSyncType=fstatSync', - 'statType=fstat', - 'withFileTypes=false', -], { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); +runBenchmark('fs', { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); diff --git a/test/benchmark/test-benchmark-http.js b/test/benchmark/test-benchmark-http.js index 43531ec51cc..a3d92c7e987 100644 --- a/test/benchmark/test-benchmark-http.js +++ b/test/benchmark/test-benchmark-http.js @@ -11,27 +11,4 @@ if (!common.enoughTestMem) const runBenchmark = require('../common/benchmark'); -runBenchmark('http', - [ - 'benchmarker=test-double-http', - 'arg=string', - 'c=1', - 'chunkedEnc=true', - 'chunks=0', - 'dur=0.1', - 'e=0', - 'input=keep-alive', - 'key=""', - 'len=1', - 'method=write', - 'n=1', - 'res=normal', - 'type=asc', - 'url=long', - 'value=X-Powered-By', - 'w=0', - ], - { - NODEJS_BENCHMARK_ZERO_ALLOWED: 1, - duration: 0 - }); +runBenchmark('http', { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); diff --git a/test/benchmark/test-benchmark-http2.js b/test/benchmark/test-benchmark-http2.js index 1668a7b87fe..25dd771076e 100644 --- a/test/benchmark/test-benchmark-http2.js +++ b/test/benchmark/test-benchmark-http2.js @@ -13,17 +13,4 @@ if (!common.enoughTestMem) const runBenchmark = require('../common/benchmark'); -runBenchmark('http2', - [ - 'benchmarker=test-double-http2', - 'clients=1', - 'length=65536', - 'n=1', - 'nheaders=0', - 'requests=1', - 'streams=1' - ], - { - NODEJS_BENCHMARK_ZERO_ALLOWED: 1, - duration: 0 - }); +runBenchmark('http2', { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); diff --git a/test/benchmark/test-benchmark-misc.js b/test/benchmark/test-benchmark-misc.js index 13bd41e5401..30707bfaf73 100644 --- a/test/benchmark/test-benchmark-misc.js +++ b/test/benchmark/test-benchmark-misc.js @@ -4,14 +4,4 @@ require('../common'); const runBenchmark = require('../common/benchmark'); -runBenchmark('misc', [ - 'concat=0', - 'dur=0.1', - 'method=', - 'n=1', - 'type=', - 'code=1', - 'val=magyarország.icom.museum', - 'script=test/fixtures/semicolon', - 'mode=worker' -], { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); +runBenchmark('misc', { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); diff --git a/test/benchmark/test-benchmark-module.js b/test/benchmark/test-benchmark-module.js index 4d49ed249c3..da8e108d53e 100644 --- a/test/benchmark/test-benchmark-module.js +++ b/test/benchmark/test-benchmark-module.js @@ -4,12 +4,4 @@ require('../common'); const runBenchmark = require('../common/benchmark'); -runBenchmark('module', [ - 'cache=true', - 'dir=rel', - 'ext=', - 'fullPath=true', - 'n=1', - 'name=/', - 'useCache=true', -]); +runBenchmark('module'); diff --git a/test/benchmark/test-benchmark-napi.js b/test/benchmark/test-benchmark-napi.js index 1a1ff23d60c..5c6a8aa0118 100644 --- a/test/benchmark/test-benchmark-napi.js +++ b/test/benchmark/test-benchmark-napi.js @@ -15,10 +15,4 @@ if (process.features.debug) { } const runBenchmark = require('../common/benchmark'); -runBenchmark('napi', - [ - 'n=1', - 'engine=v8', - 'type=String' - ], - { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); 
+runBenchmark('napi', { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); diff --git a/test/benchmark/test-benchmark-net.js b/test/benchmark/test-benchmark-net.js index f51c615b5df..df8ea801169 100644 --- a/test/benchmark/test-benchmark-net.js +++ b/test/benchmark/test-benchmark-net.js @@ -8,13 +8,4 @@ require('../common'); const runBenchmark = require('../common/benchmark'); -runBenchmark('net', - [ - 'dur=0', - 'len=1024', - 'recvbufgenfn=false', - 'recvbuflen=0', - 'sendchunklen=256', - 'type=buf' - ], - { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); +runBenchmark('net', { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); diff --git a/test/benchmark/test-benchmark-os.js b/test/benchmark/test-benchmark-os.js index 836e0e65048..dbedd7f582d 100644 --- a/test/benchmark/test-benchmark-os.js +++ b/test/benchmark/test-benchmark-os.js @@ -4,4 +4,4 @@ require('../common'); const runBenchmark = require('../common/benchmark'); -runBenchmark('os', ['n=1']); +runBenchmark('os'); diff --git a/test/benchmark/test-benchmark-path.js b/test/benchmark/test-benchmark-path.js index 890fd7ae41f..3bca4f2a11b 100644 --- a/test/benchmark/test-benchmark-path.js +++ b/test/benchmark/test-benchmark-path.js @@ -4,11 +4,4 @@ require('../common'); const runBenchmark = require('../common/benchmark'); -runBenchmark('path', - [ - 'n=1', - 'path=', - 'pathext=', - 'paths=', - 'props=' - ], { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); +runBenchmark('path', { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); diff --git a/test/benchmark/test-benchmark-process.js b/test/benchmark/test-benchmark-process.js index a73fc075bfc..c6687f30234 100644 --- a/test/benchmark/test-benchmark-process.js +++ b/test/benchmark/test-benchmark-process.js @@ -4,9 +4,4 @@ require('../common'); const runBenchmark = require('../common/benchmark'); -runBenchmark('process', - [ - 'n=1', - 'type=raw', - 'operation=enumerate', - ], { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); +runBenchmark('process', { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); diff --git a/test/benchmark/test-benchmark-querystring.js b/test/benchmark/test-benchmark-querystring.js index 77d0ac99ff9..6fee9bb3914 100644 --- a/test/benchmark/test-benchmark-querystring.js +++ b/test/benchmark/test-benchmark-querystring.js @@ -4,9 +4,4 @@ require('../common'); const runBenchmark = require('../common/benchmark'); -runBenchmark('querystring', - [ 'n=1', - 'input="there is nothing to unescape here"', - 'type=noencode' - ], - { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); +runBenchmark('querystring', { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); diff --git a/test/benchmark/test-benchmark-streams.js b/test/benchmark/test-benchmark-streams.js index fa61b8dc4e0..68c8478a739 100644 --- a/test/benchmark/test-benchmark-streams.js +++ b/test/benchmark/test-benchmark-streams.js @@ -4,13 +4,4 @@ require('../common'); const runBenchmark = require('../common/benchmark'); -runBenchmark('streams', - [ - 'kind=duplex', - 'n=1', - 'sync=no', - 'writev=no', - 'callback=no', - 'type=buffer', - ], - { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); +runBenchmark('streams', { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); diff --git a/test/benchmark/test-benchmark-string_decoder.js b/test/benchmark/test-benchmark-string_decoder.js index f2fd6abe4b8..721529e5ae6 100644 --- a/test/benchmark/test-benchmark-string_decoder.js +++ b/test/benchmark/test-benchmark-string_decoder.js @@ -4,7 +4,4 @@ require('../common'); const runBenchmark = require('../common/benchmark'); -runBenchmark('string_decoder', ['chunkLen=16', - 'encoding=utf8', - 'inLen=32', - 'n=1']); +runBenchmark('string_decoder'); diff --git 
a/test/benchmark/test-benchmark-timers.js b/test/benchmark/test-benchmark-timers.js index af3c49bd714..db4927ab32e 100644 --- a/test/benchmark/test-benchmark-timers.js +++ b/test/benchmark/test-benchmark-timers.js @@ -4,10 +4,4 @@ require('../common'); const runBenchmark = require('../common/benchmark'); -runBenchmark('timers', - [ - 'direction=start', - 'n=1', - 'type=depth', - ], - { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); +runBenchmark('timers', { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); diff --git a/test/benchmark/test-benchmark-tls.js b/test/benchmark/test-benchmark-tls.js index 40c14af8302..c9a87c15770 100644 --- a/test/benchmark/test-benchmark-tls.js +++ b/test/benchmark/test-benchmark-tls.js @@ -14,16 +14,4 @@ if (!common.enoughTestMem) const runBenchmark = require('../common/benchmark'); -runBenchmark('tls', - [ - 'concurrency=1', - 'dur=0.1', - 'n=1', - 'size=2', - 'securing=SecurePair', - 'type=asc' - ], - { - NODEJS_BENCHMARK_ZERO_ALLOWED: 1, - duration: 0 - }); +runBenchmark('tls', { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); diff --git a/test/benchmark/test-benchmark-url.js b/test/benchmark/test-benchmark-url.js index 8560c918fd1..664e7c4d8dc 100644 --- a/test/benchmark/test-benchmark-url.js +++ b/test/benchmark/test-benchmark-url.js @@ -4,22 +4,4 @@ require('../common'); const runBenchmark = require('../common/benchmark'); -runBenchmark('url', - [ - 'method=legacy', - 'e=0', - 'loopMethod=forEach', - 'accessMethod=get', - 'type=short', - 'searchParam=noencode', - 'href=short', - 'input=short', - 'domain=empty', - 'path=up', - 'to=ascii', - 'prop=href', - 'n=1', - 'param=one', - 'withBase=false' - ], - { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); +runBenchmark('url', { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); diff --git a/test/benchmark/test-benchmark-util.js b/test/benchmark/test-benchmark-util.js index b66d4fdb9b4..d0c16c62326 100644 --- a/test/benchmark/test-benchmark-util.js +++ b/test/benchmark/test-benchmark-util.js @@ -4,17 +4,4 @@ require('../common'); const runBenchmark = require('../common/benchmark'); -runBenchmark('util', - ['argument=false', - 'input=', - 'method=Array', - 'n=1', - 'option=none', - 'pos=start', - 'size=1', - 'type=', - 'len=1', - 'version=native', - 'isProxy=1', - 'showProxy=1'], - { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); +runBenchmark('util', { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); diff --git a/test/benchmark/test-benchmark-v8.js b/test/benchmark/test-benchmark-v8.js index 22861785db9..efeaac8328c 100644 --- a/test/benchmark/test-benchmark-v8.js +++ b/test/benchmark/test-benchmark-v8.js @@ -4,9 +4,4 @@ require('../common'); const runBenchmark = require('../common/benchmark'); -runBenchmark('v8', - [ - 'method=getHeapStatistics', - 'n=1' - ], - { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); +runBenchmark('v8', { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); diff --git a/test/benchmark/test-benchmark-vm.js b/test/benchmark/test-benchmark-vm.js index 79ab6a03402..e9c4e3f1389 100644 --- a/test/benchmark/test-benchmark-vm.js +++ b/test/benchmark/test-benchmark-vm.js @@ -4,10 +4,4 @@ require('../common'); const runBenchmark = require('../common/benchmark'); -runBenchmark('vm', - [ - 'breakOnSigint=0', - 'withSigintListener=0', - 'n=1' - ], - { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); +runBenchmark('vm', { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); diff --git a/test/benchmark/test-benchmark-worker.js b/test/benchmark/test-benchmark-worker.js index ae9a8a6701a..a4319d4face 100644 --- a/test/benchmark/test-benchmark-worker.js +++ b/test/benchmark/test-benchmark-worker.js @@ -11,13 +11,4 @@ if 
(!common.enoughTestMem) const runBenchmark = require('../common/benchmark'); -runBenchmark('worker', - [ - 'n=1', - 'sendsPerBroadcast=1', - 'workers=1', - 'payload=string' - ], - { - NODEJS_BENCHMARK_ZERO_ALLOWED: 1 - }); +runBenchmark('worker', { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); diff --git a/test/benchmark/test-benchmark-zlib.js b/test/benchmark/test-benchmark-zlib.js index adc7e3d47b0..e3c4723aa97 100644 --- a/test/benchmark/test-benchmark-zlib.js +++ b/test/benchmark/test-benchmark-zlib.js @@ -4,17 +4,4 @@ require('../common'); const runBenchmark = require('../common/benchmark'); -runBenchmark('zlib', - [ - 'algorithm=brotli', - 'chunkLen=1024', - 'duration=0.001', - 'inputLen=1024', - 'method=', - 'n=1', - 'options=true', - 'type=Deflate', - ], - { - 'NODEJS_BENCHMARK_ZERO_ALLOWED': 1 - }); +runBenchmark('zlib', { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); diff --git a/test/cctest/test_environment.cc b/test/cctest/test_environment.cc index 132f7b44f7d..90c5cff5e09 100644 --- a/test/cctest/test_environment.cc +++ b/test/cctest/test_environment.cc @@ -32,23 +32,24 @@ class EnvironmentTest : public EnvironmentTestFixture { } }; -TEST_F(EnvironmentTest, PreExeuctionPreparation) { - const v8::HandleScope handle_scope(isolate_); - const Argv argv; - Env env {handle_scope, argv}; - - v8::Local<v8::Context> context = isolate_->GetCurrentContext(); - - const char* run_script = "process.argv0"; - v8::Local<v8::Script> script = v8::Script::Compile( - context, - v8::String::NewFromOneByte(isolate_, - reinterpret_cast<const uint8_t*>(run_script), - v8::NewStringType::kNormal).ToLocalChecked()) - .ToLocalChecked(); - v8::Local<v8::Value> result = script->Run(context).ToLocalChecked(); - CHECK(result->IsString()); -} +// TODO(codebytere): re-enable this test. +// TEST_F(EnvironmentTest, PreExeuctionPreparation) { +// const v8::HandleScope handle_scope(isolate_); +// const Argv argv; +// Env env {handle_scope, argv}; + +// v8::Local<v8::Context> context = isolate_->GetCurrentContext(); + +// const char* run_script = "process.argv0"; +// v8::Local<v8::Script> script = v8::Script::Compile( +// context, +// v8::String::NewFromOneByte(isolate_, +// reinterpret_cast<const uint8_t*>(run_script), +// v8::NewStringType::kNormal).ToLocalChecked()) +// .ToLocalChecked(); +// v8::Local<v8::Value> result = script->Run(context).ToLocalChecked(); +// CHECK(result->IsString()); +// } TEST_F(EnvironmentTest, AtExitWithEnvironment) { const v8::HandleScope handle_scope(isolate_); diff --git a/test/cctest/test_sockaddr.cc b/test/cctest/test_sockaddr.cc new file mode 100644 index 00000000000..8c23463f11d --- /dev/null +++ b/test/cctest/test_sockaddr.cc @@ -0,0 +1,57 @@ +#include "node_sockaddr-inl.h" +#include "gtest/gtest.h" + +using node::SocketAddress; + +TEST(SocketAddress, SocketAddress) { + CHECK(SocketAddress::is_numeric_host("123.123.123.123")); + CHECK(!SocketAddress::is_numeric_host("localhost")); + + sockaddr_storage storage; + sockaddr_storage storage2; + SocketAddress::ToSockAddr(AF_INET, "123.123.123.123", 443, &storage); + SocketAddress::ToSockAddr(AF_INET, "1.1.1.1", 80, &storage2); + + SocketAddress addr(reinterpret_cast<const sockaddr*>(&storage)); + SocketAddress addr2(reinterpret_cast<const sockaddr*>(&storage2)); + + CHECK_EQ(addr.length(), sizeof(sockaddr_in)); + CHECK_EQ(addr.family(), AF_INET); + CHECK_EQ(addr.address(), "123.123.123.123"); + CHECK_EQ(addr.port(), 443); + + addr.set_flow_label(12345); + CHECK_EQ(addr.flow_label(), 0); + + CHECK_NE(addr, addr2); + CHECK_EQ(addr, addr); + + CHECK_EQ(SocketAddress::Hash()(addr), SocketAddress::Hash()(addr)); + CHECK_NE(SocketAddress::Hash()(addr), SocketAddress::Hash()(addr2)); + +
addr.Update(reinterpret_cast<const sockaddr*>(&storage2), sizeof(sockaddr_in)); + CHECK_EQ(addr.length(), sizeof(sockaddr_in)); + CHECK_EQ(addr.family(), AF_INET); + CHECK_EQ(addr.address(), "1.1.1.1"); + CHECK_EQ(addr.port(), 80); + + SocketAddress::Map map; + map[addr]++; + map[addr]++; + CHECK_EQ(map[addr], 2); +} + +TEST(SocketAddress, SocketAddressIPv6) { + sockaddr_storage storage; + SocketAddress::ToSockAddr(AF_INET6, "::1", 443, &storage); + + SocketAddress addr(reinterpret_cast<const sockaddr*>(&storage)); + + CHECK_EQ(addr.length(), sizeof(sockaddr_in6)); + CHECK_EQ(addr.family(), AF_INET6); + CHECK_EQ(addr.address(), "::1"); + CHECK_EQ(addr.port(), 443); + + addr.set_flow_label(12345); + CHECK_EQ(addr.flow_label(), 12345); +} diff --git a/test/common/README.md b/test/common/README.md index d43b0ebbff3..5479a39d8c2 100644 --- a/test/common/README.md +++ b/test/common/README.md @@ -68,6 +68,13 @@ sure no unexpected rejections occur, because currently they result in silent failures. However, it is useful in some rare cases to disable it, for example if the `unhandledRejection` hook is directly used by the test. +### `enoughTestCpu` + +* [<boolean>][] + +Indicates whether there is more than one CPU, or whether the single CPU has a +speed of at least 1 GHz. + ### `enoughTestMem` * [<boolean>][] diff --git a/test/common/benchmark.js b/test/common/benchmark.js index f630bb9d0e6..56351c92505 100644 --- a/test/common/benchmark.js +++ b/test/common/benchmark.js @@ -8,13 +8,8 @@ const path = require('path'); const runjs = path.join(__dirname, '..', '..', 'benchmark', 'run.js'); -function runBenchmark(name, args, env) { - const argv = []; - - for (let i = 0; i < args.length; i++) { - argv.push('--set'); - argv.push(args[i]); - } +function runBenchmark(name, env) { + const argv = ['test']; argv.push(name); diff --git a/test/common/index.js b/test/common/index.js index 2e2d14dc0e6..28ce841c48c 100644 --- a/test/common/index.js +++ b/test/common/index.js @@ -547,19 +547,6 @@ function expectsError(validator, exact) { }, exact); } -const suffix = 'This is caused by either a bug in Node.js ' + - 'or incorrect usage of Node.js internals.\n' + - 'Please open an issue with this stack trace at ' + - 'https://github.com/nodejs/node/issues\n'; - -function expectsInternalAssertion(fn, message) { - assert.throws(fn, { - message: `${message}\n${suffix}`, - name: 'Error', - code: 'ERR_INTERNAL_ASSERTION' - }); -} - function skipIfInspectorDisabled() { if (!process.features.inspector) { skip('V8 inspector is disabled'); @@ -672,7 +659,7 @@ function invalidArgTypeHelper(input) { return ` Received type ${typeof input} (${inspected})`; } -module.exports = { +const common = { allowGlobals, buildType, canCreateSymLink, @@ -680,7 +667,6 @@ module.exports = { createZeroFilledFile, disableCrashOnUnhandledRejection, expectsError, - expectsInternalAssertion, expectWarning, getArrayBufferViews, getBufferSources, @@ -717,7 +703,7 @@ module.exports = { skipIfReportDisabled, skipIfWorker, - get enoughTestCPU() { + get enoughTestCpu() { const cpus = require('os').cpus(); return Array.isArray(cpus) && (cpus.length > 1 || cpus[0].speed > 999); }, @@ -815,3 +801,12 @@ module.exports = { } }; + +const validProperties = new Set(Object.keys(common)); +module.exports = new Proxy(common, { + get(obj, prop) { + if (!validProperties.has(prop)) + throw new Error(`Using invalid common property: '${prop}'`); + return obj[prop]; + } +});
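The Proxy wrapper just added to test/common/index.js turns a typo'd common property from a silent undefined into an immediate failure at the point of use. For example (the misspelled name is hypothetical):

'use strict';
const common = require('../common');

// Previously this evaluated to undefined and the guard it was meant to
// provide never ran; now it throws:
//   Error: Using invalid common property: 'mustCal'
common.mustCal(() => {});

diff --git a/test/common/index.mjs b/test/common/index.mjs index a5774fc008a..96e6699e3c6 100644 --- a/test/common/index.mjs +++ 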
b/test/common/index.mjs @@ -37,7 +37,6 @@ const { mustNotCall, printSkipMessage, skip, - ArrayStream, nodeProcessAborted, isAlive, expectWarning, @@ -83,7 +82,6 @@ export { mustNotCall, printSkipMessage, skip, - ArrayStream, nodeProcessAborted, isAlive, expectWarning, diff --git a/test/common/inspector-helper.js b/test/common/inspector-helper.js index 42d6baed441..d430137746d 100644 --- a/test/common/inspector-helper.js +++ b/test/common/inspector-helper.js @@ -344,6 +344,9 @@ class NodeInstance extends EventEmitter { this._shutdownPromise = new Promise((resolve) => { this._process.once('exit', (exitCode, signal) => { + if (signal) { + console.error(`[err] child process crashed, signal ${signal}`); + } resolve({ exitCode, signal }); this._running = false; }); diff --git a/test/es-module/test-esm-exports.mjs b/test/es-module/test-esm-exports.mjs index bdd4a975cf7..7dbc9635028 100644 --- a/test/es-module/test-esm-exports.mjs +++ b/test/es-module/test-esm-exports.mjs @@ -32,7 +32,7 @@ import fromInside from '../fixtures/node_modules/pkgexports/lib/hole.js'; ['pkgexports/resolve-self', isRequire ? { default: 'self-cjs' } : { default: 'self-mjs' }], // Resolve self sugar - ['pkgexports-sugar', { default: 'main' }] + ['pkgexports-sugar', { default: 'main' }], ]); for (const [validSpecifier, expected] of validSpecifiers) { @@ -53,48 +53,59 @@ import fromInside from '../fixtures/node_modules/pkgexports/lib/hole.js'; // Sugar cases still encapsulate ['pkgexports-sugar/not-exported.js', './not-exported.js'], ['pkgexports-sugar2/not-exported.js', './not-exported.js'], + // Conditional exports with no match are "not exported" errors + ['pkgexports/invalid1', './invalid1'], + ['pkgexports/invalid4', './invalid4'], ]); const invalidExports = new Map([ - // Even though 'pkgexports/sub/asdf.js' works, alternate "path-like" - // variants do not to prevent confusion and accidental loopholes. - ['pkgexports/sub/./../asdf.js', './sub/./../asdf.js'], + // Directory mappings require a trailing / to work + ['pkgexports/missingtrailer/x', './missingtrailer/'], // This path steps back inside the package but goes through an exports // target that escapes the package, so we still catch that as invalid - ['pkgexports/belowdir/pkgexports/asdf.js', './belowdir/pkgexports/asdf.js'], + ['pkgexports/belowdir/pkgexports/asdf.js', './belowdir/'], // This target file steps below the package ['pkgexports/belowfile', './belowfile'], - // Directory mappings require a trailing / to work - ['pkgexports/missingtrailer/x', './missingtrailer/x'], // Invalid target handling ['pkgexports/null', './null'], - ['pkgexports/invalid1', './invalid1'], ['pkgexports/invalid2', './invalid2'], ['pkgexports/invalid3', './invalid3'], - ['pkgexports/invalid4', './invalid4'], // Missing / invalid fallbacks ['pkgexports/nofallback1', './nofallback1'], ['pkgexports/nofallback2', './nofallback2'], // Reaching into nested node_modules ['pkgexports/nodemodules', './nodemodules'], + // Self resolve invalid + ['pkgexports/resolve-self-invalid', './invalid2'], + ]); + + const invalidSpecifiers = new Map([ + // Even though 'pkgexports/sub/asdf.js' works, alternate "path-like" + // variants do not to prevent confusion and accidental loopholes. + ['pkgexports/sub/./../asdf.js', './sub/./../asdf.js'], ]); for (const [specifier, subpath] of undefinedExports) { loadFixture(specifier).catch(mustCall((err) => { - strictEqual(err.code, (isRequire ? 
'' : 'ERR_') + 'MODULE_NOT_FOUND'); - assertStartsWith(err.message, 'Package exports'); - assertIncludes(err.message, `do not define a '${subpath}' subpath`); + strictEqual(err.code, 'ERR_PACKAGE_PATH_NOT_EXPORTED'); + assertStartsWith(err.message, 'Package subpath '); + assertIncludes(err.message, subpath); })); } for (const [specifier, subpath] of invalidExports) { loadFixture(specifier).catch(mustCall((err) => { - strictEqual(err.code, (isRequire ? '' : 'ERR_') + 'MODULE_NOT_FOUND'); - assertStartsWith(err.message, (isRequire ? 'Package exports' : - 'Cannot resolve')); - assertIncludes(err.message, isRequire ? - `do not define a valid '${subpath}' target` : - `matched for '${subpath}'`); + strictEqual(err.code, 'ERR_INVALID_PACKAGE_TARGET'); + assertStartsWith(err.message, 'Invalid "exports"'); + assertIncludes(err.message, subpath); + })); + } + + for (const [specifier, subpath] of invalidSpecifiers) { + loadFixture(specifier).catch(mustCall((err) => { + strictEqual(err.code, 'ERR_INVALID_MODULE_SPECIFIER'); + assertStartsWith(err.message, 'Package subpath '); + assertIncludes(err.message, subpath); })); } @@ -102,8 +113,8 @@ import fromInside from '../fixtures/node_modules/pkgexports/lib/hole.js'; // of falling back to main if (isRequire) { loadFixture('pkgexports-main').catch(mustCall((err) => { - strictEqual(err.code, 'MODULE_NOT_FOUND'); - assertStartsWith(err.message, 'No valid export'); + strictEqual(err.code, 'ERR_PACKAGE_PATH_NOT_EXPORTED'); + assertStartsWith(err.message, 'No "exports" main '); })); } @@ -130,8 +141,7 @@ import fromInside from '../fixtures/node_modules/pkgexports/lib/hole.js'; // Sugar conditional exports main mixed failure case loadFixture('pkgexports-sugar-fail').catch(mustCall((err) => { strictEqual(err.code, 'ERR_INVALID_PACKAGE_CONFIG'); - assertStartsWith(err.message, (isRequire ? 'Invalid package' : - 'Cannot resolve')); + assertStartsWith(err.message, 'Invalid package'); assertIncludes(err.message, '"exports" cannot contain some keys starting ' + 'with \'.\' and some not. The exports object must either be an object of ' + 'package subpath keys or an object of main entry condition name keys ' + diff --git a/test/es-module/test-esm-fs-promises.mjs b/test/es-module/test-esm-fs-promises.mjs new file mode 100644 index 00000000000..76901f4fc16 --- /dev/null +++ b/test/es-module/test-esm-fs-promises.mjs @@ -0,0 +1,5 @@ +import '../common/index.mjs'; +import { stat } from 'fs/promises'; + +// Should not reject. 
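+// (Passing a URL instance rather than a path string also exercises +// WHATWG file-URL handling in the promises API.)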
+stat(new URL(import.meta.url)); diff --git a/test/es-module/test-esm-nowarn-exports.mjs b/test/es-module/test-esm-nowarn-exports.mjs new file mode 100644 index 00000000000..13bfaf9b4f3 --- /dev/null +++ b/test/es-module/test-esm-nowarn-exports.mjs @@ -0,0 +1,25 @@ +import '../common/index.mjs'; +import { path } from '../common/fixtures.mjs'; +import { strictEqual, ok } from 'assert'; +import { spawn } from 'child_process'; + +const child = spawn(process.execPath, [ + '--experimental-import-meta-resolve', + path('/es-modules/import-resolve-exports.mjs') +]); + +let stderr = ''; +child.stderr.setEncoding('utf8'); +child.stderr.on('data', (data) => { + stderr += data; +}); +child.on('close', (code, signal) => { + strictEqual(code, 0); + strictEqual(signal, null); + ok(stderr.toString().includes( + 'ExperimentalWarning: The ESM module loader is experimental' + )); + ok(!stderr.toString().includes( + 'ExperimentalWarning: Conditional exports' + )); }); diff --git a/test/fixtures/es-modules/import-resolve-exports.mjs b/test/fixtures/es-modules/import-resolve-exports.mjs new file mode 100644 index 00000000000..0bbce4fbc5e --- /dev/null +++ b/test/fixtures/es-modules/import-resolve-exports.mjs @@ -0,0 +1,10 @@ +import { strictEqual } from 'assert'; + +(async () => { + const resolved = await import.meta.resolve('pkgexports-sugar'); + strictEqual(typeof resolved, 'string'); +})() +.catch((e) => { + console.error(e); + process.exit(1); +}); diff --git a/test/fixtures/node_modules/pkgexports/package.json b/test/fixtures/node_modules/pkgexports/package.json index 02e06f0ebe5..7f417ad5457 100644 --- a/test/fixtures/node_modules/pkgexports/package.json +++ b/test/fixtures/node_modules/pkgexports/package.json @@ -35,6 +35,10 @@ "./resolve-self": { "require": "./resolve-self.js", "import": "./resolve-self.mjs" + }, + "./resolve-self-invalid": { + "require": "./resolve-self-invalid.js", + "import": "./resolve-self-invalid.mjs" + } } } diff --git a/test/fixtures/node_modules/pkgexports/resolve-self-invalid.js b/test/fixtures/node_modules/pkgexports/resolve-self-invalid.js new file mode 100644 index 00000000000..c3ebf76fc1b --- /dev/null +++ b/test/fixtures/node_modules/pkgexports/resolve-self-invalid.js @@ -0,0 +1 @@ +require('pkgexports/invalid2'); diff --git a/test/fixtures/node_modules/pkgexports/resolve-self-invalid.mjs b/test/fixtures/node_modules/pkgexports/resolve-self-invalid.mjs new file mode 100644 index 00000000000..1edbf62c4b0 --- /dev/null +++ b/test/fixtures/node_modules/pkgexports/resolve-self-invalid.mjs @@ -0,0 +1 @@ +import 'pkgexports/invalid2'; diff --git a/test/fixtures/require-empty-main/index.js b/test/fixtures/require-empty-main/index.js new file mode 100644 index 00000000000..d2ed2dd2202 --- /dev/null +++ b/test/fixtures/require-empty-main/index.js @@ -0,0 +1,2 @@ +'use strict'; +module.exports = 42; diff --git a/test/fixtures/require-empty-main/package.json b/test/fixtures/require-empty-main/package.json new file mode 100644 index 00000000000..3f0b7c677ac --- /dev/null +++ b/test/fixtures/require-empty-main/package.json @@ -0,0 +1 @@ +{"main":""} diff --git a/test/internet/test-dns-any.js b/test/internet/test-dns-any.js index 3e8eb07e7e6..d60f00f0980 100644 --- a/test/internet/test-dns-any.js +++ b/test/internet/test-dns-any.js @@ -115,28 +115,6 @@ function processResult(res) { return types; } -TEST(async function test_google(done) { - function validateResult(res) { - const types = processResult(res); - assert.ok( - types.A && types.AAAA && types.MX && types.NS && types.TXT &&
types.SOA, - `Missing record type, found ${Object.keys(types)}`); - } - - validateResult(await dnsPromises.resolve('google.com', 'ANY')); - - const req = dns.resolve( - 'google.com', - 'ANY', - common.mustCall(function(err, ret) { - assert.ifError(err); - validateResult(ret); - done(); - })); - - checkWrap(req); -}); - TEST(async function test_sip2sip_for_naptr(done) { function validateResult(res) { const types = processResult(res); diff --git a/test/js-native-api/test_bigint/test_bigint.c b/test/js-native-api/test_bigint/test_bigint.c index 4befc171baa..c62a0a6a6c2 100644 --- a/test/js-native-api/test_bigint/test_bigint.c +++ b/test/js-native-api/test_bigint/test_bigint.c @@ -1,5 +1,3 @@ -#define NAPI_EXPERIMENTAL - #include #include #include diff --git a/test/js-native-api/test_general/test.js b/test/js-native-api/test_general/test.js index 9b847f4f339..a4b3df5535a 100644 --- a/test/js-native-api/test_general/test.js +++ b/test/js-native-api/test_general/test.js @@ -33,7 +33,7 @@ assert.notStrictEqual(test_general.testGetPrototype(baseObject), test_general.testGetPrototype(extendedObject)); // Test version management functions. The expected version is currently 4. -assert.strictEqual(test_general.testGetVersion(), 5); +assert.strictEqual(test_general.testGetVersion(), 6); [ 123, diff --git a/test/js-native-api/test_instance_data/test_instance_data.c b/test/js-native-api/test_instance_data/test_instance_data.c index a64ebec0c1a..5255c3e4a02 100644 --- a/test/js-native-api/test_instance_data/test_instance_data.c +++ b/test/js-native-api/test_instance_data/test_instance_data.c @@ -1,6 +1,5 @@ #include #include -#define NAPI_EXPERIMENTAL #include #include "../common.h" diff --git a/test/js-native-api/test_object/test_null.c b/test/js-native-api/test_object/test_null.c index 523217f3c0a..b6bf4df31cc 100644 --- a/test/js-native-api/test_object/test_null.c +++ b/test/js-native-api/test_object/test_null.c @@ -1,4 +1,3 @@ -#define NAPI_EXPERIMENTAL #include #include "../common.h" diff --git a/test/js-native-api/test_object/test_object.c b/test/js-native-api/test_object/test_object.c index 9d9589238d4..08f619bf7ff 100644 --- a/test/js-native-api/test_object/test_object.c +++ b/test/js-native-api/test_object/test_object.c @@ -1,5 +1,3 @@ -#define NAPI_EXPERIMENTAL - #include #include "../common.h" #include diff --git a/test/known_issues/known_issues.status b/test/known_issues/known_issues.status index fdf2d86bcd5..8dd569a3950 100644 --- a/test/known_issues/known_issues.status +++ b/test/known_issues/known_issues.status @@ -5,6 +5,11 @@ prefix known_issues # sample-test : SKIP [true] # This section applies to all platforms +# This issue is unfixable with the current implementations of V8 and +# Node.js. Changes to make this fixable are unlikely to happen in the +# foreseeable future. The test itself is flaky and skipped. It +# serves as a demonstration of the issue only. 
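+# (queueMicrotask() callbacks are not accounted to the vm timeout scope, so +# a sandboxed script can keep scheduling microtasks past its deadline.)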
+test-vm-timeout-escape-queuemicrotask: SKIP [$system==win32] diff --git a/test/known_issues/test-stream-writable-sync-error.js b/test/known_issues/test-stream-writable-sync-error.js new file mode 100644 index 00000000000..202cf7bf23e --- /dev/null +++ b/test/known_issues/test-stream-writable-sync-error.js @@ -0,0 +1,44 @@ +'use strict'; +const common = require('../common'); + +// Tests for the regression in _stream_writable discussed in +// https://github.com/nodejs/node/pull/31756 + +// Specifically, when a write callback is invoked synchronously +// with an error, and autoDestroy is not being used, the error +// should still be emitted on nextTick. + +const { Writable } = require('stream'); + +class MyStream extends Writable { + #cb = undefined; + + constructor() { + super({ autoDestroy: false }); + } + + _write(_, __, cb) { + this.#cb = cb; + } + + close() { + // Synchronously invoke the callback with an error. + this.#cb(new Error('foo')); + } +} + +const stream = new MyStream(); + +const mustError = common.mustCall(2); + +stream.write('test', () => {}); + +// Both error callbacks should be invoked. + +stream.on('error', mustError); + +stream.close(); + +// Without the fix in #31756, the error handler +// added after the call to close will not be invoked. +stream.on('error', mustError); diff --git a/test/known_issues/test-vm-timeout-escape-queuemicrotask.js b/test/known_issues/test-vm-timeout-escape-queuemicrotask.js index df0531bae1d..0d3a0b0c5c5 100644 --- a/test/known_issues/test-vm-timeout-escape-queuemicrotask.js +++ b/test/known_issues/test-vm-timeout-escape-queuemicrotask.js @@ -12,8 +12,8 @@ const NS_PER_MS = 1000000n; const hrtime = process.hrtime.bigint; -const loopDuration = common.platformTimeout(100n); -const timeout = common.platformTimeout(10); +const loopDuration = common.platformTimeout(1000n); +const timeout = common.platformTimeout(100); function loop() { const start = hrtime(); diff --git a/test/node-api/test_general/test_general.c b/test/node-api/test_general/test_general.c index 05bccaf5c2c..be805f782be 100644 --- a/test/node-api/test_general/test_general.c +++ b/test/node-api/test_general/test_general.c @@ -1,4 +1,3 @@ -#define NAPI_EXPERIMENTAL #include #include #include "../../js-native-api/common.h" diff --git a/test/node-api/test_instance_data/addon.c b/test/node-api/test_instance_data/addon.c index 928b4dfaf8e..7cf27bf28ab 100644 --- a/test/node-api/test_instance_data/addon.c +++ b/test/node-api/test_instance_data/addon.c @@ -1,6 +1,5 @@ #include #include -#define NAPI_EXPERIMENTAL #include static void addon_free(napi_env env, void* data, void* hint) { diff --git a/test/node-api/test_instance_data/test_instance_data.c b/test/node-api/test_instance_data/test_instance_data.c index 1a814e91c06..24fd502e836 100644 --- a/test/node-api/test_instance_data/test_instance_data.c +++ b/test/node-api/test_instance_data/test_instance_data.c @@ -1,6 +1,5 @@ #include #include -#define NAPI_EXPERIMENTAL #include #include "../../js-native-api/common.h" diff --git a/test/node-api/test_instance_data/test_ref_then_set.c b/test/node-api/test_instance_data/test_ref_then_set.c index a0df1e5b9f8..10c779d3241 100644 --- a/test/node-api/test_instance_data/test_ref_then_set.c +++ b/test/node-api/test_instance_data/test_ref_then_set.c @@ -1,6 +1,5 @@ #include #include -#define NAPI_EXPERIMENTAL #include napi_value addon_new(napi_env env, napi_value exports, bool ref_first); diff --git a/test/node-api/test_instance_data/test_set_then_ref.c 
b/test/node-api/test_instance_data/test_set_then_ref.c index 6ebed2d1e86..9a1b31aeed3 100644 --- a/test/node-api/test_instance_data/test_set_then_ref.c +++ b/test/node-api/test_instance_data/test_set_then_ref.c @@ -1,6 +1,5 @@ #include #include -#define NAPI_EXPERIMENTAL #include napi_value addon_new(napi_env env, napi_value exports, bool ref_first); diff --git a/test/parallel/parallel.status b/test/parallel/parallel.status index 9b8c4807d07..bfe88f9720d 100644 --- a/test/parallel/parallel.status +++ b/test/parallel/parallel.status @@ -5,8 +5,6 @@ prefix parallel # sample-test : PASS,FLAKY [true] # This section applies to all platforms -# https://github.com/nodejs/node/issues/23207 -test-net-connect-options-port: PASS,FLAKY [$system==win32] # https://github.com/nodejs/node/issues/20750 @@ -31,6 +29,13 @@ test-worker-message-port-transfer-terminate: PASS,FLAKY [$system==linux] [$system==macos] +# https://github.com/nodejs/node/issues/30030 +test-dgram-connect-send-empty-buffer: PASS,FLAKY +test-dgram-connect-send-empty-array: PASS,FLAKY +test-dgram-connect-send-empty-packet: PASS,FLAKY +test-dgram-send-empty-array: PASS,FLAKY +test-dgram-send-empty-buffer: PASS,FLAKY +test-dgram-send-empty-packet: PASS,FLAKY [$arch==arm || $arch==arm64] # https://github.com/nodejs/node/issues/26610 diff --git a/test/parallel/test-buffer-alloc.js b/test/parallel/test-buffer-alloc.js index b54fd88cc25..070a3803802 100644 --- a/test/parallel/test-buffer-alloc.js +++ b/test/parallel/test-buffer-alloc.js @@ -8,8 +8,8 @@ const SlowBuffer = require('buffer').SlowBuffer; // Verify the maximum Uint8Array size. There is no concrete limit by spec. The // internal limits should be updated if this fails. assert.throws( - () => new Uint8Array(2 ** 31), - { message: 'Invalid typed array length: 2147483648' } + () => new Uint8Array(2 ** 32), + { message: 'Invalid typed array length: 4294967296' } ); const b = Buffer.allocUnsafe(1024); diff --git a/test/parallel/test-buffer-over-max-length.js b/test/parallel/test-buffer-over-max-length.js index ac1f7cda8cf..b04f89aae03 100644 --- a/test/parallel/test-buffer-over-max-length.js +++ b/test/parallel/test-buffer-over-max-length.js @@ -25,5 +25,5 @@ assert.throws(() => Buffer.allocUnsafe(kMaxLength + 1), bufferMaxSizeMsg); assert.throws(() => Buffer.allocUnsafeSlow(kMaxLength + 1), bufferMaxSizeMsg); // issue GH-4331 -assert.throws(() => Buffer.allocUnsafe(0xFFFFFFFF), bufferMaxSizeMsg); +assert.throws(() => Buffer.allocUnsafe(0x100000000), bufferMaxSizeMsg); assert.throws(() => Buffer.allocUnsafe(0xFFFFFFFFF), bufferMaxSizeMsg); diff --git a/test/parallel/test-cli-node-options.js b/test/parallel/test-cli-node-options.js index 51a0d9a952e..0bbe4c9eae1 100644 --- a/test/parallel/test-cli-node-options.js +++ b/test/parallel/test-cli-node-options.js @@ -48,15 +48,14 @@ expectNoWorker('--trace-event-file-pattern {pid}-${rotation}.trace_events ' + '--trace-event-categories node.async_hooks', 'B\n'); expect('--unhandled-rejections=none', 'B\n'); -if (!common.isWindows) { +if (common.isLinux) { expect('--perf-basic-prof', 'B\n'); expect('--perf-basic-prof-only-functions', 'B\n'); -} -if (common.isLinux && ['arm', 'x64'].includes(process.arch)) { - // PerfJitLogger is only implemented in Linux. 
- expect('--perf-prof', 'B\n'); - expect('--perf-prof-unwinding-info', 'B\n'); + if (['arm', 'x64'].includes(process.arch)) { + expect('--perf-prof', 'B\n'); + expect('--perf-prof-unwinding-info', 'B\n'); + } } if (common.hasCrypto) { @@ -68,6 +67,7 @@ if (common.hasCrypto) { // V8 options expect('--abort_on-uncaught_exception', 'B\n'); expect('--disallow-code-generation-from-strings', 'B\n'); +expect('--jitless', 'B\n'); expect('--max-old-space-size=0', 'B\n'); expect('--stack-trace-limit=100', /(\s*at f \(\[(eval|worker eval)\]:1:\d*\)\r?\n)/, diff --git a/test/parallel/test-crypto-dh-stateless.js b/test/parallel/test-crypto-dh-stateless.js index f00ee997cfc..b01cea76b22 100644 --- a/test/parallel/test-crypto-dh-stateless.js +++ b/test/parallel/test-crypto-dh-stateless.js @@ -196,9 +196,10 @@ for (const [params1, params2] of [ test(crypto.generateKeyPairSync('ec', { namedCurve: 'secp256k1' }), crypto.generateKeyPairSync('ec', { namedCurve: 'secp256k1' })); +const not256k1 = crypto.getCurves().find((c) => /^sec.*(224|384|512)/.test(c)); assert.throws(() => { test(crypto.generateKeyPairSync('ec', { namedCurve: 'secp256k1' }), - crypto.generateKeyPairSync('ec', { namedCurve: 'secp224k1' })); + crypto.generateKeyPairSync('ec', { namedCurve: not256k1 })); }, { name: 'Error', code: 'ERR_OSSL_EVP_DIFFERENT_PARAMETERS' diff --git a/test/parallel/test-crypto-sign-verify.js b/test/parallel/test-crypto-sign-verify.js index e3d3d818a1a..b70bfccae47 100644 --- a/test/parallel/test-crypto-sign-verify.js +++ b/test/parallel/test-crypto-sign-verify.js @@ -527,6 +527,9 @@ assert.throws( // Unlike DER signatures, IEEE P1363 signatures have a predictable length. assert.strictEqual(sig.length, length); assert.strictEqual(crypto.verify('sha1', data, opts, sig), true); + assert.strictEqual(crypto.createVerify('sha1') + .update(data) + .verify(opts, sig), true); // Test invalid signature lengths. for (const i of [-2, -1, 1, 2, 4, 8]) { @@ -552,6 +555,14 @@ assert.throws( ok ); + assert.strictEqual( + crypto.createVerify('sha256').update(data).verify({ + key: fixtures.readKey('ec-key.pem'), + dsaEncoding: 'ieee-p1363' + }, extSig), + ok + ); + extSig[Math.floor(Math.random() * extSig.length)] ^= 1; } diff --git a/test/parallel/test-crypto-update-encoding.js b/test/parallel/test-crypto-update-encoding.js new file mode 100644 index 00000000000..e1e6d029aa5 --- /dev/null +++ b/test/parallel/test-crypto-update-encoding.js @@ -0,0 +1,22 @@ +'use strict'; +const common = require('../common'); + +if (!common.hasCrypto) + common.skip('missing crypto'); + +const crypto = require('crypto'); + +const zeros = Buffer.alloc; +const key = zeros(16); +const iv = zeros(16); + +const cipher = () => crypto.createCipheriv('aes-128-cbc', key, iv); +const decipher = () => crypto.createDecipheriv('aes-128-cbc', key, iv); +const hash = () => crypto.createHash('sha256'); +const hmac = () => crypto.createHmac('sha256', key); +const sign = () => crypto.createSign('sha256'); +const verify = () => crypto.createVerify('sha256'); + +for (const f of [cipher, decipher, hash, hmac, sign, verify]) + for (const n of [15, 16]) + f().update(zeros(n), 'hex'); // Should ignore inputEncoding.
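The new test-crypto-update-encoding.js above pins down one documented detail of the crypto update() APIs: the optional inputEncoding argument applies only to string inputs and is ignored when the chunk is already a Buffer. The following is a minimal standalone sketch of that behavior, illustrative only and not part of the patch; it uses createHash from the same API family the test exercises:

'use strict';
const assert = require('assert');
const crypto = require('crypto');

// For Buffer input the 'hex' inputEncoding must be ignored, so both
// digests are computed over the same 15 raw bytes and therefore match.
const withEncoding =
  crypto.createHash('sha256').update(Buffer.alloc(15), 'hex').digest('hex');
const withoutEncoding =
  crypto.createHash('sha256').update(Buffer.alloc(15)).digest('hex');
assert.strictEqual(withEncoding, withoutEncoding);

// For string input the encoding does apply: 'aabb' decodes to two bytes,
// matching a digest over the equivalent Buffer.
const fromString =
  crypto.createHash('sha256').update('aabb', 'hex').digest('hex');
const fromBuffer =
  crypto.createHash('sha256').update(Buffer.from('aabb', 'hex')).digest('hex');
assert.strictEqual(fromString, fromBuffer);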
diff --git a/test/parallel/test-debug-usage.js b/test/parallel/test-debug-usage.js index 4582ac7bb5b..eb9594f236b 100644 --- a/test/parallel/test-debug-usage.js +++ b/test/parallel/test-debug-usage.js @@ -11,8 +11,7 @@ child.stderr.setEncoding('utf8'); const expectedLines = [ /^\(node:\d+\) \[DEP0068\] DeprecationWarning:/, - /^Usage: .*node.* debug script\.js$/, - /^ .*node.* debug :$/ + /Usage: .*node.* debug script\.js\r?\n .*node.* debug :\r?\n .*node.* debug -p \r?\n$/, ]; let actualUsageMessage = ''; @@ -21,11 +20,10 @@ child.stderr.on('data', function(data) { }); child.on('exit', common.mustCall(function(code) { - const outputLines = actualUsageMessage.split('\n'); assert.strictEqual(code, 1); for (let i = 0; i < expectedLines.length; i++) assert.ok( - expectedLines[i].test(outputLines[i]), - `${outputLines[i]} did not match ${expectedLines[i]}` + expectedLines[i].test(actualUsageMessage), + `${actualUsageMessage} did not match ${expectedLines[i]}` ); })); diff --git a/test/parallel/test-event-emitter-remove-all-listeners.js b/test/parallel/test-event-emitter-remove-all-listeners.js index 3dfe65a8b4b..c62183fd08c 100644 --- a/test/parallel/test-event-emitter-remove-all-listeners.js +++ b/test/parallel/test-event-emitter-remove-all-listeners.js @@ -108,3 +108,16 @@ function expect(expected) { ee._events = undefined; assert.strictEqual(ee, ee.removeAllListeners()); } + +{ + const ee = new events.EventEmitter(); + const symbol = Symbol('symbol'); + const noop = common.mustNotCall(); + ee.on(symbol, noop); + + ee.on('removeListener', common.mustCall((...args) => { + assert.deepStrictEqual(args, [symbol, noop]); + })); + + ee.removeAllListeners(); +} diff --git a/test/parallel/test-file-write-stream.js b/test/parallel/test-file-write-stream.js index 1055fac698f..6cab71a4353 100644 --- a/test/parallel/test-file-write-stream.js +++ b/test/parallel/test-file-write-stream.js @@ -63,8 +63,7 @@ file callbacks.close++; console.error('write after end should not be allowed'); - file.write('should not work anymore'); - file.on('error', common.expectsError({ + file.write('should not work anymore', common.expectsError({ code: 'ERR_STREAM_WRITE_AFTER_END', name: 'Error', message: 'write after end' diff --git a/test/parallel/test-file-write-stream2.js b/test/parallel/test-file-write-stream2.js index 2654c8bca20..487b5d44054 100644 --- a/test/parallel/test-file-write-stream2.js +++ b/test/parallel/test-file-write-stream2.js @@ -33,7 +33,7 @@ const filepath = path.join(tmpdir.path, 'write.txt'); const EXPECTED = '012345678910'; -const cb_expected = 'write open drain write drain close error '; +const cb_expected = 'write open drain write drain close '; let cb_occurred = ''; let countDrains = 0; @@ -92,16 +92,11 @@ file.on('drain', function() { file.on('close', function() { cb_occurred += 'close '; assert.strictEqual(file.bytesWritten, EXPECTED.length * 2); - file.write('should not work anymore'); + file.write('should not work anymore', (err) => { + assert.ok(err.message.includes('write after end')); + }); }); - -file.on('error', function(err) { - cb_occurred += 'error '; - assert.ok(err.message.includes('write after end')); -}); - - for (let i = 0; i < 11; i++) { const ret = file.write(String(i)); console.error(`${i} ${ret}`); diff --git a/test/parallel/test-fs-fchown.js b/test/parallel/test-fs-fchown.js index cf28d54e9d9..03a9ef3780a 100644 --- a/test/parallel/test-fs-fchown.js +++ b/test/parallel/test-fs-fchown.js @@ -44,7 +44,7 @@ function testGid(input, errObj) { testGid(input, errObj); }); -[-1, 2 
** 32].forEach((input) => { +[-2, 2 ** 32].forEach((input) => { const errObj = { code: 'ERR_OUT_OF_RANGE', name: 'RangeError', @@ -52,8 +52,8 @@ function testGid(input, errObj) { `>= 0 && <= 2147483647. Received ${input}` }; testFd(input, errObj); - errObj.message = 'The value of "uid" is out of range. It must be >= 0 && ' + - `< 4294967296. Received ${input}`; + errObj.message = 'The value of "uid" is out of range. It must be >= -1 && ' + + `<= 4294967295. Received ${input}`; testUid(input, errObj); errObj.message = errObj.message.replace('uid', 'gid'); testGid(input, errObj); diff --git a/test/parallel/test-fs-promises-exists.js b/test/parallel/test-fs-promises-exists.js new file mode 100644 index 00000000000..d56308257e6 --- /dev/null +++ b/test/parallel/test-fs-promises-exists.js @@ -0,0 +1,6 @@ +'use strict'; + +require('../common'); +const assert = require('assert'); + +assert.strictEqual(require('fs/promises'), require('fs').promises); diff --git a/test/parallel/test-fs-read-stream-autoClose.js b/test/parallel/test-fs-read-stream-autoClose.js new file mode 100644 index 00000000000..e7989ee7911 --- /dev/null +++ b/test/parallel/test-fs-read-stream-autoClose.js @@ -0,0 +1,16 @@ +'use strict'; + +const common = require('../common'); +const fs = require('fs'); +const path = require('path'); +const assert = require('assert'); +const tmpdir = require('../common/tmpdir'); +const writeFile = path.join(tmpdir.path, 'write-autoClose.txt'); +tmpdir.refresh(); + +const file = fs.createWriteStream(writeFile, { autoClose: true }); + +file.on('finish', common.mustCall(() => { + assert.strictEqual(file.destroyed, false); +})); +file.end('asd'); diff --git a/test/parallel/test-fs-write-stream-autoclose-option.js b/test/parallel/test-fs-write-stream-autoclose-option.js index e39f4d615ab..8d205fbc69f 100644 --- a/test/parallel/test-fs-write-stream-autoclose-option.js +++ b/test/parallel/test-fs-write-stream-autoclose-option.js @@ -27,8 +27,8 @@ function next() { stream.end(); stream.on('finish', common.mustCall(function() { assert.strictEqual(stream.closed, false); - assert.strictEqual(stream.fd, null); stream.on('close', common.mustCall(function() { + assert.strictEqual(stream.fd, null); assert.strictEqual(stream.closed, true); process.nextTick(next2); })); @@ -51,8 +51,8 @@ function next3() { stream.end(); stream.on('finish', common.mustCall(function() { assert.strictEqual(stream.closed, false); - assert.strictEqual(stream.fd, null); stream.on('close', common.mustCall(function() { + assert.strictEqual(stream.fd, null); assert.strictEqual(stream.closed, true); })); })); diff --git a/test/parallel/test-fs-write-stream.js b/test/parallel/test-fs-write-stream.js index f84b727c866..9f422a64437 100644 --- a/test/parallel/test-fs-write-stream.js +++ b/test/parallel/test-fs-write-stream.js @@ -56,13 +56,12 @@ tmpdir.refresh(); // Throws if data is not of type Buffer. 
{ const stream = fs.createWriteStream(file); - stream.on('error', common.expectsError({ + stream.on('error', common.mustNotCall()); + assert.throws(() => { + stream.write(42); + }, { code: 'ERR_INVALID_ARG_TYPE', name: 'TypeError' - })); - stream.write(42, null, common.expectsError({ - code: 'ERR_INVALID_ARG_TYPE', - name: 'TypeError' - })); + }); stream.destroy(); } diff --git a/test/parallel/test-gc-net-timeout.js b/test/parallel/test-gc-net-timeout.js index 51d9b8ca09b..9ba6d2bc174 100644 --- a/test/parallel/test-gc-net-timeout.js +++ b/test/parallel/test-gc-net-timeout.js @@ -12,6 +12,9 @@ function serverHandler(sock) { sock.on('close', function() { clearTimeout(timer); }); + sock.on('end', function() { + clearTimeout(timer); + }); sock.on('error', function(err) { assert.strictEqual(err.code, 'ECONNRESET'); }); diff --git a/test/parallel/test-http-agent-timeout-option.js b/test/parallel/test-http-agent-timeout-option.js index d0c05827f23..60a86779838 100644 --- a/test/parallel/test-http-agent-timeout-option.js +++ b/test/parallel/test-http-agent-timeout-option.js @@ -18,6 +18,6 @@ request.on('socket', mustCall((socket) => { const listeners = socket.listeners('timeout'); - strictEqual(listeners.length, 1); - strictEqual(listeners[0], request.timeoutCb); + strictEqual(listeners.length, 2); + strictEqual(listeners[1], request.timeoutCb); })); diff --git a/test/parallel/test-http-agent-timeout.js b/test/parallel/test-http-agent-timeout.js new file mode 100644 index 00000000000..d8d34414d99 --- /dev/null +++ b/test/parallel/test-http-agent-timeout.js @@ -0,0 +1,94 @@ +'use strict'; + +const common = require('../common'); +const assert = require('assert'); +const http = require('http'); + +{ + // Ensure reuse of successful sockets. + + const agent = new http.Agent({ keepAlive: true }); + + const server = http.createServer((req, res) => { + res.end(); + }); + + server.listen(0, common.mustCall(() => { + let socket; + http.get({ port: server.address().port, agent }) + .on('response', common.mustCall((res) => { + socket = res.socket; + assert(socket); + res.resume(); + socket.on('free', common.mustCall(() => { + http.get({ port: server.address().port, agent }) + .on('response', common.mustCall((res) => { + assert.strictEqual(socket, res.socket); + assert(socket); + agent.destroy(); + server.close(); + })); + })); + })); + })); +} + +{ + // Ensure that timed-out sockets are not reused. + + const agent = new http.Agent({ keepAlive: true, timeout: 50 }); + + const server = http.createServer((req, res) => { + res.end(); + }); + + server.listen(0, common.mustCall(() => { + http.get({ port: server.address().port, agent }) .on('response', common.mustCall((res) => { + const socket = res.socket; + assert(socket); + res.resume(); + socket.on('free', common.mustCall(() => { + socket.on('timeout', common.mustCall(() => { + http.get({ port: server.address().port, agent }) + .on('response', common.mustCall((res) => { + assert.notStrictEqual(socket, res.socket); + assert.strictEqual(socket.destroyed, true); + agent.destroy(); + server.close(); + })); + })); + })); + })); + })); +} + +{ + // Ensure that destroyed sockets are not reused.
+ + const agent = new http.Agent({ keepAlive: true }); + + const server = http.createServer((req, res) => { + res.end(); + }); + + server.listen(0, common.mustCall(() => { + let socket; + http.get({ port: server.address().port, agent }) + .on('response', common.mustCall((res) => { + socket = res.socket; + assert(socket); + res.resume(); + socket.on('free', common.mustCall(() => { + socket.destroy(); + http.get({ port: server.address().port, agent }) + .on('response', common.mustCall((res) => { + assert.notStrictEqual(socket, res.socket); + assert(socket); + agent.destroy(); + server.close(); + })); + })); + })); + })); +} diff --git a/test/parallel/test-http-client-abort-destroy.js b/test/parallel/test-http-client-abort-destroy.js new file mode 100644 index 00000000000..6db2ea5682e --- /dev/null +++ b/test/parallel/test-http-client-abort-destroy.js @@ -0,0 +1,71 @@ +'use strict'; +const common = require('../common'); +const http = require('http'); +const assert = require('assert'); + +{ + // abort + + const server = http.createServer(common.mustCall((req, res) => { + res.end('Hello'); + })); + + server.listen(0, common.mustCall(() => { + const options = { port: server.address().port }; + const req = http.get(options, common.mustCall((res) => { + res.on('data', (data) => { + req.abort(); + assert.strictEqual(req.aborted, true); + assert.strictEqual(req.destroyed, true); + server.close(); + }); + })); + req.on('error', common.mustNotCall()); + assert.strictEqual(req.aborted, false); + assert.strictEqual(req.destroyed, false); + })); +} + +{ + // destroy + res + + const server = http.createServer(common.mustCall((req, res) => { + res.end('Hello'); + })); + + server.listen(0, common.mustCall(() => { + const options = { port: server.address().port }; + const req = http.get(options, common.mustCall((res) => { + res.on('data', (data) => { + req.destroy(); + assert.strictEqual(req.aborted, false); + assert.strictEqual(req.destroyed, true); + server.close(); + }); + })); + req.on('error', common.mustNotCall()); + assert.strictEqual(req.aborted, false); + assert.strictEqual(req.destroyed, false); + })); +} + +{ + // destroy + + const server = http.createServer(common.mustNotCall((req, res) => { + })); + + server.listen(0, common.mustCall(() => { + const options = { port: server.address().port }; + const req = http.get(options, common.mustNotCall()); + req.on('error', common.mustCall((err) => { + assert.strictEqual(err.code, 'ECONNRESET'); + server.close(); + })); + assert.strictEqual(req.aborted, false); + assert.strictEqual(req.destroyed, false); + req.destroy(); + assert.strictEqual(req.aborted, false); + assert.strictEqual(req.destroyed, true); + })); +} diff --git a/test/parallel/test-http-client-abort-keep-alive-queued-tcp-socket.js b/test/parallel/test-http-client-abort-keep-alive-queued-tcp-socket.js index 6282aa3da7c..c9614f01c3d 100644 --- a/test/parallel/test-http-client-abort-keep-alive-queued-tcp-socket.js +++ b/test/parallel/test-http-client-abort-keep-alive-queued-tcp-socket.js @@ -3,34 +3,45 @@ const common = require('../common'); const assert = require('assert'); const http = require('http'); -let socketsCreated = 0; -class Agent extends http.Agent { - createConnection(options, oncreate) { - const socket = super.createConnection(options, oncreate); - socketsCreated++; - return socket; +for (const destroyer of ['destroy', 'abort']) { + let socketsCreated = 0; + + class Agent extends http.Agent { + createConnection(options, oncreate) { + const socket = super.createConnection(options, 
oncreate); + socketsCreated++; + return socket; + } } -} -const server = http.createServer((req, res) => res.end()); + const server = http.createServer((req, res) => res.end()); -server.listen(0, common.mustCall(() => { - const port = server.address().port; - const agent = new Agent({ - keepAlive: true, - maxSockets: 1 - }); + server.listen(0, common.mustCall(() => { + const port = server.address().port; + const agent = new Agent({ + keepAlive: true, + maxSockets: 1 + }); - http.get({ agent, port }, (res) => res.resume()); + http.get({ agent, port }, (res) => res.resume()); - const req = http.get({ agent, port }, common.mustNotCall()); - req.abort(); + const req = http.get({ agent, port }, common.mustNotCall()); + req[destroyer](); - http.get({ agent, port }, common.mustCall((res) => { - res.resume(); - assert.strictEqual(socketsCreated, 1); - agent.destroy(); - server.close(); + if (destroyer === 'destroy') { + req.on('error', common.mustCall((err) => { + assert.strictEqual(err.code, 'ECONNRESET'); + })); + } else { + req.on('error', common.mustNotCall()); + } + + http.get({ agent, port }, common.mustCall((res) => { + res.resume(); + assert.strictEqual(socketsCreated, 1); + agent.destroy(); + server.close(); + })); })); -})); +} diff --git a/test/parallel/test-http-client-close-event.js b/test/parallel/test-http-client-close-event.js index 7573931ac48..b539423a80f 100644 --- a/test/parallel/test-http-client-close-event.js +++ b/test/parallel/test-http-client-close-event.js @@ -14,12 +14,12 @@ server.listen(0, common.mustCall(() => { const req = http.get({ port: server.address().port }, common.mustNotCall()); let errorEmitted = false; - req.on('error', (err) => { + req.on('error', common.mustCall((err) => { errorEmitted = true; assert.strictEqual(err.constructor, Error); assert.strictEqual(err.message, 'socket hang up'); assert.strictEqual(err.code, 'ECONNRESET'); - }); + })); req.on('close', common.mustCall(() => { assert.strictEqual(errorEmitted, true); diff --git a/test/parallel/test-http-client-set-timeout-after-end.js b/test/parallel/test-http-client-set-timeout-after-end.js index 99bbf3dd1bc..93eab80938a 100644 --- a/test/parallel/test-http-client-set-timeout-after-end.js +++ b/test/parallel/test-http-client-set-timeout-after-end.js @@ -20,7 +20,7 @@ server.listen(0, () => { const req = get({ agent, port }, (res) => { res.on('end', () => { strictEqual(req.setTimeout(0), req); - strictEqual(socket.listenerCount('timeout'), 0); + strictEqual(socket.listenerCount('timeout'), 1); agent.destroy(); server.close(); }); diff --git a/test/parallel/test-http-client-set-timeout.js b/test/parallel/test-http-client-set-timeout.js index 7717b7d6069..51b6622a6b7 100644 --- a/test/parallel/test-http-client-set-timeout.js +++ b/test/parallel/test-http-client-set-timeout.js @@ -42,7 +42,7 @@ server.listen(0, mustCall(() => { })); req.on('timeout', mustCall(() => { - strictEqual(req.socket.listenerCount('timeout'), 0); + strictEqual(req.socket.listenerCount('timeout'), 1); req.destroy(); })); })); diff --git a/test/parallel/test-http-client-timeout-option-listeners.js b/test/parallel/test-http-client-timeout-option-listeners.js index 727b5fddf09..dac89b5fd1a 100644 --- a/test/parallel/test-http-client-timeout-option-listeners.js +++ b/test/parallel/test-http-client-timeout-option-listeners.js @@ -24,9 +24,9 @@ const options = { server.listen(0, options.host, common.mustCall(() => { options.port = server.address().port; doRequest(common.mustCall((numListeners) => { - assert.strictEqual(numListeners, 1); 
+ assert.strictEqual(numListeners, 2); doRequest(common.mustCall((numListeners) => { - assert.strictEqual(numListeners, 1); + assert.strictEqual(numListeners, 2); server.close(); agent.destroy(); })); diff --git a/test/parallel/test-http-client-timeout-option-with-agent.js b/test/parallel/test-http-client-timeout-option-with-agent.js index 594dd1215f4..833c21c8929 100644 --- a/test/parallel/test-http-client-timeout-option-with-agent.js +++ b/test/parallel/test-http-client-timeout-option-with-agent.js @@ -18,6 +18,6 @@ request.on('socket', mustCall((socket) => { const listeners = socket.listeners('timeout'); - strictEqual(listeners.length, 1); - strictEqual(listeners[0], request.timeoutCb); + strictEqual(listeners.length, 2); + strictEqual(listeners[1], request.timeoutCb); })); diff --git a/test/parallel/test-http-flush.js b/test/parallel/test-http-flush.js deleted file mode 100644 index 24f43d5efec..00000000000 --- a/test/parallel/test-http-flush.js +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -'use strict'; -require('../common'); -const http = require('http'); - -http.createServer(function(req, res) { - res.end('ok'); - this.close(); -}).listen(0, '127.0.0.1', function() { - const req = http.request({ - method: 'POST', - host: '127.0.0.1', - port: this.address().port, - }); - req.flush(); // Flush the request headers. - req.flush(); // Should be idempotent. 
-}); diff --git a/test/parallel/test-http-outgoing-proto.js b/test/parallel/test-http-outgoing-proto.js index b037c88c683..4a07d18c601 100644 --- a/test/parallel/test-http-outgoing-proto.js +++ b/test/parallel/test-http-outgoing-proto.js @@ -1,5 +1,5 @@ 'use strict'; -const common = require('../common'); +require('../common'); const assert = require('assert'); const http = require('http'); @@ -62,13 +62,16 @@ assert.throws(() => { { const outgoingMessage = new OutgoingMessage(); - outgoingMessage.on('error', common.expectsError({ - code: 'ERR_METHOD_NOT_IMPLEMENTED', - name: 'Error', - message: 'The _implicitHeader() method is not implemented' - })); - - outgoingMessage.write(''); + assert.throws( + () => { + outgoingMessage.write(''); + }, + { + code: 'ERR_METHOD_NOT_IMPLEMENTED', + name: 'Error', + message: 'The _implicitHeader() method is not implemented' + } + ); } assert(OutgoingMessage.prototype.write.call({ _header: 'test' })); @@ -119,3 +122,10 @@ assert.throws(() => { name: 'TypeError', message: 'Invalid character in trailer content ["404"]' }); + +{ + const outgoingMessage = new OutgoingMessage(); + assert.strictEqual(outgoingMessage.destroyed, false); + outgoingMessage.destroy(); + assert.strictEqual(outgoingMessage.destroyed, true); +} diff --git a/test/parallel/test-http-server-write-end-after-end.js b/test/parallel/test-http-server-write-end-after-end.js new file mode 100644 index 00000000000..37fbe062f12 --- /dev/null +++ b/test/parallel/test-http-server-write-end-after-end.js @@ -0,0 +1,27 @@ +'use strict'; + +const common = require('../common'); +const http = require('http'); + +const server = http.createServer(handle); + +function handle(req, res) { + res.on('error', common.mustCall((err) => { + common.expectsError({ + code: 'ERR_STREAM_WRITE_AFTER_END', + name: 'Error' + })(err); + server.close(); + })); + + res.write('hello'); + res.end(); + + setImmediate(common.mustCall(() => { + res.end('world'); + })); +} + +server.listen(0, common.mustCall(() => { + http.get(`http://localhost:${server.address().port}`); +})); diff --git a/test/parallel/test-http-uncaught-from-request-callback.js b/test/parallel/test-http-uncaught-from-request-callback.js new file mode 100644 index 00000000000..5c759586178 --- /dev/null +++ b/test/parallel/test-http-uncaught-from-request-callback.js @@ -0,0 +1,29 @@ +'use strict'; +const common = require('../common'); +const asyncHooks = require('async_hooks'); +const http = require('http'); + +// Regression test for https://github.com/nodejs/node/issues/31796 + +asyncHooks.createHook({ + after: () => {} +}).enable(); + + +process.once('uncaughtException', common.mustCall(() => { + server.close(); +})); + +const server = http.createServer(common.mustCall((request, response) => { + response.writeHead(200, { 'Content-Type': 'text/plain' }); + response.end(); +})); + +server.listen(0, common.mustCall(() => { + http.get({ + host: 'localhost', + port: server.address().port + }, common.mustCall(() => { + throw new Error('whoah'); + })); +})); diff --git a/test/parallel/test-http2-binding.js b/test/parallel/test-http2-binding.js index 81f49691e3e..e81a58dfe8a 100644 --- a/test/parallel/test-http2-binding.js +++ b/test/parallel/test-http2-binding.js @@ -170,7 +170,16 @@ const expectedHeaderNames = { HTTP2_HEADER_CONTENT_MD5: 'content-md5', HTTP2_HEADER_TE: 'te', HTTP2_HEADER_UPGRADE: 'upgrade', - HTTP2_HEADER_HTTP2_SETTINGS: 'http2-settings' + HTTP2_HEADER_HTTP2_SETTINGS: 'http2-settings', + HTTP2_HEADER_X_XSS_PROTECTION: 'x-xss-protection', + 
HTTP2_HEADER_ALT_SVC: 'alt-svc', + HTTP2_HEADER_CONTENT_SECURITY_POLICY: 'content-security-policy', + HTTP2_HEADER_EARLY_DATA: 'early-data', + HTTP2_HEADER_EXPECT_CT: 'expect-ct', + HTTP2_HEADER_ORIGIN: 'origin', + HTTP2_HEADER_PURPOSE: 'purpose', + HTTP2_HEADER_TIMING_ALLOW_ORIGIN: 'timing-allow-origin', + HTTP2_HEADER_X_FORWARDED_FOR: 'x-forwarded-for', }; const expectedNGConstants = { diff --git a/test/parallel/test-http2-compat-serverresponse-end.js b/test/parallel/test-http2-compat-serverresponse-end.js index 5bbb24bb2ed..8505d6c4969 100644 --- a/test/parallel/test-http2-compat-serverresponse-end.js +++ b/test/parallel/test-http2-compat-serverresponse-end.js @@ -149,11 +149,13 @@ const { // Http2ServerResponse.end is necessary on HEAD requests in compat // for http1 compatibility const server = createServer(mustCall((request, response) => { - strictEqual(response.finished, true); strictEqual(response.writableEnded, false); + strictEqual(response.finished, false); response.writeHead(HTTP_STATUS_OK, { foo: 'bar' }); + strictEqual(response.finished, false); response.end('data', mustCall()); strictEqual(response.writableEnded, true); + strictEqual(response.finished, true); })); server.listen(0, mustCall(() => { const { port } = server.address(); diff --git a/test/parallel/test-http2-server-stream-session-destroy.js b/test/parallel/test-http2-server-stream-session-destroy.js index b25dfbb347d..a03569cdee7 100644 --- a/test/parallel/test-http2-server-stream-session-destroy.js +++ b/test/parallel/test-http2-server-stream-session-destroy.js @@ -34,11 +34,9 @@ server.on('stream', common.mustCall((stream) => { name: 'Error' } ); - stream.on('error', common.expectsError({ - name: 'Error', - code: 'ERR_STREAM_WRITE_AFTER_END', - message: 'write after end' - })); + // When the session is destroyed, all streams are destroyed and no further + // error should be emitted. + stream.on('error', common.mustNotCall()); assert.strictEqual(stream.write('data', common.expectsError({ name: 'Error', code: 'ERR_STREAM_WRITE_AFTER_END', diff --git a/test/sequential/test-inspector-module.js b/test/parallel/test-inspector-module.js similarity index 100% rename from test/sequential/test-inspector-module.js rename to test/parallel/test-inspector-module.js diff --git a/test/parallel/test-internal-errors.js b/test/parallel/test-internal-errors.js index fbb8a0a86a3..7bcc7dcc330 100644 --- a/test/parallel/test-internal-errors.js +++ b/test/parallel/test-internal-errors.js @@ -1,6 +1,6 @@ // Flags: --expose-internals 'use strict'; -const common = require('../common'); +require('../common'); const { hijackStdout, restoreStdout, @@ -50,10 +50,13 @@ errors.E('TEST_ERROR_2', (a, b) => `${a} ${b}`, Error); } { - common.expectsInternalAssertion( + assert.throws( () => new errors.codes.TEST_ERROR_1(), - 'Code: TEST_ERROR_1; The provided arguments ' + - 'length (0) does not match the required ones (1).'
+ { + message: /^Code: TEST_ERROR_1; The provided arguments length \(0\) does not match the required ones \(1\)\./, + name: 'Error', + code: 'ERR_INTERNAL_ASSERTION' + } ); } diff --git a/test/parallel/test-internal-validators-validateport.js b/test/parallel/test-internal-validators-validateport.js new file mode 100644 index 00000000000..ea9c3a7b58b --- /dev/null +++ b/test/parallel/test-internal-validators-validateport.js @@ -0,0 +1,23 @@ +// Flags: --expose-internals +'use strict'; + +require('../common'); +const assert = require('assert'); +const { validatePort } = require('internal/validators'); + +for (let n = 0; n <= 0xFFFF; n++) { + validatePort(n); + validatePort(`${n}`); + validatePort(`0x${n.toString(16)}`); + validatePort(`0o${n.toString(8)}`); + validatePort(`0b${n.toString(2)}`); +} + +[ + -1, 'a', {}, [], false, true, + 0xFFFF + 1, Infinity, -Infinity, NaN, + undefined, null, '', ' ', 1.1, '0x', + '-0x1', '-0o1', '-0b1', '0o', '0b' +].forEach((i) => assert.throws(() => validatePort(i), { + code: 'ERR_SOCKET_BAD_PORT' +})); diff --git a/test/parallel/test-net-dns-error.js b/test/parallel/test-net-dns-error.js index bb90eafc3d9..7232ef10ebb 100644 --- a/test/parallel/test-net-dns-error.js +++ b/test/parallel/test-net-dns-error.js @@ -26,15 +26,16 @@ const assert = require('assert'); const net = require('net'); const host = '*'.repeat(64); -const errCode = common.isOpenBSD ? 'EAI_FAIL' : 'ENOTFOUND'; +// Resolving hostname > 63 characters may return EAI_FAIL (permanent failure). +const errCodes = ['ENOTFOUND', 'EAI_FAIL']; const socket = net.connect(42, host, common.mustNotCall()); socket.on('error', common.mustCall(function(err) { - assert.strictEqual(err.code, errCode); + assert(errCodes.includes(err.code), err); })); socket.on('lookup', common.mustCall(function(err, ip, type) { assert(err instanceof Error); - assert.strictEqual(err.code, errCode); + assert(errCodes.includes(err.code), err); assert.strictEqual(ip, undefined); assert.strictEqual(type, undefined); })); diff --git a/test/parallel/test-net-internal.js b/test/parallel/test-net-internal.js deleted file mode 100644 index 309b56d4d9a..00000000000 --- a/test/parallel/test-net-internal.js +++ /dev/null @@ -1,20 +0,0 @@ -'use strict'; - -// Flags: --expose-internals - -require('../common'); -const assert = require('assert'); -const isLegalPort = require('internal/net').isLegalPort; - -for (let n = 0; n <= 0xFFFF; n++) { - assert(isLegalPort(n)); - assert(isLegalPort(String(n))); - assert(`0x${n.toString(16)}`); - assert(`0o${n.toString(8)}`); - assert(`0b${n.toString(2)}`); -} - -const bad = [-1, 'a', {}, [], false, true, 0xFFFF + 1, Infinity, - -Infinity, NaN, undefined, null, '', ' ', 1.1, '0x', - '-0x1', '-0o1', '-0b1', '0o', '0b']; -bad.forEach((i) => assert(!isLegalPort(i))); diff --git a/test/parallel/test-net-socket-destroy-send.js b/test/parallel/test-net-socket-destroy-send.js index 54dadd94861..db792ad6d3b 100644 --- a/test/parallel/test-net-socket-destroy-send.js +++ b/test/parallel/test-net-socket-destroy-send.js @@ -12,11 +12,7 @@ server.listen(0, common.mustCall(function() { conn.on('connect', common.mustCall(function() { // Test destroy returns this, even on multiple calls when it short-circuits. 
assert.strictEqual(conn, conn.destroy().destroy()); - conn.on('error', common.expectsError({ - code: 'ERR_STREAM_DESTROYED', - message: 'Cannot call write after a stream was destroyed', - name: 'Error' - })); + conn.on('error', common.mustNotCall()); conn.write(Buffer.from('kaboom'), common.expectsError({ code: 'ERR_STREAM_DESTROYED', diff --git a/test/parallel/test-net-socket-write-error.js b/test/parallel/test-net-socket-write-error.js index ab748480ea3..e68db68c0d4 100644 --- a/test/parallel/test-net-socket-write-error.js +++ b/test/parallel/test-net-socket-write-error.js @@ -2,19 +2,19 @@ const common = require('../common'); const net = require('net'); +const assert = require('assert'); const server = net.createServer().listen(0, connectToServer); function connectToServer() { const client = net.createConnection(this.address().port, () => { - client.write(1337, common.expectsError({ + client.on('error', common.mustNotCall()); + assert.throws(() => { + client.write(1337); + }, { code: 'ERR_INVALID_ARG_TYPE', name: 'TypeError' - })); - client.on('error', common.expectsError({ - code: 'ERR_INVALID_ARG_TYPE', - name: 'TypeError' - })); + }); client.destroy(); }) diff --git a/test/parallel/test-net-write-arguments.js b/test/parallel/test-net-write-arguments.js index 407871afe6d..2b81ed7d6a3 100644 --- a/test/parallel/test-net-write-arguments.js +++ b/test/parallel/test-net-write-arguments.js @@ -1,20 +1,18 @@ 'use strict'; const common = require('../common'); const net = require('net'); - +const assert = require('assert'); const socket = net.Stream({ highWaterMark: 0 }); // Make sure that anything besides a buffer or a string throws. -socket.write(null, common.expectsError({ +socket.on('error', common.mustNotCall()); +assert.throws(() => { + socket.write(null); +}, { code: 'ERR_STREAM_NULL_VALUES', name: 'TypeError', message: 'May not write null values to stream' -})); -socket.on('error', common.expectsError({ - code: 'ERR_STREAM_NULL_VALUES', - name: 'TypeError', - message: 'May not write null values to stream' -})); +}); [ true, @@ -29,10 +27,12 @@ socket.on('error', common.expectsError({ ].forEach((value) => { // We need to check the callback since 'error' will only // be emitted once per instance. 
- socket.write(value, common.expectsError({ + assert.throws(() => { + socket.write(value); + }, { code: 'ERR_INVALID_ARG_TYPE', name: 'TypeError', message: 'The "chunk" argument must be of type string or an instance of ' + `Buffer or Uint8Array.${common.invalidArgTypeHelper(value)}` - })); + }); }); diff --git a/test/parallel/test-os.js b/test/parallel/test-os.js index 57b68ff8dea..5ee0fb9ca8d 100644 --- a/test/parallel/test-os.js +++ b/test/parallel/test-os.js @@ -194,6 +194,10 @@ const home = os.homedir(); is.string(home); assert.ok(home.includes(path.sep)); +const version = os.version(); +assert.strictEqual(typeof version, 'string'); +assert(version); + if (common.isWindows && process.env.USERPROFILE) { assert.strictEqual(home, process.env.USERPROFILE); delete process.env.USERPROFILE; diff --git a/test/parallel/test-repl-preview.js b/test/parallel/test-repl-preview.js index 82f1a9e507e..b36b99cca7c 100644 --- a/test/parallel/test-repl-preview.js +++ b/test/parallel/test-repl-preview.js @@ -88,6 +88,23 @@ async function tests(options) { '\x1B[36m[Function: koo]\x1B[39m', '\x1B[1G\x1B[0Jrepl > \x1B[8G'], ['a', [1, 2], undefined], + [" { b: 1 }['b'] === 1", [2, 6], '\x1B[33mtrue\x1B[39m', + " { b: 1 }['b']", + '\x1B[90m1\x1B[39m\x1B[22G\x1B[1A\x1B[1B\x1B[2K\x1B[1A ', + '\x1B[90m1\x1B[39m\x1B[23G\x1B[1A\x1B[1B\x1B[2K\x1B[1A=== 1', + '\x1B[90mtrue\x1B[39m\x1B[28G\x1B[1A\x1B[1B\x1B[2K\x1B[1A\r', + '\x1B[33mtrue\x1B[39m', + '\x1B[1G\x1B[0Jrepl > \x1B[8G' + ], + ["{ b: 1 }['b'] === 1;", [2, 7], '\x1B[33mfalse\x1B[39m', + "{ b: 1 }['b']", + '\x1B[90m1\x1B[39m\x1B[21G\x1B[1A\x1B[1B\x1B[2K\x1B[1A ', + '\x1B[90m1\x1B[39m\x1B[22G\x1B[1A\x1B[1B\x1B[2K\x1B[1A=== 1', + '\x1B[90mtrue\x1B[39m\x1B[27G\x1B[1A\x1B[1B\x1B[2K\x1B[1A;', + '\x1B[90mfalse\x1B[39m\x1B[28G\x1B[1A\x1B[1B\x1B[2K\x1B[1A\r', + '\x1B[33mfalse\x1B[39m', + '\x1B[1G\x1B[0Jrepl > \x1B[8G' + ], ['{ a: true }', [2, 3], '{ a: \x1B[33mtrue\x1B[39m }', '{ a: tru\x1B[90me\x1B[39m\x1B[16G\x1B[0Ke }\r', '{ a: \x1B[33mtrue\x1B[39m }', diff --git a/test/parallel/test-repl.js b/test/parallel/test-repl.js index b0dad397ccc..67ec86cc2c5 100644 --- a/test/parallel/test-repl.js +++ b/test/parallel/test-repl.js @@ -861,7 +861,7 @@ function event(ee, expected) { const data = inspect(expected, { compact: false }); const msg = `The REPL did not reply as expected for:\n\n${data}`; reject(new Error(msg)); - }, common.platformTimeout(500)); + }, common.platformTimeout(1000)); ee.once('data', common.mustCall((...args) => { clearTimeout(timeout); resolve(...args); diff --git a/test/parallel/test-require-empty-main.js b/test/parallel/test-require-empty-main.js new file mode 100644 index 00000000000..73f141d1f9e --- /dev/null +++ b/test/parallel/test-require-empty-main.js @@ -0,0 +1,25 @@ +'use strict'; +require('../common'); + +// A package.json with an empty "main" property should use index.js if present. +// require.resolve() should resolve to index.js for the same reason. +// +// In fact, any "main" property that doesn't resolve to a file should result +// in index.js being used, but that's already checked for by other tests. +// This test only concerns itself with the empty string. 
+ +const assert = require('assert'); +const path = require('path'); +const fixtures = require('../common/fixtures'); + +const where = fixtures.path('require-empty-main'); +const expected = path.join(where, 'index.js'); + +test(); +setImmediate(test); + +function test() { + assert.strictEqual(require.resolve(where), expected); + assert.strictEqual(require(where), 42); + assert.strictEqual(require.resolve(where), expected); +} diff --git a/test/parallel/test-source-map-api.js b/test/parallel/test-source-map-api.js index 2bfbc08809e..60bbb661e1c 100644 --- a/test/parallel/test-source-map-api.js +++ b/test/parallel/test-source-map-api.js @@ -124,3 +124,28 @@ const { readFileSync } = require('fs'); assert.strictEqual(originalColumn, knownDecodings[column]); } } + +// Test that generated columns are sorted when a negative offset is +// observed, see: https://github.com/mozilla/source-map/pull/92 +{ + function makeMinimalMap(generatedColumns, originalColumns) { + return { + sources: ['test.js'], + // Mapping from the 0th line, ${g}th column of the output file to the 0th + // source file, 0th line, ${column}th column. + mappings: generatedColumns.map((g, i) => `${g}AA${originalColumns[i]}`) + .join(',') + }; + } + // U = 10 + // F = -2 + // A = 0 + // E = 2 + const sourceMap = new SourceMap(makeMinimalMap( + ['U', 'F', 'F'], + ['A', 'E', 'E'] + )); + assert.strictEqual(sourceMap.findEntry(0, 6).originalColumn, 4); + assert.strictEqual(sourceMap.findEntry(0, 8).originalColumn, 2); + assert.strictEqual(sourceMap.findEntry(0, 10).originalColumn, 0); +} diff --git a/test/parallel/test-stream-catch-rejections.js b/test/parallel/test-stream-catch-rejections.js index fb5f1fccc18..848c2ada130 100644 --- a/test/parallel/test-stream-catch-rejections.js +++ b/test/parallel/test-stream-catch-rejections.js @@ -8,11 +8,10 @@ const assert = require('assert'); const r = new stream.Readable({ captureRejections: true, read() { - this.push('hello'); - this.push('world'); - this.push(null); } }); + r.push('hello'); + r.push('world'); const err = new Error('kaboom'); diff --git a/test/parallel/test-stream-finished.js b/test/parallel/test-stream-finished.js index e866ba3d740..ab35d402e31 100644 --- a/test/parallel/test-stream-finished.js +++ b/test/parallel/test-stream-finished.js @@ -181,7 +181,7 @@ const { promisify } = require('util'); const streamLike = new EE(); streamLike.readableEnded = true; streamLike.readable = true; - finished(streamLike, common.mustCall); + finished(streamLike, common.mustCall()); streamLike.emit('close'); } @@ -312,7 +312,6 @@ testClosed((opts) => new Writable({ write() {}, ...opts })); })); } - { const r = new Readable({ autoDestroy: false @@ -332,3 +331,24 @@ testClosed((opts) => new Writable({ write() {}, ...opts })); finished(rs, common.mustCall()); })); } + +{ + const d = new EE(); + d._writableState = {}; + d._writableState.finished = true; + finished(d, { readable: false, writable: true }, common.mustCall((err) => { + assert.strictEqual(err, undefined); + })); + d._writableState.errored = true; + d.emit('close'); +} + +{ + const r = new Readable(); + finished(r, common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE'); + })); + r.push('asd'); + r.push(null); + r.destroy(); +} diff --git a/test/parallel/test-stream-pipeline-uncaught.js b/test/parallel/test-stream-pipeline-uncaught.js new file mode 100644 index 00000000000..90d141ec44f --- /dev/null +++ b/test/parallel/test-stream-pipeline-uncaught.js @@ -0,0 +1,25 @@ +'use strict'; + +const common = 
require('../common'); +const { + pipeline, + PassThrough +} = require('stream'); +const assert = require('assert'); + +process.on('uncaughtException', common.mustCall((err) => { + assert.strictEqual(err.message, 'error'); +})); + +// Ensure that pipeline that ends with Promise +// still propagates error to uncaughtException. +const s = new PassThrough(); +s.end('data'); +pipeline(s, async function(source) { + for await (const chunk of source) { + chunk; + } +}, common.mustCall((err) => { + assert.ifError(err); + throw new Error('error'); +})); diff --git a/test/parallel/test-stream-pipeline.js b/test/parallel/test-stream-pipeline.js index 19fc246e2bf..7e0cb9193dc 100644 --- a/test/parallel/test-stream-pipeline.js +++ b/test/parallel/test-stream-pipeline.js @@ -505,9 +505,7 @@ const { promisify } = require('util'); res, stream, common.mustCall((err) => { - assert.ok(err); - // TODO(ronag): - // assert.strictEqual(err.message, 'oh no'); + assert.strictEqual(err.message, 'oh no'); server.close(); }) ); @@ -613,11 +611,9 @@ const { promisify } = require('util'); yield 'hello'; yield 'world'; }, async function*(source) { - const ret = []; for await (const chunk of source) { - ret.push(chunk.toUpperCase()); + yield chunk.toUpperCase(); } - yield ret; }, async function(source) { let ret = ''; for await (const chunk of source) { @@ -754,7 +750,6 @@ const { promisify } = require('util'); }, common.mustCall((err) => { assert.strictEqual(err, undefined); assert.strictEqual(ret, 'asd'); - assert.strictEqual(s.destroyed, true); })); } @@ -766,7 +761,10 @@ const { promisify } = require('util'); s.emit('data', 'asd'); s.emit('end'); }); - s.close = common.mustCall(); + // 'destroyer' can be called multiple times, + // once from stream wrapper and + // once from iterator wrapper. + s.close = common.mustCallAtLeast(1); let ret = ''; pipeline(s, async function(source) { for await (const chunk of source) { @@ -775,7 +773,6 @@ const { promisify } = require('util'); }, common.mustCall((err) => { assert.strictEqual(err, undefined); assert.strictEqual(ret, 'asd'); - assert.strictEqual(s.destroyed, true); })); } @@ -913,3 +910,77 @@ const { promisify } = require('util'); assert.strictEqual(err.message, 'kaboom'); })); } + +{ + const src = new PassThrough({ autoDestroy: false }); + const dst = new PassThrough({ autoDestroy: false }); + pipeline(src, dst, common.mustCall(() => { + assert.strictEqual(src.destroyed, true); + assert.strictEqual(dst.destroyed, false); + })); + src.end(); +} + +{ + // Make sure 'close' before 'end' finishes without error + // if readable has received eof. 
+ // Ref: https://github.com/nodejs/node/issues/29699 + const r = new Readable(); + const w = new Writable({ + write(chunk, encoding, cb) { + cb(); + } + }); + pipeline(r, w, (err) => { + assert.strictEqual(err, undefined); + }); + r.push('asd'); + r.push(null); + r.emit('close'); +} + +{ + const server = http.createServer((req, res) => { + }); + + server.listen(0, () => { + const req = http.request({ + port: server.address().port + }); + + const body = new PassThrough(); + pipeline( + body, + req, + common.mustCall((err) => { + assert(!err); + assert(!req.res); + assert(!req.aborted); + req.abort(); + server.close(); + }) + ); + body.end(); + }); +} + +{ + const src = new PassThrough(); + const dst = new PassThrough(); + pipeline(src, dst, common.mustCall((err) => { + assert(!err); + assert.strictEqual(dst.destroyed, false); + })); + src.end(); +} + +{ + const src = new PassThrough(); + const dst = new PassThrough(); + dst.readable = false; + pipeline(src, dst, common.mustCall((err) => { + assert(!err); + assert.strictEqual(dst.destroyed, true); + })); + src.end(); +} diff --git a/test/parallel/test-stream-readable-async-iterators.js b/test/parallel/test-stream-readable-async-iterators.js index 9149058d153..55d16a1c5d3 100644 --- a/test/parallel/test-stream-readable-async-iterators.js +++ b/test/parallel/test-stream-readable-async-iterators.js @@ -14,7 +14,9 @@ async function tests() { { const AsyncIteratorPrototype = Object.getPrototypeOf( Object.getPrototypeOf(async function* () {}).prototype); - const rs = new Readable({}); + const rs = new Readable({ + read() {} + }); assert.strictEqual( Object.getPrototypeOf(Object.getPrototypeOf(rs[Symbol.asyncIterator]())), AsyncIteratorPrototype); diff --git a/test/parallel/test-stream-readable-destroy.js b/test/parallel/test-stream-readable-destroy.js index d446dd36117..6caf88a1f15 100644 --- a/test/parallel/test-stream-readable-destroy.js +++ b/test/parallel/test-stream-readable-destroy.js @@ -183,12 +183,12 @@ const assert = require('assert'); let ticked = false; read.on('close', common.mustCall(() => { - assert.strictEqual(read._readableState.errorEmitted, false); + assert.strictEqual(read._readableState.errorEmitted, true); assert.strictEqual(ticked, true); })); - // 'error' should not be emitted since a callback is passed to - // destroy(err, callback); - read.on('error', common.mustNotCall()); + read.on('error', common.mustCall((err) => { + assert.strictEqual(err, expected); + })); assert.strictEqual(read._readableState.errored, false); assert.strictEqual(read._readableState.errorEmitted, false); @@ -217,7 +217,7 @@ const assert = require('assert'); })); readable.on('error', common.mustCall((err) => { assert.strictEqual(ticked, true); - assert.strictEqual(err.message, 'kaboom 2'); + assert.strictEqual(err.message, 'kaboom 1'); assert.strictEqual(readable._readableState.errorEmitted, true); })); @@ -230,7 +230,7 @@ const assert = require('assert'); // the `_destroy()` callback is called. 
readable.destroy(new Error('kaboom 2')); assert.strictEqual(readable._readableState.errorEmitted, false); - assert.strictEqual(readable._readableState.errored, true); + assert.strictEqual(readable._readableState.errored, false); ticked = true; } diff --git a/test/parallel/test-stream-readable-with-unimplemented-_read.js b/test/parallel/test-stream-readable-with-unimplemented-_read.js index 7d48c422535..16ec2ac8cd8 100644 --- a/test/parallel/test-stream-readable-with-unimplemented-_read.js +++ b/test/parallel/test-stream-readable-with-unimplemented-_read.js @@ -1,14 +1,18 @@ 'use strict'; -const common = require('../common'); +require('../common'); +const assert = require('assert'); const { Readable } = require('stream'); const readable = new Readable(); -readable.on('error', common.expectsError({ - code: 'ERR_METHOD_NOT_IMPLEMENTED', - name: 'Error', - message: 'The _read() method is not implemented' -})); - -readable.read(); +assert.throws( + () => { + readable.read(); + }, + { + code: 'ERR_METHOD_NOT_IMPLEMENTED', + name: 'Error', + message: 'The _read() method is not implemented' + } +); diff --git a/test/parallel/test-stream-transform-constructor-set-methods.js b/test/parallel/test-stream-transform-constructor-set-methods.js index 8a4abd68606..a20a1a07cff 100644 --- a/test/parallel/test-stream-transform-constructor-set-methods.js +++ b/test/parallel/test-stream-transform-constructor-set-methods.js @@ -1,18 +1,21 @@ 'use strict'; const common = require('../common'); -const { strictEqual } = require('assert'); +const assert = require('assert'); const { Transform } = require('stream'); const t = new Transform(); -t.on('error', common.expectsError({ - name: 'Error', - code: 'ERR_METHOD_NOT_IMPLEMENTED', - message: 'The _transform() method is not implemented' -})); - -t.end(Buffer.from('blerg')); +assert.throws( + () => { + t.end(Buffer.from('blerg')); + }, + { + name: 'Error', + code: 'ERR_METHOD_NOT_IMPLEMENTED', + message: 'The _transform() method is not implemented' + } +); const _transform = common.mustCall((chunk, _, next) => { next(); @@ -32,9 +35,9 @@ const t2 = new Transform({ final: _final }); -strictEqual(t2._transform, _transform); -strictEqual(t2._flush, _flush); -strictEqual(t2._final, _final); +assert.strictEqual(t2._transform, _transform); +assert.strictEqual(t2._flush, _flush); +assert.strictEqual(t2._final, _final); t2.end(Buffer.from('blerg')); t2.resume(); diff --git a/test/parallel/test-stream-writable-constructor-set-methods.js b/test/parallel/test-stream-writable-constructor-set-methods.js index c326428210c..34fda8edda9 100644 --- a/test/parallel/test-stream-writable-constructor-set-methods.js +++ b/test/parallel/test-stream-writable-constructor-set-methods.js @@ -1,34 +1,36 @@ 'use strict'; const common = require('../common'); -const { strictEqual } = require('assert'); +const assert = require('assert'); const { Writable } = require('stream'); -const w = new Writable(); - -w.on('error', common.expectsError({ - name: 'Error', - code: 'ERR_METHOD_NOT_IMPLEMENTED', - message: 'The _write() method is not implemented' -})); - const bufferBlerg = Buffer.from('blerg'); +const w = new Writable(); -w.end(bufferBlerg); +assert.throws( + () => { + w.end(bufferBlerg); + }, + { + name: 'Error', + code: 'ERR_METHOD_NOT_IMPLEMENTED', + message: 'The _write() method is not implemented' + } +); const _write = common.mustCall((chunk, _, next) => { next(); }); const _writev = common.mustCall((chunks, next) => { - strictEqual(chunks.length, 2); + assert.strictEqual(chunks.length, 2); 
next(); }); const w2 = new Writable({ write: _write, writev: _writev }); -strictEqual(w2._write, _write); -strictEqual(w2._writev, _writev); +assert.strictEqual(w2._write, _write); +assert.strictEqual(w2._writev, _writev); w2.write(bufferBlerg); diff --git a/test/parallel/test-stream-writable-destroy.js b/test/parallel/test-stream-writable-destroy.js index 2a9a1965adb..706847a8582 100644 --- a/test/parallel/test-stream-writable-destroy.js +++ b/test/parallel/test-stream-writable-destroy.js @@ -187,12 +187,14 @@ const assert = require('assert'); let ticked = false; writable.on('close', common.mustCall(() => { + writable.on('error', common.mustNotCall()); + writable.destroy(new Error('hello')); assert.strictEqual(ticked, true); assert.strictEqual(writable._writableState.errorEmitted, true); })); writable.on('error', common.mustCall((err) => { assert.strictEqual(ticked, true); - assert.strictEqual(err.message, 'kaboom 2'); + assert.strictEqual(err.message, 'kaboom 1'); assert.strictEqual(writable._writableState.errorEmitted, true); })); @@ -205,7 +207,7 @@ const assert = require('assert'); // the `_destroy()` callback is called. writable.destroy(new Error('kaboom 2')); assert.strictEqual(writable._writableState.errorEmitted, false); - assert.strictEqual(writable._writableState.errored, true); + assert.strictEqual(writable._writableState.errored, false); ticked = true; } @@ -246,8 +248,8 @@ const assert = require('assert'); const expected = new Error('kaboom'); - write.destroy(expected, common.mustCall(function(err) { - assert.strictEqual(err, expected); + write.destroy(expected, common.mustCall((err) => { + assert.strictEqual(err, undefined); })); } @@ -271,11 +273,7 @@ const assert = require('assert'); const write = new Writable(); write.destroy(); - write.on('error', common.expectsError({ - name: 'Error', - code: 'ERR_STREAM_DESTROYED', - message: 'Cannot call write after a stream was destroyed' - })); + write.on('error', common.mustNotCall()); write.write('asd', common.expectsError({ name: 'Error', code: 'ERR_STREAM_DESTROYED', @@ -288,11 +286,7 @@ const assert = require('assert'); write(chunk, enc, cb) { cb(); } }); - write.on('error', common.expectsError({ - name: 'Error', - code: 'ERR_STREAM_DESTROYED', - message: 'Cannot call write after a stream was destroyed' - })); + write.on('error', common.mustNotCall()); write.cork(); write.write('asd', common.mustCall()); diff --git a/test/parallel/test-stream-writable-ended-state.js b/test/parallel/test-stream-writable-ended-state.js index e5fa624c12a..2c40c62a9ee 100644 --- a/test/parallel/test-stream-writable-ended-state.js +++ b/test/parallel/test-stream-writable-ended-state.js @@ -9,17 +9,24 @@ const writable = new stream.Writable(); writable._write = (chunk, encoding, cb) => { assert.strictEqual(writable._writableState.ended, false); + assert.strictEqual(writable._writableState.writable, undefined); assert.strictEqual(writable.writableEnded, false); cb(); }; assert.strictEqual(writable._writableState.ended, false); +assert.strictEqual(writable._writableState.writable, undefined); +assert.strictEqual(writable.writable, true); assert.strictEqual(writable.writableEnded, false); writable.end('testing ended state', common.mustCall(() => { assert.strictEqual(writable._writableState.ended, true); + assert.strictEqual(writable._writableState.writable, undefined); + assert.strictEqual(writable.writable, false); assert.strictEqual(writable.writableEnded, true); })); assert.strictEqual(writable._writableState.ended, true); 
+assert.strictEqual(writable._writableState.writable, undefined); +assert.strictEqual(writable.writable, false); assert.strictEqual(writable.writableEnded, true); diff --git a/test/parallel/test-stream-writable-invalid-chunk.js b/test/parallel/test-stream-writable-invalid-chunk.js index 09ee5877c8d..09032c07c59 100644 --- a/test/parallel/test-stream-writable-invalid-chunk.js +++ b/test/parallel/test-stream-writable-invalid-chunk.js @@ -2,20 +2,21 @@ const common = require('../common'); const stream = require('stream'); +const assert = require('assert'); function testWriteType(val, objectMode, code) { const writable = new stream.Writable({ objectMode, write: () => {} }); - if (!code) { - writable.on('error', common.mustNotCall()); + writable.on('error', common.mustNotCall()); + if (code) { + assert.throws(() => { + writable.write(val); + }, { code }); } else { - writable.on('error', common.expectsError({ - code: code, - })); + writable.write(val); } - writable.write(val); } testWriteType([], false, 'ERR_INVALID_ARG_TYPE'); diff --git a/test/parallel/test-stream-writable-null.js b/test/parallel/test-stream-writable-null.js index f26fc62328c..99419f1cf9a 100644 --- a/test/parallel/test-stream-writable-null.js +++ b/test/parallel/test-stream-writable-null.js @@ -16,31 +16,22 @@ class MyWritable extends stream.Writable { { const m = new MyWritable({ objectMode: true }); - m.write(null, (err) => assert.ok(err)); - m.on('error', common.expectsError({ - code: 'ERR_STREAM_NULL_VALUES', - name: 'TypeError', - message: 'May not write null values to stream' - })); -} - -{ // Should not throw. - const m = new MyWritable({ objectMode: true }).on('error', assert); - m.write(null, assert); + m.on('error', common.mustNotCall()); + assert.throws(() => { + m.write(null); + }, { + code: 'ERR_STREAM_NULL_VALUES' + }); } { const m = new MyWritable(); - m.write(false, (err) => assert.ok(err)); - m.on('error', common.expectsError({ - code: 'ERR_INVALID_ARG_TYPE', - name: 'TypeError' - })); -} - -{ // Should not throw. - const m = new MyWritable().on('error', assert); - m.write(false, assert); + m.on('error', common.mustNotCall()); + assert.throws(() => { + m.write(false); + }, { + code: 'ERR_INVALID_ARG_TYPE' + }); } { // Should not throw. 
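A pattern repeated throughout the stream test changes above is worth spelling out: argument validation failures in write() (null values, non-Buffer/non-string chunks) are now reported as synchronous throws from write() itself rather than as asynchronous 'error' events. A standalone sketch of what the updated tests assert, illustrative only and assuming a Node.js build that includes these changes:

'use strict';
const assert = require('assert');
const { Writable } = require('stream');

const w = new Writable({ write(chunk, encoding, cb) { cb(); } });

// Invalid arguments now throw synchronously from write()...
assert.throws(() => w.write(null), { code: 'ERR_STREAM_NULL_VALUES' });
assert.throws(() => w.write(42), { code: 'ERR_INVALID_ARG_TYPE' });

// ...and consequently no 'error' event is emitted for them.
w.on('error', () => assert.fail("'error' must not be emitted"));
w.end('ok');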
diff --git a/test/parallel/test-stream-writable-writable.js b/test/parallel/test-stream-writable-writable.js index 98c1eab7719..ef5454dc52e 100644 --- a/test/parallel/test-stream-writable-writable.js +++ b/test/parallel/test-stream-writable-writable.js @@ -23,7 +23,6 @@ const { Writable } = require('stream'); w.write('asd'); assert.strictEqual(w.writable, false); w.on('error', common.mustCall()); - w.destroy(); } { diff --git a/test/parallel/test-stream-writable-write-error.js b/test/parallel/test-stream-writable-write-error.js index e23b24a19df..eb9b1db06b7 100644 --- a/test/parallel/test-stream-writable-write-error.js +++ b/test/parallel/test-stream-writable-write-error.js @@ -4,19 +4,27 @@ const assert = require('assert'); const { Writable } = require('stream'); -function expectError(w, arg, code) { - let errorCalled = false; - let ticked = false; - w.write(arg, common.mustCall((err) => { - assert.strictEqual(ticked, true); - assert.strictEqual(errorCalled, false); - assert.strictEqual(err.code, code); - })); - ticked = true; - w.on('error', common.mustCall((err) => { - errorCalled = true; - assert.strictEqual(err.code, code); - })); +function expectError(w, arg, code, sync) { + if (sync) { + if (code) { + assert.throws(() => w.write(arg), { code }); + } else { + w.write(arg); + } + } else { + let errorCalled = false; + let ticked = false; + w.write(arg, common.mustCall((err) => { + assert.strictEqual(ticked, true); + assert.strictEqual(errorCalled, false); + assert.strictEqual(err.code, code); + })); + ticked = true; + w.on('error', common.mustCall((err) => { + errorCalled = true; + assert.strictEqual(err.code, code); + })); + } } function test(autoDestroy) { @@ -35,7 +43,6 @@ function test(autoDestroy) { _write() {} }); w.destroy(); - expectError(w, 'asd', 'ERR_STREAM_DESTROYED'); } { @@ -43,7 +50,7 @@ function test(autoDestroy) { autoDestroy, _write() {} }); - expectError(w, null, 'ERR_STREAM_NULL_VALUES'); + expectError(w, null, 'ERR_STREAM_NULL_VALUES', true); } { @@ -51,7 +58,7 @@ function test(autoDestroy) { autoDestroy, _write() {} }); - expectError(w, {}, 'ERR_INVALID_ARG_TYPE'); + expectError(w, {}, 'ERR_INVALID_ARG_TYPE', true); } } diff --git a/test/parallel/test-stream2-writable.js b/test/parallel/test-stream2-writable.js index 0d9bc03fae7..03835bb1bd0 100644 --- a/test/parallel/test-stream2-writable.js +++ b/test/parallel/test-stream2-writable.js @@ -422,12 +422,12 @@ const helloWorldBuffer = Buffer.from('hello world'); { // Verify that error is only emitted once when failing in write. 
const w = new W(); - w.on('error', common.mustCall((err) => { - assert.strictEqual(w._writableState.errorEmitted, true); - assert.strictEqual(err.code, 'ERR_STREAM_NULL_VALUES'); - })); - w.write(null); - w.destroy(new Error()); + w.on('error', common.mustNotCall()); + assert.throws(() => { + w.write(null); + }, { + code: 'ERR_STREAM_NULL_VALUES' + }); } { diff --git a/test/parallel/test-stream3-pause-then-read.js b/test/parallel/test-stream3-pause-then-read.js index bfd8a203732..1a385472205 100644 --- a/test/parallel/test-stream3-pause-then-read.js +++ b/test/parallel/test-stream3-pause-then-read.js @@ -108,7 +108,7 @@ function pipeLittle() { console.error('pipe a little'); const w = new Writable(); let written = 0; - w.on('finish', function() { + w.on('finish', () => { assert.strictEqual(written, 200); setImmediate(read1234); }); @@ -160,7 +160,7 @@ function pipe() { written += chunk.length; cb(); }; - w.on('finish', function() { + w.on('finish', () => { console.error('written', written, totalPushed); assert.strictEqual(written, expectEndingData); assert.strictEqual(totalPushed, expectTotalData); diff --git a/test/parallel/test-tls-close-event-after-write.js b/test/parallel/test-tls-close-event-after-write.js index 31ebc897b14..57c79e2e5ab 100644 --- a/test/parallel/test-tls-close-event-after-write.js +++ b/test/parallel/test-tls-close-event-after-write.js @@ -12,23 +12,22 @@ const tls = require('tls'); const fixtures = require('../common/fixtures'); let cconn = null; let sconn = null; +let read_len = 0; +const buffer_size = 1024 * 1024; function test() { if (cconn && sconn) { cconn.resume(); sconn.resume(); - sconn.end(Buffer.alloc(1024 * 1024)); - cconn.end(); + sconn.end(Buffer.alloc(buffer_size)); } } const server = tls.createServer({ key: fixtures.readKey('agent1-key.pem'), cert: fixtures.readKey('agent1-cert.pem') -}, function(c) { - c.on('close', function() { - server.close(); - }); +}, (c) => { + c.on('close', common.mustCall(() => server.close())); sconn = c; test(); }).listen(0, common.mustCall(function() { @@ -36,6 +35,12 @@ const server = tls.createServer({ rejectUnauthorized: false }, common.mustCall(function() { cconn = this; + cconn.on('data', (d) => { + read_len += d.length; + if (read_len === buffer_size) { + cconn.end(); + } + }); test(); })); })); diff --git a/test/parallel/test-tls-exportkeyingmaterial.js b/test/parallel/test-tls-exportkeyingmaterial.js new file mode 100644 index 00000000000..b3173f94001 --- /dev/null +++ b/test/parallel/test-tls-exportkeyingmaterial.js @@ -0,0 +1,102 @@ +'use strict'; + +// Test return value of tlsSocket.exportKeyingMaterial + +const common = require('../common'); + +if (!common.hasCrypto) + common.skip('missing crypto'); + +const assert = require('assert'); +const net = require('net'); +const tls = require('tls'); +const fixtures = require('../common/fixtures'); + +const key = fixtures.readKey('agent1-key.pem'); +const cert = fixtures.readKey('agent1-cert.pem'); + +const server = net.createServer(common.mustCall((s) => { + const tlsSocket = new tls.TLSSocket(s, { + isServer: true, + server: server, + secureContext: tls.createSecureContext({ key, cert }) + }); + + assert.throws(() => { + tlsSocket.exportKeyingMaterial(128, 'label'); + }, { + name: 'Error', + message: 'TLS socket connection must be securely established', + code: 'ERR_TLS_INVALID_STATE' + }); + + tlsSocket.on('secure', common.mustCall(() => { + const label = 'client finished'; + + const validKeyingMaterial = tlsSocket.exportKeyingMaterial(128, label); + 
assert.strictEqual(validKeyingMaterial.length, 128); + + const validKeyingMaterialWithContext = tlsSocket + .exportKeyingMaterial(128, label, Buffer.from([0, 1, 2, 3])); + assert.strictEqual(validKeyingMaterialWithContext.length, 128); + + // Ensure providing a context results in a different key than without + assert.notStrictEqual(validKeyingMaterial, validKeyingMaterialWithContext); + + const validKeyingMaterialWithEmptyContext = tlsSocket + .exportKeyingMaterial(128, label, Buffer.from([])); + assert.strictEqual(validKeyingMaterialWithEmptyContext.length, 128); + + assert.throws(() => { + tlsSocket.exportKeyingMaterial(128, label, 'stringAsContextNotSupported'); + }, { + name: 'TypeError', + code: 'ERR_INVALID_ARG_TYPE' + }); + + assert.throws(() => { + tlsSocket.exportKeyingMaterial(128, label, 1234); + }, { + name: 'TypeError', + code: 'ERR_INVALID_ARG_TYPE' + }); + + assert.throws(() => { + tlsSocket.exportKeyingMaterial(10, null); + }, { + name: 'TypeError', + code: 'ERR_INVALID_ARG_TYPE' + }); + + assert.throws(() => { + tlsSocket.exportKeyingMaterial('length', 1234); + }, { + name: 'TypeError', + code: 'ERR_INVALID_ARG_TYPE' + }); + + assert.throws(() => { + tlsSocket.exportKeyingMaterial(-3, 'a'); + }, { + name: 'RangeError', + code: 'ERR_OUT_OF_RANGE' + }); + + assert.throws(() => { + tlsSocket.exportKeyingMaterial(0, 'a'); + }, { + name: 'RangeError', + code: 'ERR_OUT_OF_RANGE' + }); + + tlsSocket.end(); + server.close(); + })); +})).listen(0, () => { + const opts = { + port: server.address().port, + rejectUnauthorized: false + }; + + tls.connect(opts, common.mustCall(function() { this.end(); })); +}); diff --git a/test/parallel/test-tls-socket-allow-half-open-option.js b/test/parallel/test-tls-socket-allow-half-open-option.js index 36449a6130c..6b94c39747a 100644 --- a/test/parallel/test-tls-socket-allow-half-open-option.js +++ b/test/parallel/test-tls-socket-allow-half-open-option.js @@ -21,7 +21,10 @@ const tls = require('tls'); { // The option is ignored when the `socket` argument is a generic // `stream.Duplex`. 
- const duplex = new stream.Duplex({ allowHalfOpen: false }); + const duplex = new stream.Duplex({ + allowHalfOpen: false, + read() {} + }); const socket = new tls.TLSSocket(duplex, { allowHalfOpen: true }); assert.strictEqual(socket.allowHalfOpen, false); } diff --git a/test/parallel/test-tls-wrap-econnreset-localaddress.js b/test/parallel/test-tls-wrap-econnreset-localaddress.js index 9df145ac374..30d3a8873fa 100644 --- a/test/parallel/test-tls-wrap-econnreset-localaddress.js +++ b/test/parallel/test-tls-wrap-econnreset-localaddress.js @@ -13,6 +13,7 @@ const server = net.createServer((c) => { }).listen(common.mustCall(() => { const port = server.address().port; + let errored = false; tls.connect({ port: port, localAddress: common.localhostIPv4 @@ -24,5 +25,9 @@ const server = net.createServer((c) => { assert.strictEqual(e.port, port); assert.strictEqual(e.localAddress, common.localhostIPv4); server.close(); + errored = true; + })) + .on('close', common.mustCall(() => { + assert.strictEqual(errored, true); })); })); diff --git a/test/parallel/test-tls-wrap-econnreset-pipe.js b/test/parallel/test-tls-wrap-econnreset-pipe.js index a5cba7e47bf..f294f23f1d0 100644 --- a/test/parallel/test-tls-wrap-econnreset-pipe.js +++ b/test/parallel/test-tls-wrap-econnreset-pipe.js @@ -31,6 +31,7 @@ if (process.argv[2] !== 'child') { const server = net.createServer((c) => { c.end(); }).listen(common.PIPE, common.mustCall(() => { + let errored = false; tls.connect({ path: common.PIPE }) .once('error', common.mustCall((e) => { assert.strictEqual(e.code, 'ECONNRESET'); @@ -39,5 +40,9 @@ const server = net.createServer((c) => { assert.strictEqual(e.host, undefined); assert.strictEqual(e.localAddress, undefined); server.close(); + errored = true; + })) + .on('close', common.mustCall(() => { + assert.strictEqual(errored, true); })); })); diff --git a/test/parallel/test-tls-wrap-econnreset-socket.js b/test/parallel/test-tls-wrap-econnreset-socket.js index 672da9876fd..ec305b785e0 100644 --- a/test/parallel/test-tls-wrap-econnreset-socket.js +++ b/test/parallel/test-tls-wrap-econnreset-socket.js @@ -15,6 +15,7 @@ const server = net.createServer((c) => { const socket = new net.Socket(); + let errored = false; tls.connect({ socket }) .once('error', common.mustCall((e) => { assert.strictEqual(e.code, 'ECONNRESET'); @@ -22,7 +23,11 @@ const server = net.createServer((c) => { assert.strictEqual(e.host, undefined); assert.strictEqual(e.port, undefined); assert.strictEqual(e.localAddress, undefined); + errored = true; server.close(); + })) + .on('close', common.mustCall(() => { + assert.strictEqual(errored, true); })); socket.connect(port); diff --git a/test/parallel/test-tls-wrap-econnreset.js b/test/parallel/test-tls-wrap-econnreset.js index 5c6db86b75e..6ed268e766f 100644 --- a/test/parallel/test-tls-wrap-econnreset.js +++ b/test/parallel/test-tls-wrap-econnreset.js @@ -13,6 +13,7 @@ const server = net.createServer((c) => { }).listen(common.mustCall(() => { const port = server.address().port; + let errored = false; tls.connect(port, common.localhostIPv4) .once('error', common.mustCall((e) => { assert.strictEqual(e.code, 'ECONNRESET'); @@ -21,5 +22,9 @@ const server = net.createServer((c) => { assert.strictEqual(e.port, port); assert.strictEqual(e.localAddress, undefined); server.close(); + errored = true; + })) + .on('close', common.mustCall(() => { + assert.strictEqual(errored, true); })); })); diff --git a/test/parallel/test-vm-measure-memory.js b/test/parallel/test-vm-measure-memory.js new file mode 100644 index 
00000000000..7e620304e0a --- /dev/null +++ b/test/parallel/test-vm-measure-memory.js @@ -0,0 +1,70 @@ +'use strict'; +const common = require('../common'); +const assert = require('assert'); +const vm = require('vm'); + +common.expectWarning('ExperimentalWarning', + 'vm.measureMemory is an experimental feature. ' + + 'This feature could change at any time'); + +// The formats could change when V8 is updated; the tests should then be +// updated accordingly. +function assertSummaryShape(result) { + assert.strictEqual(typeof result, 'object'); + assert.strictEqual(typeof result.total, 'object'); + assert.strictEqual(typeof result.total.jsMemoryEstimate, 'number'); + assert(Array.isArray(result.total.jsMemoryRange)); + assert.strictEqual(typeof result.total.jsMemoryRange[0], 'number'); + assert.strictEqual(typeof result.total.jsMemoryRange[1], 'number'); +} + +function assertDetailedShape(result) { + // For now, the detailed shape is the same as the summary shape. This + // should change in future versions of V8. + return assertSummaryShape(result); +} + +// Test measuring memory of the current context +{ + vm.measureMemory() + .then(assertSummaryShape); + + vm.measureMemory({}) + .then(assertSummaryShape); + + vm.measureMemory({ mode: 'summary' }) + .then(assertSummaryShape); + + vm.measureMemory({ mode: 'detailed' }) + .then(assertDetailedShape); + + assert.throws(() => vm.measureMemory(null), { + code: 'ERR_INVALID_ARG_TYPE' + }); + assert.throws(() => vm.measureMemory('summary'), { + code: 'ERR_INVALID_ARG_TYPE' + }); + assert.throws(() => vm.measureMemory({ mode: 'random' }), { + code: 'ERR_INVALID_ARG_VALUE' + }); +} + +// Test measuring memory of the sandbox +{ + const context = vm.createContext(); + vm.measureMemory({ context }) + .then(assertSummaryShape); + + vm.measureMemory({ mode: 'summary', context }) + .then(assertSummaryShape); + + vm.measureMemory({ mode: 'detailed', context }) + .then(assertDetailedShape); + + assert.throws(() => vm.measureMemory({ mode: 'summary', context: null }), { + code: 'ERR_INVALID_ARG_TYPE' + }); + assert.throws(() => vm.measureMemory({ mode: 'summary', context: {} }), { + code: 'ERR_INVALID_ARG_TYPE' + }); +} diff --git a/test/parallel/test-worker-resource-limits.js b/test/parallel/test-worker-resource-limits.js index 2d4ebbc0ce6..9332a132694 100644 --- a/test/parallel/test-worker-resource-limits.js +++ b/test/parallel/test-worker-resource-limits.js @@ -25,7 +25,8 @@ if (!process.env.HAS_STARTED_WORKER) { })); w.on('error', common.expectsError({ code: 'ERR_WORKER_OUT_OF_MEMORY', - message: 'Worker terminated due to reaching memory limit' + message: 'Worker terminated due to reaching memory limit: ' + + 'JS heap out of memory' })); return; } diff --git a/test/parallel/test-zlib-invalid-input.js b/test/parallel/test-zlib-invalid-input.js index 68fa3825b91..eb651be00fb 100644 --- a/test/parallel/test-zlib-invalid-input.js +++ b/test/parallel/test-zlib-invalid-input.js @@ -43,10 +43,11 @@ const unzips = [ ]; nonStringInputs.forEach(common.mustCall((input) => { - // zlib.gunzip should not throw an error when called with bad input. - zlib.gunzip(input, (err, buffer) => { - // zlib.gunzip should pass the error to the callback. 
- assert.ok(err); + assert.throws(() => { + zlib.gunzip(input); + }, { + name: 'TypeError', + code: 'ERR_INVALID_ARG_TYPE' }); }, nonStringInputs.length)); diff --git a/test/parallel/test-zlib-object-write.js b/test/parallel/test-zlib-object-write.js index df533d77b3f..2be5edab897 100644 --- a/test/parallel/test-zlib-object-write.js +++ b/test/parallel/test-zlib-object-write.js @@ -1,12 +1,14 @@ 'use strict'; const common = require('../common'); +const assert = require('assert'); const { Gunzip } = require('zlib'); const gunzip = new Gunzip({ objectMode: true }); -gunzip.write({}, common.expectsError({ - name: 'TypeError' -})); -gunzip.on('error', common.expectsError({ - name: 'TypeError' -})); +gunzip.on('error', common.mustNotCall()); +assert.throws(() => { + gunzip.write({}); +}, { + name: 'TypeError', + code: 'ERR_INVALID_ARG_TYPE' +}); diff --git a/test/parallel/test-zlib-write-after-close.js b/test/parallel/test-zlib-write-after-close.js index d67abee0ac3..eb8ff435396 100644 --- a/test/parallel/test-zlib-write-after-close.js +++ b/test/parallel/test-zlib-write-after-close.js @@ -26,12 +26,7 @@ const zlib = require('zlib'); zlib.gzip('hello', common.mustCall(function(err, out) { const unzip = zlib.createGunzip(); unzip.close(common.mustCall()); - unzip.write(out, common.expectsError({ - code: 'ERR_STREAM_DESTROYED', - name: 'Error', - message: 'Cannot call write after a stream was destroyed' - })); - unzip.on('error', common.expectsError({ + unzip.write('asd', common.expectsError({ code: 'ERR_STREAM_DESTROYED', name: 'Error', message: 'Cannot call write after a stream was destroyed' diff --git a/test/pummel/test-http-many-keep-alive-connections.js b/test/pummel/test-http-many-keep-alive-connections.js index 59eccc328d5..9a7083a8ae6 100644 --- a/test/pummel/test-http-many-keep-alive-connections.js +++ b/test/pummel/test-http-many-keep-alive-connections.js @@ -20,7 +20,7 @@ // USE OR OTHER DEALINGS IN THE SOFTWARE. 'use strict'; -const common = require('../common'); +require('../common'); const assert = require('assert'); const http = require('http'); @@ -40,9 +40,9 @@ server.once('connection', function(c) { connection = c; }); -server.listen(common.PORT, function connect() { +server.listen(0, function connect() { const request = http.get({ - port: common.PORT, + port: server.address().port, path: '/', headers: { 'Connection': 'Keep-alive' diff --git a/test/pummel/test-http-upload-timeout.js b/test/pummel/test-http-upload-timeout.js index 2c1cbf61988..c3ec3ad19d5 100644 --- a/test/pummel/test-http-upload-timeout.js +++ b/test/pummel/test-http-upload-timeout.js @@ -23,7 +23,7 @@ // This tests setTimeout() by having multiple clients connecting and sending // data in random intervals. Clients are also randomly disconnecting until there // are no more clients left. If no false timeout occurs, this test has passed. 
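The pummel-test hunks above and below all apply the same refactor: rather than binding to the fixed common.PORT, each server listens on port 0 so the OS assigns a free ephemeral port, and the test reads the assigned port back from server.address(). A minimal standalone sketch of the pattern (illustrative only, not part of the patch):

const net = require('net');

const server = net.createServer((socket) => socket.end());
server.listen(0, () => {
  // Port 0 asks the OS for any free port; address() reports which one.
  const { port } = server.address();
  const client = net.createConnection(port, () => {
    client.end();
    server.close();
  });
});

Because the port is no longer a shared compile-time constant, these tests can run in parallel without EADDRINUSE collisions.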
-const common = require('../common'); +require('../common'); const http = require('http'); const server = http.createServer(); let connections = 0; @@ -44,13 +44,13 @@ server.on('request', function(req, res) { req.resume(); }); -server.listen(common.PORT, '127.0.0.1', function() { +server.listen(0, '127.0.0.1', function() { for (let i = 0; i < 10; i++) { connections++; setTimeout(function() { const request = http.request({ - port: common.PORT, + port: server.address().port, method: 'POST', path: '/' }); diff --git a/test/pummel/test-https-large-response.js b/test/pummel/test-https-large-response.js index d72fd2a65ba..acf442d2541 100644 --- a/test/pummel/test-https-large-response.js +++ b/test/pummel/test-https-large-response.js @@ -43,9 +43,9 @@ const server = https.createServer(options, common.mustCall(function(req, res) { res.end(body); })); -server.listen(common.PORT, common.mustCall(function() { +server.listen(0, common.mustCall(function() { https.get({ - port: common.PORT, + port: server.address().port, rejectUnauthorized: false }, common.mustCall(function(res) { console.log('response!'); diff --git a/test/pummel/test-https-no-reader.js b/test/pummel/test-https-no-reader.js index 31d2bfa2d3c..63db032d0b6 100644 --- a/test/pummel/test-https-no-reader.js +++ b/test/pummel/test-https-no-reader.js @@ -43,10 +43,10 @@ const server = https.createServer(options, function(req, res) { res.end(); }); -server.listen(common.PORT, function() { +server.listen(0, function() { const req = https.request({ method: 'POST', - port: common.PORT, + port: server.address().port, rejectUnauthorized: false }, function(res) { res.read(0); diff --git a/test/pummel/test-net-pause.js b/test/pummel/test-net-pause.js index 512d833ae75..76237c17214 100644 --- a/test/pummel/test-net-pause.js +++ b/test/pummel/test-net-pause.js @@ -20,7 +20,7 @@ // USE OR OTHER DEALINGS IN THE SOFTWARE. 
'use strict'; -const common = require('../common'); +require('../common'); const assert = require('assert'); const net = require('net'); @@ -43,7 +43,7 @@ const server = net.createServer((connection) => { }); server.on('listening', () => { - const client = net.createConnection(common.PORT); + const client = net.createConnection(server.address().port); client.setEncoding('ascii'); client.on('data', (d) => { console.log(d); @@ -83,7 +83,7 @@ server.on('listening', () => { client.end(); }); }); -server.listen(common.PORT); +server.listen(0); process.on('exit', () => { assert.strictEqual(recv.length, N); diff --git a/test/pummel/test-net-pingpong-delay.js b/test/pummel/test-net-pingpong-delay.js index 392a6e0fed7..bddcd4de72d 100644 --- a/test/pummel/test-net-pingpong-delay.js +++ b/test/pummel/test-net-pingpong-delay.js @@ -24,7 +24,7 @@ const common = require('../common'); const assert = require('assert'); const net = require('net'); -function pingPongTest(port, host, on_complete) { +function pingPongTest(host, on_complete) { const N = 100; const DELAY = 1; let count = 0; @@ -63,8 +63,8 @@ function pingPongTest(port, host, on_complete) { }); }); - server.listen(port, host, common.mustCall(function() { - const client = net.createConnection(port, host); + server.listen(0, host, common.mustCall(function() { + const client = net.createConnection(server.address().port, host); client.setEncoding('utf8'); @@ -104,4 +104,4 @@ function pingPongTest(port, host, on_complete) { })); } -pingPongTest(common.PORT); +pingPongTest(); diff --git a/test/pummel/test-net-throttle.js b/test/pummel/test-net-throttle.js index 190c242d6e1..9708d69f962 100644 --- a/test/pummel/test-net-throttle.js +++ b/test/pummel/test-net-throttle.js @@ -20,7 +20,7 @@ // USE OR OTHER DEALINGS IN THE SOFTWARE. 'use strict'; -const common = require('../common'); +require('../common'); const assert = require('assert'); const net = require('net'); @@ -32,8 +32,6 @@ let npauses = 0; console.log('build big string'); const body = 'C'.repeat(N); -console.log(`start server on port ${common.PORT}`); - const server = net.createServer((connection) => { connection.write(body.slice(0, part_N)); connection.write(body.slice(part_N, 2 * part_N)); @@ -44,9 +42,11 @@ const server = net.createServer((connection) => { connection.end(); }); -server.listen(common.PORT, () => { +server.listen(0, () => { + const port = server.address().port; + console.log(`server started on port ${port}`); let paused = false; - const client = net.createConnection(common.PORT); + const client = net.createConnection(port); client.setEncoding('ascii'); client.on('data', (d) => { chars_recved += d.length; diff --git a/test/pummel/test-net-timeout.js b/test/pummel/test-net-timeout.js index 59a8d50f796..5b9f2a01b38 100644 --- a/test/pummel/test-net-timeout.js +++ b/test/pummel/test-net-timeout.js @@ -20,7 +20,7 @@ // USE OR OTHER DEALINGS IN THE SOFTWARE. 
'use strict'; -const common = require('../common'); +require('../common'); const assert = require('assert'); const net = require('net'); @@ -54,10 +54,11 @@ const echo_server = net.createServer((socket) => { }); }); -echo_server.listen(common.PORT, () => { - console.log(`server listening at ${common.PORT}`); +echo_server.listen(0, () => { + const port = echo_server.address().port; + console.log(`server listening at ${port}`); - const client = net.createConnection(common.PORT); + const client = net.createConnection(port); client.setEncoding('UTF8'); client.setTimeout(0); // Disable the timeout for client client.on('connect', () => { diff --git a/test/pummel/test-net-timeout2.js b/test/pummel/test-net-timeout2.js index 93fbe7bfab3..4220ef77ca6 100644 --- a/test/pummel/test-net-timeout2.js +++ b/test/pummel/test-net-timeout2.js @@ -48,7 +48,7 @@ const server = net.createServer(function(socket) { }); -server.listen(common.PORT, function() { - const s = net.connect(common.PORT); +server.listen(0, function() { + const s = net.connect(server.address().port); s.pipe(process.stdout); }); diff --git a/test/pummel/test-net-write-callbacks.js b/test/pummel/test-net-write-callbacks.js index 0bcc9e2dec1..cb011ab0022 100644 --- a/test/pummel/test-net-write-callbacks.js +++ b/test/pummel/test-net-write-callbacks.js @@ -20,7 +20,7 @@ // USE OR OTHER DEALINGS IN THE SOFTWARE. 'use strict'; -const common = require('../common'); +require('../common'); const net = require('net'); const assert = require('assert'); @@ -55,8 +55,8 @@ function makeCallback(c) { }; } -server.listen(common.PORT, function() { - const client = net.createConnection(common.PORT); +server.listen(0, function() { + const client = net.createConnection(server.address().port); client.on('connect', function() { for (let i = 0; i < N; i++) { diff --git a/test/pummel/test-regress-GH-892.js b/test/pummel/test-regress-GH-892.js index 48201807e39..d5e8cc02156 100644 --- a/test/pummel/test-regress-GH-892.js +++ b/test/pummel/test-regress-GH-892.js @@ -52,7 +52,7 @@ function makeRequest() { // more easily. Also, this is handy when using this test to // view V8 opt/deopt behavior. 
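Since the server's port is now only known at runtime, the GH-892 hunk below forwards it to the spawned child through argv instead of relying on the shared common.PORT constant. A rough sketch of that handoff ('child.js' is a hypothetical script, not from the patch):

const { spawn } = require('child_process');
const net = require('net');

const server = net.createServer((c) => c.end()).listen(0, () => {
  // The child cannot see the parent's variables; pass the port as an argument.
  const port = server.address().port;
  const child = spawn(process.execPath, ['child.js', String(port)], {
    stdio: 'inherit'
  });
  child.on('exit', () => server.close());
});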
const args = process.execArgv.concat([ childScript, - common.PORT, + server.address().port, bytesExpected ]); const child = spawn(process.execPath, args); @@ -101,7 +101,7 @@ const server = https.Server(serverOptions, function(req, res) { }); }); -server.listen(common.PORT, function() { +server.listen(0, function() { console.log(`expecting ${bytesExpected} bytes`); makeRequest(); }); diff --git a/test/pummel/test-tls-server-large-request.js b/test/pummel/test-tls-server-large-request.js index 5d3a0615bad..7537ca813af 100644 --- a/test/pummel/test-tls-server-large-request.js +++ b/test/pummel/test-tls-server-large-request.js @@ -59,9 +59,9 @@ const server = tls.Server(options, common.mustCall(function(socket) { socket.pipe(mediator); })); -server.listen(common.PORT, common.mustCall(function() { +server.listen(0, common.mustCall(() => { const client1 = tls.connect({ - port: common.PORT, + port: server.address().port, rejectUnauthorized: false }, common.mustCall(function() { client1.end(request); diff --git a/test/pummel/test-tls-throttle.js b/test/pummel/test-tls-throttle.js index af588dfa91f..da675204a1e 100644 --- a/test/pummel/test-tls-throttle.js +++ b/test/pummel/test-tls-throttle.js @@ -46,9 +46,9 @@ const server = tls.Server(options, common.mustCall(function(socket) { let recvCount = 0; -server.listen(common.PORT, function() { +server.listen(0, function() { const client = tls.connect({ - port: common.PORT, + port: server.address().port, rejectUnauthorized: false }); diff --git a/test/sequential/test-dgram-implicit-bind-failure.js b/test/sequential/test-dgram-implicit-bind-failure.js index d77db12618f..89da00d5766 100644 --- a/test/sequential/test-dgram-implicit-bind-failure.js +++ b/test/sequential/test-dgram-implicit-bind-failure.js @@ -2,48 +2,31 @@ 'use strict'; const common = require('../common'); const assert = require('assert'); +const EventEmitter = require('events'); const dgram = require('dgram'); const dns = require('dns'); const { kStateSymbol } = require('internal/dgram'); +const mockError = new Error('fake DNS'); // Monkey patch dns.lookup() so that it always fails. dns.lookup = function(address, family, callback) { - process.nextTick(() => { callback(new Error('fake DNS')); }); + process.nextTick(() => { callback(mockError); }); }; const socket = dgram.createSocket('udp4'); -let dnsFailures = 0; -let sendFailures = 0; -process.on('exit', () => { - assert.strictEqual(dnsFailures, 3); - assert.strictEqual(sendFailures, 3); -}); - -socket.on('error', (err) => { - if (/^Error: fake DNS$/.test(err)) { - // The DNS lookup should fail since it is monkey patched. At that point in - // time, the send queue should be populated with the send() operation. There - // should also be two listeners - this function and the dgram internal one - // time error handler. - dnsFailures++; - assert(Array.isArray(socket[kStateSymbol].queue)); - assert.strictEqual(socket[kStateSymbol].queue.length, 1); - assert.strictEqual(socket.listenerCount('error'), 2); - return; - } - - if (err.code === 'ERR_SOCKET_CANNOT_SEND') { - // On error, the queue should be destroyed and this function should be - // the only listener. - sendFailures++; - assert.strictEqual(socket[kStateSymbol].queue, undefined); - assert.strictEqual(socket.listenerCount('error'), 1); - return; - } - - assert.fail(`Unexpected error: ${err}`); -}); +socket.on(EventEmitter.errorMonitor, common.mustCall((err) => { + // The DNS lookup should fail since it is monkey patched. 
At that point in + // time, the send queue should be populated with the send() operation. + assert.strictEqual(err, mockError); + assert(Array.isArray(socket[kStateSymbol].queue)); + assert.strictEqual(socket[kStateSymbol].queue.length, 1); +}, 3)); + +socket.on('error', common.mustCall((err) => { + assert.strictEqual(err, mockError); + assert.strictEqual(socket[kStateSymbol].queue, undefined); +}, 3)); // Initiate a few send() operations, which will fail. socket.send('foobar', common.PORT, 'localhost'); diff --git a/test/sequential/test-fs-watch.js b/test/sequential/test-fs-watch.js index 031e92c61c0..8c543a2a172 100644 --- a/test/sequential/test-fs-watch.js +++ b/test/sequential/test-fs-watch.js @@ -117,14 +117,40 @@ tmpdir.refresh(); // https://github.com/joyent/node/issues/6690 { let oldhandle; - common.expectsInternalAssertion( + assert.throws( () => { const w = fs.watch(__filename, common.mustNotCall()); oldhandle = w._handle; w._handle = { close: w._handle.close }; w.close(); }, - 'handle must be a FSEvent' + { + name: 'Error', + code: 'ERR_INTERNAL_ASSERTION', + message: /^handle must be a FSEvent/, + } + ); + oldhandle.close(); // clean up +} + +{ + let oldhandle; + assert.throws( + () => { + const w = fs.watch(__filename, common.mustNotCall()); + oldhandle = w._handle; + const protoSymbols = + Object.getOwnPropertySymbols(Object.getPrototypeOf(w)); + const kFSWatchStart = + protoSymbols.find((val) => val.toString() === 'Symbol(kFSWatchStart)'); + w._handle = {}; + w[kFSWatchStart](); + }, + { + name: 'Error', + code: 'ERR_INTERNAL_ASSERTION', + message: /^handle must be a FSEvent/, + } ); oldhandle.close(); // clean up } diff --git a/test/sequential/test-https-keep-alive-large-write.js b/test/sequential/test-https-keep-alive-large-write.js deleted file mode 100644 index 79381ba8735..00000000000 --- a/test/sequential/test-https-keep-alive-large-write.js +++ /dev/null @@ -1,47 +0,0 @@ -'use strict'; -const common = require('../common'); -if (!common.hasCrypto) - common.skip('missing crypto'); -const fixtures = require('../common/fixtures'); -const https = require('https'); - -// This test assesses whether long-running writes can complete -// or timeout because the socket is not aware that the backing -// stream is still writing. 
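The dgram test above relies on events.errorMonitor, available in Node.js of this era: listeners installed under that symbol observe 'error' events before the regular handlers run, without consuming the event. A minimal sketch of the semantics (illustrative, not part of the patch):

const EventEmitter = require('events');

const ee = new EventEmitter();
ee.on(EventEmitter.errorMonitor, (err) => {
  // Runs first and does not count as handling the error.
  console.log('monitored:', err.message);
});
ee.on('error', (err) => {
  // Still invoked; without any 'error' listener the error would be thrown.
  console.log('handled:', err.message);
});
ee.emit('error', new Error('boom'));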
- -const writeSize = 30000000; -let socket; - -const server = https.createServer({ - key: fixtures.readKey('agent1-key.pem'), - cert: fixtures.readKey('agent1-cert.pem') -}, common.mustCall((req, res) => { - const content = Buffer.alloc(writeSize, 0x44); - - res.writeHead(200, { - 'Content-Type': 'application/octet-stream', - 'Content-Length': content.length.toString(), - 'Vary': 'Accept-Encoding' - }); - - socket = res.socket; - const onTimeout = socket._onTimeout; - socket._onTimeout = common.mustCallAtLeast(() => onTimeout.call(socket), 1); - res.write(content); - res.end(); -})); -server.on('timeout', common.mustNotCall()); - -server.listen(0, common.mustCall(() => { - https.get({ - path: '/', - port: server.address().port, - rejectUnauthorized: false - }, (res) => { - res.once('data', () => { - socket._onTimeout(); - res.on('data', () => {}); - }); - res.on('end', () => server.close()); - }); -})); diff --git a/test/sequential/test-net-reconnect-error.js b/test/sequential/test-net-reconnect-error.js index e16d567c0ad..93d8f3673ab 100644 --- a/test/sequential/test-net-reconnect-error.js +++ b/test/sequential/test-net-reconnect-error.js @@ -24,24 +24,17 @@ const common = require('../common'); const net = require('net'); const assert = require('assert'); const N = 20; -let client_error_count = 0; -let disconnect_count = 0; +let disconnectCount = 0; const c = net.createConnection(common.PORT); c.on('connect', common.mustNotCall('client should not have connected')); c.on('error', common.mustCall((e) => { - client_error_count++; assert.strictEqual(e.code, 'ECONNREFUSED'); }, N + 1)); c.on('close', common.mustCall(() => { - if (disconnect_count++ < N) + if (disconnectCount++ < N) c.connect(common.PORT); // reconnect }, N + 1)); - -process.on('exit', function() { - assert.strictEqual(disconnect_count, N + 1); - assert.strictEqual(client_error_count, N + 1); -}); diff --git a/test/sequential/test-tls-securepair-client.js b/test/sequential/test-tls-securepair-client.js index c450410baf9..d5e2b7b7d2b 100644 --- a/test/sequential/test-tls-securepair-client.js +++ b/test/sequential/test-tls-securepair-client.js @@ -62,7 +62,7 @@ function test(keyPath, certPath, check, next) { const cert = fixtures.readSync(certPath).toString(); const server = spawn(common.opensslCli, ['s_server', - '-accept', common.PORT, + '-accept', 0, '-cert', fixtures.path(certPath), '-key', fixtures.path(keyPath)]); server.stdout.pipe(process.stdout); @@ -78,10 +78,11 @@ function test(keyPath, certPath, check, next) { console.log(state); switch (state) { case 'WAIT-ACCEPT': - if (/ACCEPT/.test(serverStdoutBuffer)) { - // Give s_server half a second to start up. 
- setTimeout(startClient, 500); + const matches = serverStdoutBuffer.match(/ACCEPT .*?:(\d+)/); + if (matches) { + const port = matches[1]; state = 'WAIT-HELLO'; + startClient(port); } break; @@ -117,7 +118,7 @@ function test(keyPath, certPath, check, next) { }); - function startClient() { + function startClient(port) { const s = new net.Stream(); const sslcontext = tls.createSecureContext({ key, cert }); @@ -131,7 +132,7 @@ function test(keyPath, certPath, check, next) { pair.encrypted.pipe(s); s.pipe(pair.encrypted); - s.connect(common.PORT); + s.connect(port); s.on('connect', function() { console.log('client connected'); diff --git a/test/wasi/c/link.c b/test/wasi/c/link.c new file mode 100644 index 00000000000..16added7d5c --- /dev/null +++ b/test/wasi/c/link.c @@ -0,0 +1,17 @@ +#include <assert.h> +#include <sys/stat.h> +#include <unistd.h> + +#define OLD "/sandbox/input.txt" +#define NEW "/tmp/output.txt" + +int main() { + struct stat st_old; + struct stat st_new; + + assert(0 == stat(OLD, &st_old)); + assert(0 == link(OLD, NEW)); + assert(0 == stat(NEW, &st_new)); + assert(st_old.st_ino == st_new.st_ino); + return 0; +} diff --git a/test/wasi/test-return-on-exit.js b/test/wasi/test-return-on-exit.js new file mode 100644 index 00000000000..41a240f055b --- /dev/null +++ b/test/wasi/test-return-on-exit.js @@ -0,0 +1,18 @@ +// Flags: --experimental-wasi-unstable-preview1 --experimental-wasm-bigint +'use strict'; +const common = require('../common'); +const assert = require('assert'); +const fs = require('fs'); +const path = require('path'); +const { WASI } = require('wasi'); +const wasi = new WASI({ returnOnExit: true }); +const importObject = { wasi_snapshot_preview1: wasi.wasiImport }; +const wasmDir = path.join(__dirname, 'wasm'); +const modulePath = path.join(wasmDir, 'exitcode.wasm'); +const buffer = fs.readFileSync(modulePath); + +(async () => { + const { instance } = await WebAssembly.instantiate(buffer, importObject); + + assert.strictEqual(wasi.start(instance), 120); +})().then(common.mustCall()); diff --git a/test/wasi/test-wasi-options-validation.js b/test/wasi/test-wasi-options-validation.js index f0aa6932db4..36411bc179c 100644 --- a/test/wasi/test-wasi-options-validation.js +++ b/test/wasi/test-wasi-options-validation.js @@ -21,6 +21,10 @@ assert.throws(() => { new WASI({ env: 'fhqwhgads' }); }, assert.throws(() => { new WASI({ preopens: 'fhqwhgads' }); }, { code: 'ERR_INVALID_ARG_TYPE', message: /\bpreopens\b/ }); +// If returnOnExit is not a boolean and not undefined, it should throw. +assert.throws(() => { new WASI({ returnOnExit: 'fhqwhgads' }); }, + { code: 'ERR_INVALID_ARG_TYPE', message: /\breturnOnExit\b/ }); + // If options is provided, but not an object, the constructor should throw. 
[null, 'foo', '', 0, NaN, Symbol(), true, false, () => {}].forEach((value) => { assert.throws(() => { new WASI(value); }, diff --git a/test/wasi/test-wasi.js b/test/wasi/test-wasi.js index fbd51148cde..357eb7531fb 100644 --- a/test/wasi/test-wasi.js +++ b/test/wasi/test-wasi.js @@ -64,6 +64,7 @@ if (process.argv[2] === 'wasi-child') { runWASI({ test: 'getentropy' }); runWASI({ test: 'getrusage' }); runWASI({ test: 'gettimeofday' }); + runWASI({ test: 'link' }); runWASI({ test: 'main_args' }); runWASI({ test: 'notdir' }); // runWASI({ test: 'poll' }); diff --git a/test/wasi/wasm/link.wasm b/test/wasi/wasm/link.wasm new file mode 100755 index 00000000000..60f5c07601a Binary files /dev/null and b/test/wasi/wasm/link.wasm differ diff --git a/tools/code_cache/cache_builder.cc b/tools/code_cache/cache_builder.cc index 8210355c4c3..28d61a6c70c 100644 --- a/tools/code_cache/cache_builder.cc +++ b/tools/code_cache/cache_builder.cc @@ -1,4 +1,5 @@ #include "cache_builder.h" +#include "debug_utils-inl.h" #include "node_native_module.h" #include "util.h" @@ -67,8 +68,7 @@ static void GetInitializer(const std::string& id, std::stringstream& ss) { } static std::string GenerateCodeCache( - const std::map<std::string, ScriptCompiler::CachedData*>& data, - bool log_progress) { + const std::map<std::string, ScriptCompiler::CachedData*>& data) { std::stringstream ss; ss << R"(#include <cstddef> #include "node_native_module_env.h" @@ -89,11 +89,13 @@ const bool has_code_cache = true; total += cached_data->length; std::string def = GetDefinition(id, cached_data->length, cached_data->data); ss << def << "\n\n"; - if (log_progress) { - std::cout << "Generated cache for " << id - << ", size = " << FormatSize(cached_data->length) - << ", total = " << FormatSize(total) << "\n"; - } + std::string size_str = FormatSize(cached_data->length); + std::string total_str = FormatSize(total); + per_process::Debug(DebugCategory::CODE_CACHE, + "Generated cache for %s, size = %s, total = %s\n", + id.c_str(), + size_str.c_str(), + total_str.c_str()); } ss << R"(void NativeModuleEnv::InitializeCodeCache() { @@ -142,14 +144,7 @@ std::string CodeCacheBuilder::Generate(Local<Context> context) { } } - char env_buf[32]; - size_t env_size = sizeof(env_buf); - int ret = uv_os_getenv("NODE_DEBUG", env_buf, &env_size); - bool log_progress = false; - if (ret == 0 && strcmp(env_buf, "mkcodecache") == 0) { - log_progress = true; - } - return GenerateCodeCache(data, log_progress); + return GenerateCodeCache(data); } } // namespace native_module diff --git a/tools/code_cache/mkcodecache.cc b/tools/code_cache/mkcodecache.cc index e5b43a44b8d..34af7bc61ba 100644 --- a/tools/code_cache/mkcodecache.cc +++ b/tools/code_cache/mkcodecache.cc @@ -6,6 +6,7 @@ #include #include "cache_builder.h" +#include "debug_utils-inl.h" #include "libplatform/libplatform.h" #include "v8.h" @@ -40,6 +41,8 @@ int main(int argc, char* argv[]) { return 1; } + node::per_process::enabled_debug_list.Parse(nullptr); + std::unique_ptr<v8::Platform> platform = v8::platform::NewDefaultPlatform(); v8::V8::InitializePlatform(platform.get()); v8::V8::Initialize(); diff --git a/tools/doc/versions.js b/tools/doc/versions.js index 7a4e2c3ff76..bff6ac3617f 100644 --- a/tools/doc/versions.js +++ b/tools/doc/versions.js @@ -31,6 +31,8 @@ const getUrl = (url) => { }); }; +const kNoInternet = !!process.env.NODE_TEST_NO_INTERNET; + module.exports = { async versions() { if (_versions) { @@ -42,20 +44,24 @@ module.exports = { const url = 'https://raw.githubusercontent.com/nodejs/node/master/CHANGELOG.md'; let changelog; - try { - changelog = await getUrl(url); - } catch (e) { - // Fail if this is a 
release build, otherwise fallback to local files. - if (isRelease()) { - throw e; - } else { - const file = path.join(srcRoot, 'CHANGELOG.md'); - console.warn(`Unable to retrieve ${url}. Falling back to ${file}.`); - changelog = readFileSync(file, { encoding: 'utf8' }); + const file = path.join(srcRoot, 'CHANGELOG.md'); + if (kNoInternet) { + changelog = readFileSync(file, { encoding: 'utf8' }); + } else { + try { + changelog = await getUrl(url); + } catch (e) { + // Fail if this is a release build, otherwise fallback to local files. + if (isRelease()) { + throw e; + } else { + console.warn(`Unable to retrieve ${url}. Falling back to ${file}.`); + changelog = readFileSync(file, { encoding: 'utf8' }); + } } } const ltsRE = /Long Term Support/i; - const versionRE = /\* \[Node\.js ([0-9.]+)\][^-—]+[-—]\s*(.*)\r?\n/g; + const versionRE = /\* \[Node\.js ([0-9.]+)\]\S+ (.*)\r?\n/g; _versions = []; let match; while ((match = versionRE.exec(changelog)) != null) { diff --git a/tools/gyp/DEPS b/tools/gyp/DEPS deleted file mode 100644 index 167fb779b0e..00000000000 --- a/tools/gyp/DEPS +++ /dev/null @@ -1,23 +0,0 @@ -# DEPS file for gclient use in buildbot execution of gyp tests. -# -# (You don't need to use gclient for normal GYP development work.) - -vars = { - "chromium_git": "https://chromium.googlesource.com/", -} - -deps = { -} - -deps_os = { - "win": { - "third_party/cygwin": - Var("chromium_git") + "chromium/deps/cygwin@4fbd5b9", - - "third_party/python_26": - Var("chromium_git") + "chromium/deps/python_26@5bb4080", - - "src/third_party/pefile": - Var("chromium_git") + "external/pefile@72c6ae4", - }, -} diff --git a/tools/gyp/buildbot/buildbot_run.py b/tools/gyp/buildbot/buildbot_run.py deleted file mode 100755 index cdd347d0bcc..00000000000 --- a/tools/gyp/buildbot/buildbot_run.py +++ /dev/null @@ -1,137 +0,0 @@ -#!/usr/bin/env python -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Argument-less script to select what to run on the buildbots.""" -from __future__ import print_function - -import os -import shutil -import subprocess -import sys - - -BUILDBOT_DIR = os.path.dirname(os.path.abspath(__file__)) -TRUNK_DIR = os.path.dirname(BUILDBOT_DIR) -ROOT_DIR = os.path.dirname(TRUNK_DIR) -CMAKE_DIR = os.path.join(ROOT_DIR, 'cmake') -CMAKE_BIN_DIR = os.path.join(CMAKE_DIR, 'bin') -OUT_DIR = os.path.join(TRUNK_DIR, 'out') - - -def CallSubProcess(*args, **kwargs): - """Wrapper around subprocess.call which treats errors as build exceptions.""" - with open(os.devnull) as devnull_fd: - retcode = subprocess.call(stdin=devnull_fd, *args, **kwargs) - if retcode != 0: - print('@@@STEP_EXCEPTION@@@') - sys.exit(1) - - -def PrepareCmake(): - """Build CMake 2.8.8 since the version in Precise is 2.8.7.""" - if os.environ['BUILDBOT_CLOBBER'] == '1': - print('@@@BUILD_STEP Clobber CMake checkout@@@') - shutil.rmtree(CMAKE_DIR) - - # We always build CMake 2.8.8, so no need to do anything - # if the directory already exists. 
- if os.path.isdir(CMAKE_DIR): - return - - print('@@@BUILD_STEP Initialize CMake checkout@@@') - os.mkdir(CMAKE_DIR) - - print('@@@BUILD_STEP Sync CMake@@@') - CallSubProcess( - ['git', 'clone', - '--depth', '1', - '--single-branch', - '--branch', 'v2.8.8', - '--', - 'git://cmake.org/cmake.git', - CMAKE_DIR], - cwd=CMAKE_DIR) - - print('@@@BUILD_STEP Build CMake@@@') - CallSubProcess( - ['/bin/bash', 'bootstrap', '--prefix=%s' % CMAKE_DIR], - cwd=CMAKE_DIR) - - CallSubProcess( ['make', 'cmake'], cwd=CMAKE_DIR) - - -def GypTestFormat(title, format=None, msvs_version=None, tests=[]): - """Run the gyp tests for a given format, emitting annotator tags. - - See annotator docs at: - https://sites.google.com/a/chromium.org/dev/developers/testing/chromium-build-infrastructure/buildbot-annotations - Args: - format: gyp format to test. - Returns: - 0 for sucesss, 1 for failure. - """ - if not format: - format = title - - print('@@@BUILD_STEP ' + title + '@@@') - sys.stdout.flush() - env = os.environ.copy() - if msvs_version: - env['GYP_MSVS_VERSION'] = msvs_version - command = ' '.join( - [sys.executable, 'gyp/gyptest.py', - '--all', - '--passed', - '--format', format, - '--path', CMAKE_BIN_DIR, - '--chdir', 'gyp'] + tests) - retcode = subprocess.call(command, cwd=ROOT_DIR, env=env, shell=True) - if retcode: - # Emit failure tag, and keep going. - print('@@@STEP_FAILURE@@@') - return 1 - return 0 - - -def GypBuild(): - # Dump out/ directory. - print('@@@BUILD_STEP cleanup@@@') - print('Removing %s...' % OUT_DIR) - shutil.rmtree(OUT_DIR, ignore_errors=True) - print('Done.') - - retcode = 0 - if sys.platform.startswith('linux'): - retcode += GypTestFormat('ninja') - retcode += GypTestFormat('make') - PrepareCmake() - retcode += GypTestFormat('cmake') - elif sys.platform == 'darwin': - retcode += GypTestFormat('ninja') - retcode += GypTestFormat('xcode') - retcode += GypTestFormat('make') - elif sys.platform == 'win32': - retcode += GypTestFormat('ninja') - if os.environ['BUILDBOT_BUILDERNAME'] == 'gyp-win64': - retcode += GypTestFormat('msvs-ninja-2013', format='msvs-ninja', - msvs_version='2013', - tests=[ - r'test\generator-output\gyptest-actions.py', - r'test\generator-output\gyptest-relocate.py', - r'test\generator-output\gyptest-rules.py']) - retcode += GypTestFormat('msvs-2013', format='msvs', msvs_version='2013') - else: - raise Exception('Unknown platform') - if retcode: - # TODO(bradnelson): once the annotator supports a postscript (section for - # after the build proper that could be used for cumulative failures), - # use that instead of this. This isolates the final return value so - # that it isn't misattributed to the last stage. - print('@@@BUILD_STEP failures@@@') - sys.exit(retcode) - - -if __name__ == '__main__': - GypBuild() diff --git a/tools/gyp/buildbot/commit_queue/OWNERS b/tools/gyp/buildbot/commit_queue/OWNERS deleted file mode 100644 index b269c198b43..00000000000 --- a/tools/gyp/buildbot/commit_queue/OWNERS +++ /dev/null @@ -1,6 +0,0 @@ -set noparent -bradnelson@chromium.org -bradnelson@google.com -iannucci@chromium.org -scottmg@chromium.org -thakis@chromium.org diff --git a/tools/gyp/buildbot/commit_queue/README b/tools/gyp/buildbot/commit_queue/README deleted file mode 100644 index 94284978832..00000000000 --- a/tools/gyp/buildbot/commit_queue/README +++ /dev/null @@ -1,3 +0,0 @@ -cq_config.json describes the trybots that must pass in order -to land a change through the commit queue. -Comments are here as the file is strictly JSON. 
diff --git a/tools/gyp/buildbot/commit_queue/cq_config.json b/tools/gyp/buildbot/commit_queue/cq_config.json deleted file mode 100644 index 656c21e54fb..00000000000 --- a/tools/gyp/buildbot/commit_queue/cq_config.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "trybots": { - "launched": { - "tryserver.nacl": { - "gyp-presubmit": ["defaulttests"], - "gyp-linux": ["defaulttests"], - "gyp-mac": ["defaulttests"], - "gyp-win32": ["defaulttests"], - "gyp-win64": ["defaulttests"] - } - }, - "triggered": { - } - } -} diff --git a/tools/gyp/codereview.settings b/tools/gyp/codereview.settings deleted file mode 100644 index 27fb9f99e25..00000000000 --- a/tools/gyp/codereview.settings +++ /dev/null @@ -1,6 +0,0 @@ -# This file is used by git cl to get repository specific information. -CC_LIST: gyp-developer@googlegroups.com -CODE_REVIEW_SERVER: codereview.chromium.org -GERRIT_HOST: True -PROJECT: gyp -VIEW_VC: https://chromium.googlesource.com/external/gyp/+/ diff --git a/tools/gyp/gyp_main.py b/tools/gyp/gyp_main.py index 25a6eba94aa..f738e8009f7 100755 --- a/tools/gyp/gyp_main.py +++ b/tools/gyp/gyp_main.py @@ -6,10 +6,44 @@ import os import sys +import subprocess + +PY3 = bytes != str + +# Below IsCygwin() function copied from pylib/gyp/common.py +def IsCygwin(): + try: + out = subprocess.Popen("uname", + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT) + stdout, stderr = out.communicate() + if PY3: + stdout = stdout.decode("utf-8") + return "CYGWIN" in str(stdout) + except Exception: + return False + + +def UnixifyPath(path): + try: + if not IsCygwin(): + return path + out = subprocess.Popen(["cygpath", "-u", path], + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT) + stdout, _ = out.communicate() + if PY3: + stdout = stdout.decode("utf-8") + return str(stdout) + except Exception: + return path + # Make sure we're using the version of pylib in this repo, not one installed -# elsewhere on the system. -sys.path.insert(0, os.path.join(os.path.dirname(sys.argv[0]), 'pylib')) +# elsewhere on the system. Also convert to Unix style path on Cygwin systems, +# else the 'gyp' library will not be found +path = UnixifyPath(sys.argv[0]) +sys.path.insert(0, os.path.join(os.path.dirname(path), 'pylib')) import gyp if __name__ == '__main__': diff --git a/tools/gyp/gyptest.py b/tools/gyp/gyptest.py deleted file mode 100755 index 1a9ffca7a13..00000000000 --- a/tools/gyp/gyptest.py +++ /dev/null @@ -1,243 +0,0 @@ -#!/usr/bin/env python -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -"""gyptest.py -- test runner for GYP tests.""" - -from __future__ import print_function - -import argparse -import math -import os -import platform -import subprocess -import sys -import time - - -def is_test_name(f): - return f.startswith('gyptest') and f.endswith('.py') - - -def find_all_gyptest_files(directory): - result = [] - for root, dirs, files in os.walk(directory): - result.extend([ os.path.join(root, f) for f in files if is_test_name(f) ]) - result.sort() - return result - - -def main(argv=None): - if argv is None: - argv = sys.argv - - parser = argparse.ArgumentParser() - parser.add_argument("-a", "--all", action="store_true", - help="run all tests") - parser.add_argument("-C", "--chdir", action="store", - help="change to directory") - parser.add_argument("-f", "--format", action="store", default='', - help="run tests with the specified formats") - parser.add_argument("-G", '--gyp_option', action="append", default=[], - help="Add -G options to the gyp command line") - parser.add_argument("-l", "--list", action="store_true", - help="list available tests and exit") - parser.add_argument("-n", "--no-exec", action="store_true", - help="no execute, just print the command line") - parser.add_argument("--path", action="append", default=[], - help="additional $PATH directory") - parser.add_argument("-q", "--quiet", action="store_true", - help="quiet, don't print anything unless there are failures") - parser.add_argument("-v", "--verbose", action="store_true", - help="print configuration info and test results.") - parser.add_argument('tests', nargs='*') - args = parser.parse_args(argv[1:]) - - if args.chdir: - os.chdir(args.chdir) - - if args.path: - extra_path = [os.path.abspath(p) for p in args.path] - extra_path = os.pathsep.join(extra_path) - os.environ['PATH'] = extra_path + os.pathsep + os.environ['PATH'] - - if not args.tests: - if not args.all: - sys.stderr.write('Specify -a to get all tests.\n') - return 1 - args.tests = ['test'] - - tests = [] - for arg in args.tests: - if os.path.isdir(arg): - tests.extend(find_all_gyptest_files(os.path.normpath(arg))) - else: - if not is_test_name(os.path.basename(arg)): - print(arg, 'is not a valid gyp test name.', file=sys.stderr) - sys.exit(1) - tests.append(arg) - - if args.list: - for test in tests: - print(test) - sys.exit(0) - - os.environ['PYTHONPATH'] = os.path.abspath('test/lib') - - if args.verbose: - print_configuration_info() - - if args.gyp_option and not args.quiet: - print('Extra Gyp options: %s\n' % args.gyp_option) - - if args.format: - format_list = args.format.split(',') - else: - format_list = { - 'aix5': ['make'], - 'freebsd7': ['make'], - 'freebsd8': ['make'], - 'openbsd5': ['make'], - 'cygwin': ['msvs'], - 'win32': ['msvs', 'ninja'], - 'linux': ['make', 'ninja'], - 'linux2': ['make', 'ninja'], - 'linux3': ['make', 'ninja'], - - # TODO: Re-enable xcode-ninja. 
- # https://bugs.chromium.org/p/gyp/issues/detail?id=530 - # 'darwin': ['make', 'ninja', 'xcode', 'xcode-ninja'], - 'darwin': ['make', 'ninja', 'xcode'], - }[sys.platform] - - gyp_options = [] - for option in args.gyp_option: - gyp_options += ['-G', option] - - runner = Runner(format_list, tests, gyp_options, args.verbose) - runner.run() - - if not args.quiet: - runner.print_results() - - if runner.failures: - return 1 - else: - return 0 - - -def print_configuration_info(): - print('Test configuration:') - if sys.platform == 'darwin': - sys.path.append(os.path.abspath('test/lib')) - import TestMac - print(' Mac %s %s' % (platform.mac_ver()[0], platform.mac_ver()[2])) - print(' Xcode %s' % TestMac.Xcode.Version()) - elif sys.platform == 'win32': - sys.path.append(os.path.abspath('pylib')) - import gyp.MSVSVersion - print(' Win %s %s\n' % platform.win32_ver()[0:2]) - print(' MSVS %s' % - gyp.MSVSVersion.SelectVisualStudioVersion().Description()) - elif sys.platform in ('linux', 'linux2'): - print(' Linux %s' % ' '.join(platform.linux_distribution())) - print(' Python %s' % platform.python_version()) - print(' PYTHONPATH=%s' % os.environ['PYTHONPATH']) - print() - - -class Runner(object): - def __init__(self, formats, tests, gyp_options, verbose): - self.formats = formats - self.tests = tests - self.verbose = verbose - self.gyp_options = gyp_options - self.failures = [] - self.num_tests = len(formats) * len(tests) - num_digits = len(str(self.num_tests)) - self.fmt_str = '[%%%dd/%%%dd] (%%s) %%s' % (num_digits, num_digits) - self.isatty = sys.stdout.isatty() and not self.verbose - self.env = os.environ.copy() - self.hpos = 0 - - def run(self): - run_start = time.time() - - i = 1 - for fmt in self.formats: - for test in self.tests: - self.run_test(test, fmt, i) - i += 1 - - if self.isatty: - self.erase_current_line() - - self.took = time.time() - run_start - - def run_test(self, test, fmt, i): - if self.isatty: - self.erase_current_line() - - msg = self.fmt_str % (i, self.num_tests, fmt, test) - self.print_(msg) - - start = time.time() - cmd = [sys.executable, test] + self.gyp_options - self.env['TESTGYP_FORMAT'] = fmt - proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, env=self.env) - proc.wait() - took = time.time() - start - - stdout = proc.stdout.read().decode('utf8') - if proc.returncode == 2: - res = 'skipped' - elif proc.returncode: - res = 'failed' - self.failures.append('(%s) %s' % (test, fmt)) - else: - res = 'passed' - res_msg = ' %s %.3fs' % (res, took) - self.print_(res_msg) - - if (stdout and - not stdout.endswith('PASSED\n') and - not (stdout.endswith('NO RESULT\n'))): - print() - for l in stdout.splitlines(): - print(' %s' % l) - elif not self.isatty: - print() - - def print_(self, msg): - print(msg, end='') - index = msg.rfind('\n') - if index == -1: - self.hpos += len(msg) - else: - self.hpos = len(msg) - index - sys.stdout.flush() - - def erase_current_line(self): - print('\b' * self.hpos + ' ' * self.hpos + '\b' * self.hpos, end='') - sys.stdout.flush() - self.hpos = 0 - - def print_results(self): - num_failures = len(self.failures) - if num_failures: - print() - if num_failures == 1: - print("Failed the following test:") - else: - print("Failed the following %d tests:" % num_failures) - print("\t" + "\n\t".join(sorted(self.failures))) - print() - print('Ran %d tests in %.3fs, %d failed.' 
% (self.num_tests, self.took, - num_failures)) - print() - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/tools/gyp/pylib/gyp/MSVSSettings.py b/tools/gyp/pylib/gyp/MSVSSettings.py index 0f53ff87c77..5dd8f8c1e6e 100644 --- a/tools/gyp/pylib/gyp/MSVSSettings.py +++ b/tools/gyp/pylib/gyp/MSVSSettings.py @@ -522,8 +522,8 @@ def _ValidateSettings(validators, settings, stderr): try: tool_validators[setting](value) except ValueError as e: - print('Warning: for %s/%s, %s' % (tool_name, setting, e), - file=stderr) + print('Warning: for %s/%s, %s' % + (tool_name, setting, e), file=stderr) else: _ValidateExclusionSetting(setting, tool_validators, @@ -976,7 +976,9 @@ def _ValidateSettings(validators, settings, stderr): _Enumeration(['NotSet', 'Win32', # /env win32 'Itanium', # /env ia64 - 'X64'])) # /env x64 + 'X64', # /env x64 + 'ARM64', # /env arm64 + ])) _Same(_midl, 'EnableErrorChecks', _Enumeration(['EnableCustom', 'None', # /error none diff --git a/tools/gyp/pylib/gyp/MSVSSettings_test.py b/tools/gyp/pylib/gyp/MSVSSettings_test.py index 245478c8dae..77b79e650d8 100755 --- a/tools/gyp/pylib/gyp/MSVSSettings_test.py +++ b/tools/gyp/pylib/gyp/MSVSSettings_test.py @@ -1085,6 +1085,7 @@ def testConvertToMSBuildSettings_full_synthetic(self): 'GenerateManifest': 'true', 'IgnoreImportLibrary': 'true', 'LinkIncremental': 'false'}} + self.maxDiff = 9999 # on failure display a long diff actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings( msvs_settings, self.stderr) @@ -1476,6 +1477,7 @@ def testConvertToMSBuildSettings_actual(self): 'ResourceOutputFileName': '$(IntDir)$(TargetFileName).embed.manifest.resfdsf'} } + self.maxDiff = 9999 # on failure display a long diff actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings( msvs_settings, self.stderr) diff --git a/tools/gyp/pylib/gyp/MSVSVersion.py b/tools/gyp/pylib/gyp/MSVSVersion.py index f89f1d0fc2e..ce9b349834c 100644 --- a/tools/gyp/pylib/gyp/MSVSVersion.py +++ b/tools/gyp/pylib/gyp/MSVSVersion.py @@ -12,6 +12,8 @@ import gyp import glob +PY3 = bytes != str + def JoinPath(*args): return os.path.normpath(os.path.join(*args)) @@ -163,6 +165,8 @@ def _RegistryQueryBase(sysdir, key, value): # Obtain the stdout from reg.exe, reading to the end so p.returncode is valid # Note that the error text may be in [1] in some cases text = p.communicate()[0] + if PY3: + text = text.decode('utf-8') # Check return code from reg.exe; officially 0==success and 1==error if p.returncode: return None @@ -385,6 +389,8 @@ def _ConvertToCygpath(path): if sys.platform == 'cygwin': p = subprocess.Popen(['cygpath', path], stdout=subprocess.PIPE) path = p.communicate()[0].strip() + if PY3: + path = path.decode('utf-8') return path diff --git a/tools/gyp/pylib/gyp/common.py b/tools/gyp/pylib/gyp/common.py index 351800ee25e..aa410e1dfdd 100644 --- a/tools/gyp/pylib/gyp/common.py +++ b/tools/gyp/pylib/gyp/common.py @@ -8,12 +8,15 @@ import re import tempfile import sys +import subprocess try: from collections.abc import MutableSet except ImportError: from collections import MutableSet +PY3 = bytes != str + # A minimal memoizing decorator. It'll blow up if the args aren't immutable, # among other "problems". @@ -341,11 +344,16 @@ def WriteOnDiff(filename): class Writer(object): """Wrapper around file which only covers the target if it differs.""" def __init__(self): + # On Cygwin remove the "dir" argument because `C:` prefixed paths are treated as relative, + # consequently ending up with current dir "/cygdrive/c/..." 
being prefixed to those, which was + # obviously a non-existent path, for example: "/cygdrive/c//C:\". + # See https://docs.python.org/2/library/tempfile.html#tempfile.mkstemp for more details + base_temp_dir = "" if IsCygwin() else os.path.dirname(filename) # Pick temporary file. tmp_fd, self.tmp_path = tempfile.mkstemp( suffix='.tmp', prefix=os.path.split(filename)[1] + '.gyp.', - dir=os.path.split(filename)[0]) + dir=base_temp_dir) try: self.tmp_file = os.fdopen(tmp_fd, 'wb') except Exception: @@ -426,9 +434,7 @@ def GetFlavor(params): return flavors[sys.platform] if sys.platform.startswith('sunos'): return 'solaris' - if sys.platform.startswith('freebsd'): - return 'freebsd' - if sys.platform.startswith('dragonfly'): + if sys.platform.startswith(('dragonfly', 'freebsd')): return 'freebsd' if sys.platform.startswith('openbsd'): return 'openbsd' @@ -436,6 +442,8 @@ def GetFlavor(params): return 'netbsd' if sys.platform.startswith('aix'): return 'aix' + if sys.platform.startswith(('os390', 'zos')): + return 'zos' return 'linux' @@ -620,3 +628,15 @@ def CrossCompileRequested(): os.environ.get('AR_target') or os.environ.get('CC_target') or os.environ.get('CXX_target')) + +def IsCygwin(): + try: + out = subprocess.Popen("uname", + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT) + stdout, stderr = out.communicate() + if PY3: + stdout = stdout.decode("utf-8") + return "CYGWIN" in str(stdout) + except Exception: + return False diff --git a/tools/gyp/pylib/gyp/generator/android.py b/tools/gyp/pylib/gyp/generator/android.py new file mode 100644 index 00000000000..cecb28c3660 --- /dev/null +++ b/tools/gyp/pylib/gyp/generator/android.py @@ -0,0 +1,1097 @@ +# Copyright (c) 2012 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Notes: +# +# This generates makefiles suitable for inclusion into the Android build system +# via an Android.mk file. It is based on make.py, the standard makefile +# generator. +# +# The code below generates a separate .mk file for each target, but +# all are sourced by the top-level GypAndroid.mk. This means that all +# variables in .mk-files clobber one another, and furthermore that any +# variables set potentially clash with other Android build system variables. +# Try to avoid setting global variables where possible. + +from __future__ import print_function + +import gyp +import gyp.common +import gyp.generator.make as make # Reuse global functions from make backend. +import os +import re +import subprocess + +generator_default_variables = { + 'OS': 'android', + 'EXECUTABLE_PREFIX': '', + 'EXECUTABLE_SUFFIX': '', + 'STATIC_LIB_PREFIX': 'lib', + 'SHARED_LIB_PREFIX': 'lib', + 'STATIC_LIB_SUFFIX': '.a', + 'SHARED_LIB_SUFFIX': '.so', + 'INTERMEDIATE_DIR': '$(gyp_intermediate_dir)', + 'SHARED_INTERMEDIATE_DIR': '$(gyp_shared_intermediate_dir)', + 'PRODUCT_DIR': '$(gyp_shared_intermediate_dir)', + 'SHARED_LIB_DIR': '$(builddir)/lib.$(TOOLSET)', + 'LIB_DIR': '$(obj).$(TOOLSET)', + 'RULE_INPUT_ROOT': '%(INPUT_ROOT)s', # This gets expanded by Python. + 'RULE_INPUT_DIRNAME': '%(INPUT_DIRNAME)s', # This gets expanded by Python. + 'RULE_INPUT_PATH': '$(RULE_SOURCES)', + 'RULE_INPUT_EXT': '$(suffix $<)', + 'RULE_INPUT_NAME': '$(notdir $<)', + 'CONFIGURATION_NAME': '$(GYP_CONFIGURATION)', +} + +# Make supports multiple toolsets +generator_supports_multiple_toolsets = True + + +# Generator-specific gyp specs. 
+generator_additional_non_configuration_keys = [ + # Boolean to declare that this target does not want its name mangled. + 'android_unmangled_name', + # Map of android build system variables to set. + 'aosp_build_settings', +] +generator_additional_path_sections = [] +generator_extra_sources_for_rules = [] + + +ALL_MODULES_FOOTER = """\ +# "gyp_all_modules" is a concatenation of the "gyp_all_modules" targets from +# all the included sub-makefiles. This is just here to clarify. +gyp_all_modules: +""" + +header = """\ +# This file is generated by gyp; do not edit. + +""" + +# Map gyp target types to Android module classes. +MODULE_CLASSES = { + 'static_library': 'STATIC_LIBRARIES', + 'shared_library': 'SHARED_LIBRARIES', + 'executable': 'EXECUTABLES', +} + + +def IsCPPExtension(ext): + return make.COMPILABLE_EXTENSIONS.get(ext) == 'cxx' + + +def Sourceify(path): + """Convert a path to its source directory form. The Android backend does not + support options.generator_output, so this function is a noop.""" + return path + + +# Map from qualified target to path to output. +# For Android, the target of these maps is a tuple ('static', 'modulename'), +# ('dynamic', 'modulename'), or ('path', 'some/path') instead of a string, +# since we link by module. +target_outputs = {} +# Map from qualified target to any linkable output. A subset +# of target_outputs. E.g. when mybinary depends on liba, we want to +# include liba in the linker line; when otherbinary depends on +# mybinary, we just want to build mybinary first. +target_link_deps = {} + + +class AndroidMkWriter(object): + """AndroidMkWriter packages up the writing of one target-specific Android.mk. + + Its only real entry point is Write(), and is mostly used for namespacing. + """ + + def __init__(self, android_top_dir): + self.android_top_dir = android_top_dir + + def Write(self, qualified_target, relative_target, base_path, output_filename, + spec, configs, part_of_all, write_alias_target, sdk_version): + """The main entry point: writes a .mk file for a single target. + + Arguments: + qualified_target: target we're generating + relative_target: qualified target name relative to the root + base_path: path relative to source root we're building in, used to resolve + target-relative paths + output_filename: output .mk file name to write + spec, configs: gyp info + part_of_all: flag indicating this target is part of 'all' + write_alias_target: flag indicating whether to create short aliases for + this target + sdk_version: what to emit for LOCAL_SDK_VERSION in output + """ + gyp.common.EnsureDirExists(output_filename) + + self.fp = open(output_filename, 'w') + + self.fp.write(header) + + self.qualified_target = qualified_target + self.relative_target = relative_target + self.path = base_path + self.target = spec['target_name'] + self.type = spec['type'] + self.toolset = spec['toolset'] + + deps, link_deps = self.ComputeDeps(spec) + + # Some of the generation below can add extra output, sources, or + # link dependencies. All of the out params of the functions that + # follow use names like extra_foo. + extra_outputs = [] + extra_sources = [] + + self.android_class = MODULE_CLASSES.get(self.type, 'GYP') + self.android_module = self.ComputeAndroidModule(spec) + (self.android_stem, self.android_suffix) = self.ComputeOutputParts(spec) + self.output = self.output_binary = self.ComputeOutput(spec) + + # Standard header. + self.WriteLn('include $(CLEAR_VARS)\n') + + # Module class and name. 
+ self.WriteLn('LOCAL_MODULE_CLASS := ' + self.android_class) + self.WriteLn('LOCAL_MODULE := ' + self.android_module) + # Only emit LOCAL_MODULE_STEM if it's different to LOCAL_MODULE. + # The library module classes fail if the stem is set. ComputeOutputParts + # makes sure that stem == modulename in these cases. + if self.android_stem != self.android_module: + self.WriteLn('LOCAL_MODULE_STEM := ' + self.android_stem) + self.WriteLn('LOCAL_MODULE_SUFFIX := ' + self.android_suffix) + if self.toolset == 'host': + self.WriteLn('LOCAL_IS_HOST_MODULE := true') + self.WriteLn('LOCAL_MULTILIB := $(GYP_HOST_MULTILIB)') + elif sdk_version > 0: + self.WriteLn('LOCAL_MODULE_TARGET_ARCH := ' + '$(TARGET_$(GYP_VAR_PREFIX)ARCH)') + self.WriteLn('LOCAL_SDK_VERSION := %s' % sdk_version) + + # Grab output directories; needed for Actions and Rules. + if self.toolset == 'host': + self.WriteLn('gyp_intermediate_dir := ' + '$(call local-intermediates-dir,,$(GYP_HOST_VAR_PREFIX))') + else: + self.WriteLn('gyp_intermediate_dir := ' + '$(call local-intermediates-dir,,$(GYP_VAR_PREFIX))') + self.WriteLn('gyp_shared_intermediate_dir := ' + '$(call intermediates-dir-for,GYP,shared,,,$(GYP_VAR_PREFIX))') + self.WriteLn() + + # List files this target depends on so that actions/rules/copies/sources + # can depend on the list. + # TODO: doesn't pull in things through transitive link deps; needed? + target_dependencies = [x[1] for x in deps if x[0] == 'path'] + self.WriteLn('# Make sure our deps are built first.') + self.WriteList(target_dependencies, 'GYP_TARGET_DEPENDENCIES', + local_pathify=True) + + # Actions must come first, since they can generate more OBJs for use below. + if 'actions' in spec: + self.WriteActions(spec['actions'], extra_sources, extra_outputs) + + # Rules must be early like actions. + if 'rules' in spec: + self.WriteRules(spec['rules'], extra_sources, extra_outputs) + + if 'copies' in spec: + self.WriteCopies(spec['copies'], extra_outputs) + + # GYP generated outputs. + self.WriteList(extra_outputs, 'GYP_GENERATED_OUTPUTS', local_pathify=True) + + # Set LOCAL_ADDITIONAL_DEPENDENCIES so that Android's build rules depend + # on both our dependency targets and our generated files. + self.WriteLn('# Make sure our deps and generated files are built first.') + self.WriteLn('LOCAL_ADDITIONAL_DEPENDENCIES := $(GYP_TARGET_DEPENDENCIES) ' + '$(GYP_GENERATED_OUTPUTS)') + self.WriteLn() + + # Sources. + if spec.get('sources', []) or extra_sources: + self.WriteSources(spec, configs, extra_sources) + + self.WriteTarget(spec, configs, deps, link_deps, part_of_all, + write_alias_target) + + # Update global list of target outputs, used in dependency tracking. + target_outputs[qualified_target] = ('path', self.output_binary) + + # Update global list of link dependencies. + if self.type == 'static_library': + target_link_deps[qualified_target] = ('static', self.android_module) + elif self.type == 'shared_library': + target_link_deps[qualified_target] = ('shared', self.android_module) + + self.fp.close() + return self.android_module + + + def WriteActions(self, actions, extra_sources, extra_outputs): + """Write Makefile code for any 'actions' from the gyp input. 
+ + extra_sources: a list that will be filled in with newly generated source + files, if any + extra_outputs: a list that will be filled in with any outputs of these + actions (used to make other pieces dependent on these + actions) + """ + for action in actions: + name = make.StringToMakefileVariable('%s_%s' % (self.relative_target, + action['action_name'])) + self.WriteLn('### Rules for action "%s":' % action['action_name']) + inputs = action['inputs'] + outputs = action['outputs'] + + # Build up a list of outputs. + # Collect the output dirs we'll need. + dirs = set() + for out in outputs: + if not out.startswith('$'): + print('WARNING: Action for target "%s" writes output to local path ' + '"%s".' % (self.target, out)) + dir = os.path.split(out)[0] + if dir: + dirs.add(dir) + if int(action.get('process_outputs_as_sources', False)): + extra_sources += outputs + + # Prepare the actual command. + command = gyp.common.EncodePOSIXShellList(action['action']) + if 'message' in action: + quiet_cmd = 'Gyp action: %s ($@)' % action['message'] + else: + quiet_cmd = 'Gyp action: %s ($@)' % name + if len(dirs) > 0: + command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command + + cd_action = 'cd $(gyp_local_path)/%s; ' % self.path + command = cd_action + command + + # The makefile rules are all relative to the top dir, but the gyp actions + # are defined relative to their containing dir. This replaces the gyp_* + # variables for the action rule with an absolute version so that the + # output goes in the right place. + # Only write the gyp_* rules for the "primary" output (:1); + # it's superfluous for the "extra outputs", and this avoids accidentally + # writing duplicate dummy rules for those outputs. + main_output = make.QuoteSpaces(self.LocalPathify(outputs[0])) + self.WriteLn('%s: gyp_local_path := $(LOCAL_PATH)' % main_output) + self.WriteLn('%s: gyp_var_prefix := $(GYP_VAR_PREFIX)' % main_output) + self.WriteLn('%s: gyp_intermediate_dir := ' + '$(abspath $(gyp_intermediate_dir))' % main_output) + self.WriteLn('%s: gyp_shared_intermediate_dir := ' + '$(abspath $(gyp_shared_intermediate_dir))' % main_output) + + # Android's envsetup.sh adds a number of directories to the path including + # the built host binary directory. This causes actions/rules invoked by + # gyp to sometimes use these instead of system versions, e.g. bison. + # The built host binaries may not be suitable, and can cause errors. + # So, we remove them from the PATH using the ANDROID_BUILD_PATHS variable + # set by envsetup. + self.WriteLn('%s: export PATH := $(subst $(ANDROID_BUILD_PATHS),,$(PATH))' + % main_output) + + # Don't allow spaces in input/output filenames, but make an exception for + # filenames which start with '$(' since it's okay for there to be spaces + # inside of make function/macro invocations. 
+ for input in inputs: + if not input.startswith('$(') and ' ' in input: + raise gyp.common.GypError( + 'Action input filename "%s" in target %s contains a space' % + (input, self.target)) + for output in outputs: + if not output.startswith('$(') and ' ' in output: + raise gyp.common.GypError( + 'Action output filename "%s" in target %s contains a space' % + (output, self.target)) + + self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES)' % + (main_output, ' '.join(map(self.LocalPathify, inputs)))) + self.WriteLn('\t@echo "%s"' % quiet_cmd) + self.WriteLn('\t$(hide)%s\n' % command) + for output in outputs[1:]: + # Make each output depend on the main output, with an empty command + # to force make to notice that the mtime has changed. + self.WriteLn('%s: %s ;' % (self.LocalPathify(output), main_output)) + + extra_outputs += outputs + self.WriteLn() + + self.WriteLn() + + + def WriteRules(self, rules, extra_sources, extra_outputs): + """Write Makefile code for any 'rules' from the gyp input. + + extra_sources: a list that will be filled in with newly generated source + files, if any + extra_outputs: a list that will be filled in with any outputs of these + rules (used to make other pieces dependent on these rules) + """ + if len(rules) == 0: + return + + for rule in rules: + if len(rule.get('rule_sources', [])) == 0: + continue + name = make.StringToMakefileVariable('%s_%s' % (self.relative_target, + rule['rule_name'])) + self.WriteLn('\n### Generated for rule "%s":' % name) + self.WriteLn('# "%s":' % rule) + + inputs = rule.get('inputs') + for rule_source in rule.get('rule_sources', []): + (rule_source_dirname, rule_source_basename) = os.path.split(rule_source) + (rule_source_root, rule_source_ext) = \ + os.path.splitext(rule_source_basename) + + outputs = [self.ExpandInputRoot(out, rule_source_root, + rule_source_dirname) + for out in rule['outputs']] + + dirs = set() + for out in outputs: + if not out.startswith('$'): + print('WARNING: Rule for target %s writes output to local path %s' + % (self.target, out)) + dir = os.path.dirname(out) + if dir: + dirs.add(dir) + extra_outputs += outputs + if int(rule.get('process_outputs_as_sources', False)): + extra_sources.extend(outputs) + + components = [] + for component in rule['action']: + component = self.ExpandInputRoot(component, rule_source_root, + rule_source_dirname) + if '$(RULE_SOURCES)' in component: + component = component.replace('$(RULE_SOURCES)', + rule_source) + components.append(component) + + command = gyp.common.EncodePOSIXShellList(components) + cd_action = 'cd $(gyp_local_path)/%s; ' % self.path + command = cd_action + command + if dirs: + command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command + + # We set up a rule to build the first output, and then set up + # a rule for each additional output to depend on the first. + outputs = map(self.LocalPathify, outputs) + main_output = outputs[0] + self.WriteLn('%s: gyp_local_path := $(LOCAL_PATH)' % main_output) + self.WriteLn('%s: gyp_var_prefix := $(GYP_VAR_PREFIX)' % main_output) + self.WriteLn('%s: gyp_intermediate_dir := ' + '$(abspath $(gyp_intermediate_dir))' % main_output) + self.WriteLn('%s: gyp_shared_intermediate_dir := ' + '$(abspath $(gyp_shared_intermediate_dir))' % main_output) + + # See explanation in WriteActions. 
+ self.WriteLn('%s: export PATH := ' + '$(subst $(ANDROID_BUILD_PATHS),,$(PATH))' % main_output) + + main_output_deps = self.LocalPathify(rule_source) + if inputs: + main_output_deps += ' ' + main_output_deps += ' '.join([self.LocalPathify(f) for f in inputs]) + + self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES)' % + (main_output, main_output_deps)) + self.WriteLn('\t%s\n' % command) + for output in outputs[1:]: + # Make each output depend on the main output, with an empty command + # to force make to notice that the mtime has changed. + self.WriteLn('%s: %s ;' % (output, main_output)) + self.WriteLn() + + self.WriteLn() + + + def WriteCopies(self, copies, extra_outputs): + """Write Makefile code for any 'copies' from the gyp input. + + extra_outputs: a list that will be filled in with any outputs of this action + (used to make other pieces dependent on this action) + """ + self.WriteLn('### Generated for copy rule.') + + variable = make.StringToMakefileVariable(self.relative_target + '_copies') + outputs = [] + for copy in copies: + for path in copy['files']: + # The Android build system does not allow generation of files into the + # source tree. The destination should start with a variable, which will + # typically be $(gyp_intermediate_dir) or + # $(gyp_shared_intermediate_dir). Note that we can't use an assertion + # because some of the gyp tests depend on this. + if not copy['destination'].startswith('$'): + print('WARNING: Copy rule for target %s writes output to ' + 'local path %s' % (self.target, copy['destination'])) + + # LocalPathify() calls normpath, stripping trailing slashes. + path = Sourceify(self.LocalPathify(path)) + filename = os.path.split(path)[1] + output = Sourceify(self.LocalPathify(os.path.join(copy['destination'], + filename))) + + self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES) | $(ACP)' % + (output, path)) + self.WriteLn('\t@echo Copying: $@') + self.WriteLn('\t$(hide) mkdir -p $(dir $@)') + self.WriteLn('\t$(hide) $(ACP) -rpf $< $@') + self.WriteLn() + outputs.append(output) + self.WriteLn('%s = %s' % (variable, + ' '.join(map(make.QuoteSpaces, outputs)))) + extra_outputs.append('$(%s)' % variable) + self.WriteLn() + + + def WriteSourceFlags(self, spec, configs): + """Write out the flags and include paths used to compile source files for + the current target. + + Args: + spec, configs: input from gyp. 
+ """ + for configname, config in sorted(configs.items()): + extracted_includes = [] + + self.WriteLn('\n# Flags passed to both C and C++ files.') + cflags, includes_from_cflags = self.ExtractIncludesFromCFlags( + config.get('cflags', []) + config.get('cflags_c', [])) + extracted_includes.extend(includes_from_cflags) + self.WriteList(cflags, 'MY_CFLAGS_%s' % configname) + + self.WriteList(config.get('defines'), 'MY_DEFS_%s' % configname, + prefix='-D', quoter=make.EscapeCppDefine) + + self.WriteLn('\n# Include paths placed before CFLAGS/CPPFLAGS') + includes = list(config.get('include_dirs', [])) + includes.extend(extracted_includes) + includes = map(Sourceify, map(self.LocalPathify, includes)) + includes = self.NormalizeIncludePaths(includes) + self.WriteList(includes, 'LOCAL_C_INCLUDES_%s' % configname) + + self.WriteLn('\n# Flags passed to only C++ (and not C) files.') + self.WriteList(config.get('cflags_cc'), 'LOCAL_CPPFLAGS_%s' % configname) + + self.WriteLn('\nLOCAL_CFLAGS := $(MY_CFLAGS_$(GYP_CONFIGURATION)) ' + '$(MY_DEFS_$(GYP_CONFIGURATION))') + # Undefine ANDROID for host modules + # TODO: the source code should not use macro ANDROID to tell if it's host + # or target module. + if self.toolset == 'host': + self.WriteLn('# Undefine ANDROID for host modules') + self.WriteLn('LOCAL_CFLAGS += -UANDROID') + self.WriteLn('LOCAL_C_INCLUDES := $(GYP_COPIED_SOURCE_ORIGIN_DIRS) ' + '$(LOCAL_C_INCLUDES_$(GYP_CONFIGURATION))') + self.WriteLn('LOCAL_CPPFLAGS := $(LOCAL_CPPFLAGS_$(GYP_CONFIGURATION))') + # Android uses separate flags for assembly file invocations, but gyp expects + # the same CFLAGS to be applied: + self.WriteLn('LOCAL_ASFLAGS := $(LOCAL_CFLAGS)') + + + def WriteSources(self, spec, configs, extra_sources): + """Write Makefile code for any 'sources' from the gyp input. + These are source files necessary to build the current target. + We need to handle shared_intermediate directory source files as + a special case by copying them to the intermediate directory and + treating them as a genereated sources. Otherwise the Android build + rules won't pick them up. + + Args: + spec, configs: input from gyp. + extra_sources: Sources generated from Actions or Rules. + """ + sources = filter(make.Compilable, spec.get('sources', [])) + generated_not_sources = [x for x in extra_sources if not make.Compilable(x)] + extra_sources = filter(make.Compilable, extra_sources) + + # Determine and output the C++ extension used by these sources. + # We simply find the first C++ file and use that extension. + all_sources = sources + extra_sources + local_cpp_extension = '.cpp' + for source in all_sources: + (root, ext) = os.path.splitext(source) + if IsCPPExtension(ext): + local_cpp_extension = ext + break + if local_cpp_extension != '.cpp': + self.WriteLn('LOCAL_CPP_EXTENSION := %s' % local_cpp_extension) + + # We need to move any non-generated sources that are coming from the + # shared intermediate directory out of LOCAL_SRC_FILES and put them + # into LOCAL_GENERATED_SOURCES. We also need to move over any C++ files + # that don't match our local_cpp_extension, since Android will only + # generate Makefile rules for a single LOCAL_CPP_EXTENSION. 
+ local_files = [] + for source in sources: + (root, ext) = os.path.splitext(source) + if '$(gyp_shared_intermediate_dir)' in source: + extra_sources.append(source) + elif '$(gyp_intermediate_dir)' in source: + extra_sources.append(source) + elif IsCPPExtension(ext) and ext != local_cpp_extension: + extra_sources.append(source) + else: + local_files.append(os.path.normpath(os.path.join(self.path, source))) + + # For any generated source, if it is coming from the shared intermediate + # directory then we add a Make rule to copy them to the local intermediate + # directory first. This is because the Android LOCAL_GENERATED_SOURCES + # must be in the local module intermediate directory for the compile rules + # to work properly. If the file has the wrong C++ extension, then we add + # a rule to copy that to intermediates and use the new version. + final_generated_sources = [] + # If a source file gets copied, we still need to add the original source + # directory as header search path, for GCC searches headers in the + # directory that contains the source file by default. + origin_src_dirs = [] + for source in extra_sources: + local_file = source + if not '$(gyp_intermediate_dir)/' in local_file: + basename = os.path.basename(local_file) + local_file = '$(gyp_intermediate_dir)/' + basename + (root, ext) = os.path.splitext(local_file) + if IsCPPExtension(ext) and ext != local_cpp_extension: + local_file = root + local_cpp_extension + if local_file != source: + self.WriteLn('%s: %s' % (local_file, self.LocalPathify(source))) + self.WriteLn('\tmkdir -p $(@D); cp $< $@') + origin_src_dirs.append(os.path.dirname(source)) + final_generated_sources.append(local_file) + + # We add back in all of the non-compilable stuff to make sure that the + # make rules have dependencies on them. + final_generated_sources.extend(generated_not_sources) + self.WriteList(final_generated_sources, 'LOCAL_GENERATED_SOURCES') + + origin_src_dirs = gyp.common.uniquer(origin_src_dirs) + origin_src_dirs = map(Sourceify, map(self.LocalPathify, origin_src_dirs)) + self.WriteList(origin_src_dirs, 'GYP_COPIED_SOURCE_ORIGIN_DIRS') + + self.WriteList(local_files, 'LOCAL_SRC_FILES') + + # Write out the flags used to compile the source; this must be done last + # so that GYP_COPIED_SOURCE_ORIGIN_DIRS can be used as an include path. + self.WriteSourceFlags(spec, configs) + + + def ComputeAndroidModule(self, spec): + """Return the Android module name used for a gyp spec. + + We use the complete qualified target name to avoid collisions between + duplicate targets in different directories. We also add a suffix to + distinguish gyp-generated module names. + """ + + if int(spec.get('android_unmangled_name', 0)): + assert self.type != 'shared_library' or self.target.startswith('lib') + return self.target + + if self.type == 'shared_library': + # For reasons of convention, the Android build system requires that all + # shared library modules are named 'libfoo' when generating -l flags. + prefix = 'lib_' + else: + prefix = '' + + if spec['toolset'] == 'host': + suffix = '_$(TARGET_$(GYP_VAR_PREFIX)ARCH)_host_gyp' + else: + suffix = '_gyp' + + if self.path: + middle = make.StringToMakefileVariable('%s_%s' % (self.path, self.target)) + else: + middle = make.StringToMakefileVariable(self.target) + + return ''.join([prefix, middle, suffix]) + + + def ComputeOutputParts(self, spec): + """Return the 'output basename' of a gyp spec, split into filename + ext. 
+ + Android libraries must be named the same thing as their module name, + otherwise the linker can't find them, so product_name and so on must be + ignored if we are building a library, and the "lib" prepending is + not done for Android. + """ + assert self.type != 'loadable_module' # TODO: not supported? + + target = spec['target_name'] + target_prefix = '' + target_ext = '' + if self.type == 'static_library': + target = self.ComputeAndroidModule(spec) + target_ext = '.a' + elif self.type == 'shared_library': + target = self.ComputeAndroidModule(spec) + target_ext = '.so' + elif self.type == 'none': + target_ext = '.stamp' + elif self.type != 'executable': + print("ERROR: What output file should be generated?", + "type", self.type, "target", target) + + if self.type != 'static_library' and self.type != 'shared_library': + target_prefix = spec.get('product_prefix', target_prefix) + target = spec.get('product_name', target) + product_ext = spec.get('product_extension') + if product_ext: + target_ext = '.' + product_ext + + target_stem = target_prefix + target + return (target_stem, target_ext) + + + def ComputeOutputBasename(self, spec): + """Return the 'output basename' of a gyp spec. + + E.g., the loadable module 'foobar' in directory 'baz' will produce + 'libfoobar.so' + """ + return ''.join(self.ComputeOutputParts(spec)) + + + def ComputeOutput(self, spec): + """Return the 'output' (full output path) of a gyp spec. + + E.g., the loadable module 'foobar' in directory 'baz' will produce + '$(obj)/baz/libfoobar.so' + """ + if self.type == 'executable': + # We install host executables into shared_intermediate_dir so they can be + # run by gyp rules that refer to PRODUCT_DIR. + path = '$(gyp_shared_intermediate_dir)' + elif self.type == 'shared_library': + if self.toolset == 'host': + path = '$($(GYP_HOST_VAR_PREFIX)HOST_OUT_INTERMEDIATE_LIBRARIES)' + else: + path = '$($(GYP_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)' + else: + # Other targets just get built into their intermediate dir. + if self.toolset == 'host': + path = ('$(call intermediates-dir-for,%s,%s,true,,' + '$(GYP_HOST_VAR_PREFIX))' % (self.android_class, + self.android_module)) + else: + path = ('$(call intermediates-dir-for,%s,%s,,,$(GYP_VAR_PREFIX))' + % (self.android_class, self.android_module)) + + assert spec.get('product_dir') is None # TODO: not supported? + return os.path.join(path, self.ComputeOutputBasename(spec)) + + def NormalizeIncludePaths(self, include_paths): + """ Normalize include_paths. + Convert absolute paths to relative to the Android top directory. + + Args: + include_paths: A list of unprocessed include paths. + Returns: + A list of normalized include paths. + """ + normalized = [] + for path in include_paths: + if path[0] == '/': + path = gyp.common.RelativePath(path, self.android_top_dir) + normalized.append(path) + return normalized + + def ExtractIncludesFromCFlags(self, cflags): + """Extract includes "-I..." out from cflags + + Args: + cflags: A list of compiler flags, which may be mixed with "-I.." + Returns: + A tuple of lists: (clean_clfags, include_paths). "-I.." is trimmed. + """ + clean_cflags = [] + include_paths = [] + for flag in cflags: + if flag.startswith('-I'): + include_paths.append(flag[2:]) + else: + clean_cflags.append(flag) + + return (clean_cflags, include_paths) + + def FilterLibraries(self, libraries): + """Filter the 'libraries' key to separate things that shouldn't be ldflags. 
+ + Library entries that look like filenames should be converted to android + module names instead of being passed to the linker as flags. + + Args: + libraries: the value of spec.get('libraries') + Returns: + A tuple (static_lib_modules, dynamic_lib_modules, ldflags) + """ + static_lib_modules = [] + dynamic_lib_modules = [] + ldflags = [] + for libs in libraries: + # Libs can have multiple words. + for lib in libs.split(): + # Filter the system libraries, which are added by default by the Android + # build system. + if (lib == '-lc' or lib == '-lstdc++' or lib == '-lm' or + lib.endswith('libgcc.a')): + continue + match = re.search(r'([^/]+)\.a$', lib) + if match: + static_lib_modules.append(match.group(1)) + continue + match = re.search(r'([^/]+)\.so$', lib) + if match: + dynamic_lib_modules.append(match.group(1)) + continue + if lib.startswith('-l'): + ldflags.append(lib) + return (static_lib_modules, dynamic_lib_modules, ldflags) + + + def ComputeDeps(self, spec): + """Compute the dependencies of a gyp spec. + + Returns a tuple (deps, link_deps), where each is a list of + filenames that will need to be put in front of make for either + building (deps) or linking (link_deps). + """ + deps = [] + link_deps = [] + if 'dependencies' in spec: + deps.extend([target_outputs[dep] for dep in spec['dependencies'] + if target_outputs[dep]]) + for dep in spec['dependencies']: + if dep in target_link_deps: + link_deps.append(target_link_deps[dep]) + deps.extend(link_deps) + return (gyp.common.uniquer(deps), gyp.common.uniquer(link_deps)) + + + def WriteTargetFlags(self, spec, configs, link_deps): + """Write Makefile code to specify the link flags and library dependencies. + + spec, configs: input from gyp. + link_deps: link dependency list; see ComputeDeps() + """ + # Libraries (i.e. -lfoo) + # These must be included even for static libraries as some of them provide + # implicit include paths through the build system. + libraries = gyp.common.uniquer(spec.get('libraries', [])) + static_libs, dynamic_libs, ldflags_libs = self.FilterLibraries(libraries) + + if self.type != 'static_library': + for configname, config in sorted(configs.items()): + ldflags = list(config.get('ldflags', [])) + self.WriteLn('') + self.WriteList(ldflags, 'LOCAL_LDFLAGS_%s' % configname) + self.WriteList(ldflags_libs, 'LOCAL_GYP_LIBS') + self.WriteLn('LOCAL_LDFLAGS := $(LOCAL_LDFLAGS_$(GYP_CONFIGURATION)) ' + '$(LOCAL_GYP_LIBS)') + + # Link dependencies (i.e. other gyp targets this target depends on) + # These need not be included for static libraries as within the gyp build + # we do not use the implicit include path mechanism. + if self.type != 'static_library': + static_link_deps = [x[1] for x in link_deps if x[0] == 'static'] + shared_link_deps = [x[1] for x in link_deps if x[0] == 'shared'] + else: + static_link_deps = [] + shared_link_deps = [] + + # Only write the lists if they are non-empty. + if static_libs or static_link_deps: + self.WriteLn('') + self.WriteList(static_libs + static_link_deps, + 'LOCAL_STATIC_LIBRARIES') + self.WriteLn('# Enable grouping to fix circular references') + self.WriteLn('LOCAL_GROUP_STATIC_LIBRARIES := true') + if dynamic_libs or shared_link_deps: + self.WriteLn('') + self.WriteList(dynamic_libs + shared_link_deps, + 'LOCAL_SHARED_LIBRARIES') + + + def WriteTarget(self, spec, configs, deps, link_deps, part_of_all, + write_alias_target): + """Write Makefile code to produce the final target of the gyp spec. + + spec, configs: input from gyp. 
+ deps, link_deps: dependency lists; see ComputeDeps() + part_of_all: flag indicating this target is part of 'all' + write_alias_target: flag indicating whether to create short aliases for this + target + """ + self.WriteLn('### Rules for final target.') + + if self.type != 'none': + self.WriteTargetFlags(spec, configs, link_deps) + + settings = spec.get('aosp_build_settings', {}) + if settings: + self.WriteLn('### Set directly by aosp_build_settings.') + for k, v in settings.items(): + if isinstance(v, list): + self.WriteList(v, k) + else: + self.WriteLn('%s := %s' % (k, make.QuoteIfNecessary(v))) + self.WriteLn('') + + # Add to the set of targets which represent the gyp 'all' target. We use the + # name 'gyp_all_modules' as the Android build system doesn't allow the use + # of the Make target 'all' and because 'all_modules' is the equivalent of + # the Make target 'all' on Android. + if part_of_all and write_alias_target: + self.WriteLn('# Add target alias to "gyp_all_modules" target.') + self.WriteLn('.PHONY: gyp_all_modules') + self.WriteLn('gyp_all_modules: %s' % self.android_module) + self.WriteLn('') + + # Add an alias from the gyp target name to the Android module name. This + # simplifies manual builds of the target, and is required by the test + # framework. + if self.target != self.android_module and write_alias_target: + self.WriteLn('# Alias gyp target name.') + self.WriteLn('.PHONY: %s' % self.target) + self.WriteLn('%s: %s' % (self.target, self.android_module)) + self.WriteLn('') + + # Add the command to trigger build of the target type depending + # on the toolset. Ex: BUILD_STATIC_LIBRARY vs. BUILD_HOST_STATIC_LIBRARY + # NOTE: This has to come last! + modifier = '' + if self.toolset == 'host': + modifier = 'HOST_' + if self.type == 'static_library': + self.WriteLn('include $(BUILD_%sSTATIC_LIBRARY)' % modifier) + elif self.type == 'shared_library': + self.WriteLn('LOCAL_PRELINK_MODULE := false') + self.WriteLn('include $(BUILD_%sSHARED_LIBRARY)' % modifier) + elif self.type == 'executable': + self.WriteLn('LOCAL_CXX_STL := libc++_static') + # Executables are for build and test purposes only, so they're installed + # to a directory that doesn't get included in the system image. + self.WriteLn('LOCAL_MODULE_PATH := $(gyp_shared_intermediate_dir)') + self.WriteLn('include $(BUILD_%sEXECUTABLE)' % modifier) + else: + self.WriteLn('LOCAL_MODULE_PATH := $(PRODUCT_OUT)/gyp_stamp') + self.WriteLn('LOCAL_UNINSTALLABLE_MODULE := true') + if self.toolset == 'target': + self.WriteLn('LOCAL_2ND_ARCH_VAR_PREFIX := $(GYP_VAR_PREFIX)') + else: + self.WriteLn('LOCAL_2ND_ARCH_VAR_PREFIX := $(GYP_HOST_VAR_PREFIX)') + self.WriteLn() + self.WriteLn('include $(BUILD_SYSTEM)/base_rules.mk') + self.WriteLn() + self.WriteLn('$(LOCAL_BUILT_MODULE): $(LOCAL_ADDITIONAL_DEPENDENCIES)') + self.WriteLn('\t$(hide) echo "Gyp timestamp: $@"') + self.WriteLn('\t$(hide) mkdir -p $(dir $@)') + self.WriteLn('\t$(hide) touch $@') + self.WriteLn() + self.WriteLn('LOCAL_2ND_ARCH_VAR_PREFIX :=') + + + def WriteList(self, value_list, variable=None, prefix='', + quoter=make.QuoteIfNecessary, local_pathify=False): + """Write a variable definition that is a list of values. + + E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out + foo = blaha blahb + but in a pretty-printed style. 
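+    Concretely (an illustrative rendering; the code below emits ':=' and uses
+    tab-indented continuation lines):
+      foo := \
+          blaha \
+          blahb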
+ """ + values = '' + if value_list: + value_list = [quoter(prefix + l) for l in value_list] + if local_pathify: + value_list = [self.LocalPathify(l) for l in value_list] + values = ' \\\n\t' + ' \\\n\t'.join(value_list) + self.fp.write('%s :=%s\n\n' % (variable, values)) + + + def WriteLn(self, text=''): + self.fp.write(text + '\n') + + + def LocalPathify(self, path): + """Convert a subdirectory-relative path into a normalized path which starts + with the make variable $(LOCAL_PATH) (i.e. the top of the project tree). + Absolute paths, or paths that contain variables, are just normalized.""" + if '$(' in path or os.path.isabs(path): + # path is not a file in the project tree in this case, but calling + # normpath is still important for trimming trailing slashes. + return os.path.normpath(path) + local_path = os.path.join('$(LOCAL_PATH)', self.path, path) + local_path = os.path.normpath(local_path) + # Check that normalizing the path didn't ../ itself out of $(LOCAL_PATH) + # - i.e. that the resulting path is still inside the project tree. The + # path may legitimately have ended up containing just $(LOCAL_PATH), though, + # so we don't look for a slash. + assert local_path.startswith('$(LOCAL_PATH)'), ( + 'Path %s attempts to escape from gyp path %s !)' % (path, self.path)) + return local_path + + + def ExpandInputRoot(self, template, expansion, dirname): + if '%(INPUT_ROOT)s' not in template and '%(INPUT_DIRNAME)s' not in template: + return template + path = template % { + 'INPUT_ROOT': expansion, + 'INPUT_DIRNAME': dirname, + } + return os.path.normpath(path) + + +def PerformBuild(data, configurations, params): + # The android backend only supports the default configuration. + options = params['options'] + makefile = os.path.abspath(os.path.join(options.toplevel_dir, + 'GypAndroid.mk')) + env = dict(os.environ) + env['ONE_SHOT_MAKEFILE'] = makefile + arguments = ['make', '-C', os.environ['ANDROID_BUILD_TOP'], 'gyp_all_modules'] + print('Building: %s' % arguments) + subprocess.check_call(arguments, env=env) + + +def GenerateOutput(target_list, target_dicts, data, params): + options = params['options'] + generator_flags = params.get('generator_flags', {}) + builddir_name = generator_flags.get('output_dir', 'out') + limit_to_target_all = generator_flags.get('limit_to_target_all', False) + write_alias_targets = generator_flags.get('write_alias_targets', True) + sdk_version = generator_flags.get('aosp_sdk_version', 0) + android_top_dir = os.environ.get('ANDROID_BUILD_TOP') + assert android_top_dir, '$ANDROID_BUILD_TOP not set; you need to run lunch.' + + def CalculateMakefilePath(build_file, base_name): + """Determine where to write a Makefile for a given gyp file.""" + # Paths in gyp files are relative to the .gyp file, but we want + # paths relative to the source root for the master makefile. Grab + # the path of the .gyp file as the base to relativize against. + # E.g. "foo/bar" when we're constructing targets for "foo/bar/baz.gyp". + base_path = gyp.common.RelativePath(os.path.dirname(build_file), + options.depth) + # We write the file in the base_path directory. + output_file = os.path.join(options.depth, base_path, base_name) + assert not options.generator_output, ( + 'The Android backend does not support options.generator_output.') + base_path = gyp.common.RelativePath(os.path.dirname(build_file), + options.toplevel_dir) + return base_path, output_file + + # TODO: search for the first non-'Default' target. 
This can go + # away when we add verification that all targets have the + # necessary configurations. + default_configuration = None + toolsets = set([target_dicts[target]['toolset'] for target in target_list]) + for target in target_list: + spec = target_dicts[target] + if spec['default_configuration'] != 'Default': + default_configuration = spec['default_configuration'] + break + if not default_configuration: + default_configuration = 'Default' + + srcdir = '.' + makefile_name = 'GypAndroid' + options.suffix + '.mk' + makefile_path = os.path.join(options.toplevel_dir, makefile_name) + assert not options.generator_output, ( + 'The Android backend does not support options.generator_output.') + gyp.common.EnsureDirExists(makefile_path) + root_makefile = open(makefile_path, 'w') + + root_makefile.write(header) + + # We set LOCAL_PATH just once, here, to the top of the project tree. This + # allows all the other paths we use to be relative to the Android.mk file, + # as the Android build system expects. + root_makefile.write('\nLOCAL_PATH := $(call my-dir)\n') + + # Find the list of targets that derive from the gyp file(s) being built. + needed_targets = set() + for build_file in params['build_files']: + for target in gyp.common.AllTargets(target_list, target_dicts, build_file): + needed_targets.add(target) + + build_files = set() + include_list = set() + android_modules = {} + for qualified_target in target_list: + build_file, target, toolset = gyp.common.ParseQualifiedTarget( + qualified_target) + relative_build_file = gyp.common.RelativePath(build_file, + options.toplevel_dir) + build_files.add(relative_build_file) + included_files = data[build_file]['included_files'] + for included_file in included_files: + # The included_files entries are relative to the dir of the build file + # that included them, so we have to undo that and then make them relative + # to the root dir. + relative_include_file = gyp.common.RelativePath( + gyp.common.UnrelativePath(included_file, build_file), + options.toplevel_dir) + abs_include_file = os.path.abspath(relative_include_file) + # If the include file is from the ~/.gyp dir, we should use absolute path + # so that relocating the src dir doesn't break the path. + if (params['home_dot_gyp'] and + abs_include_file.startswith(params['home_dot_gyp'])): + build_files.add(abs_include_file) + else: + build_files.add(relative_include_file) + + base_path, output_file = CalculateMakefilePath(build_file, + target + '.' + toolset + options.suffix + '.mk') + + spec = target_dicts[qualified_target] + configs = spec['configurations'] + + part_of_all = qualified_target in needed_targets + if limit_to_target_all and not part_of_all: + continue + + relative_target = gyp.common.QualifiedTarget(relative_build_file, target, + toolset) + writer = AndroidMkWriter(android_top_dir) + android_module = writer.Write(qualified_target, relative_target, base_path, + output_file, spec, configs, + part_of_all=part_of_all, + write_alias_target=write_alias_targets, + sdk_version=sdk_version) + if android_module in android_modules: + print('ERROR: Android module names must be unique. The following ' + 'targets both generate Android module name %s.\n %s\n %s' % + (android_module, android_modules[android_module], + qualified_target)) + return + android_modules[android_module] = qualified_target + + # Our root_makefile lives at the source root. Compute the relative path + # from there to the output_file for including. 
+ mkfile_rel_path = gyp.common.RelativePath(output_file, + os.path.dirname(makefile_path)) + include_list.add(mkfile_rel_path) + + root_makefile.write('GYP_CONFIGURATION ?= %s\n' % default_configuration) + root_makefile.write('GYP_VAR_PREFIX ?=\n') + root_makefile.write('GYP_HOST_VAR_PREFIX ?=\n') + root_makefile.write('GYP_HOST_MULTILIB ?= first\n') + + # Write out the sorted list of includes. + root_makefile.write('\n') + for include_file in sorted(include_list): + root_makefile.write('include $(LOCAL_PATH)/' + include_file + '\n') + root_makefile.write('\n') + + if write_alias_targets: + root_makefile.write(ALL_MODULES_FOOTER) + + root_makefile.close() diff --git a/tools/gyp/pylib/gyp/generator/cmake.py b/tools/gyp/pylib/gyp/generator/cmake.py index 149268711b8..e966a8f23e1 100644 --- a/tools/gyp/pylib/gyp/generator/cmake.py +++ b/tools/gyp/pylib/gyp/generator/cmake.py @@ -240,7 +240,10 @@ def StringToCMakeTargetName(a): Invalid for make: ':' Invalid for unknown reasons but cause failures: '.' """ - return a.translate(string.maketrans(' /():."', '_______')) + try: + return a.translate(str.maketrans(' /():."', '_______')) + except AttributeError: + return a.translate(string.maketrans(' /():."', '_______')) def WriteActions(target_name, actions, extra_sources, extra_deps, @@ -575,7 +578,7 @@ class CMakeNamer(object): """Converts Gyp target names into CMake target names. CMake requires that target names be globally unique. One way to ensure - this is to fully qualify the names of the targets. Unfortunatly, this + this is to fully qualify the names of the targets. Unfortunately, this ends up with all targets looking like "chrome_chrome_gyp_chrome" instead of just "chrome". If this generator were only interested in building, it would be possible to fully qualify all target names, then create @@ -647,7 +650,7 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use, cmake_target_type = cmake_target_type_from_gyp_target_type.get(target_type) if cmake_target_type is None: print('Target %s has unknown target type %s, skipping.' % - ( target_name, target_type)) + ( target_name, target_type )) return SetVariable(output, 'TARGET', target_name) diff --git a/tools/gyp/pylib/gyp/generator/eclipse.py b/tools/gyp/pylib/gyp/generator/eclipse.py index 372ceec246d..80e5fb6302c 100644 --- a/tools/gyp/pylib/gyp/generator/eclipse.py +++ b/tools/gyp/pylib/gyp/generator/eclipse.py @@ -26,6 +26,8 @@ import shlex import xml.etree.cElementTree as ET +PY3 = bytes != str + generator_wants_static_library_dependencies_adjusted = False generator_default_variables = { @@ -97,6 +99,8 @@ def GetAllIncludeDirectories(target_list, target_dicts, proc = subprocess.Popen(args=command, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) output = proc.communicate()[1] + if PY3: + output = output.decode('utf-8') # Extract the list of include dirs from the output, which has this format: # ... # #include "..." search starts here: @@ -195,8 +199,8 @@ def GetAllDefines(target_list, target_dicts, data, config_name, params, """Calculate the defines for a project. Returns: - A dict that includes explict defines declared in gyp files along with all of - the default defines that the compiler uses. + A dict that includes explicit defines declared in gyp files along with all + of the default defines that the compiler uses. """ # Get defines declared in the gyp files. 
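The `PY3 = bytes != str` flag defined near the top of this file (and in several other modules touched by this patch) feeds the decode step added in the next hunk. A minimal, self-contained sketch of the idiom follows; the function name and command are illustrative, not part of the patch:

```python
import subprocess

# True on Python 3, where str and bytes are distinct types; False on
# Python 2, where subprocess output is already a (byte) str.
PY3 = bytes != str


def run_and_read(args):
    # communicate() returns bytes on Python 3 and str on Python 2.
    proc = subprocess.Popen(args, stdout=subprocess.PIPE)
    out = proc.communicate()[0]
    if PY3:
        # Normalize to text so downstream string handling works unchanged.
        out = out.decode('utf-8')
    return out


if __name__ == '__main__':
    print(run_and_read(['echo', 'hello']))
```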
@@ -234,6 +238,8 @@ def GetAllDefines(target_list, target_dicts, data, config_name, params, cpp_proc = subprocess.Popen(args=command, cwd='.', stdin=subprocess.PIPE, stdout=subprocess.PIPE) cpp_output = cpp_proc.communicate()[0] + if PY3: + cpp_output = cpp_output.decode('utf-8') cpp_lines = cpp_output.split('\n') for cpp_line in cpp_lines: if not cpp_line.strip(): diff --git a/tools/gyp/pylib/gyp/generator/make.py b/tools/gyp/pylib/gyp/generator/make.py index 91a119c5a57..26cf88cccf2 100644 --- a/tools/gyp/pylib/gyp/generator/make.py +++ b/tools/gyp/pylib/gyp/generator/make.py @@ -12,7 +12,7 @@ # all are sourced by the top-level Makefile. This means that all # variables in .mk-files clobber one another. Be careful to use := # where appropriate for immediate evaluation, and similarly to watch -# that you're not relying on a variable value to last beween different +# that you're not relying on a variable value to last between different # .mk files. # # TODOs: @@ -234,6 +234,25 @@ def CalculateGeneratorInputInfo(params): """ +LINK_COMMANDS_OS390 = """\ +quiet_cmd_alink = AR($(TOOLSET)) $@ +cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^) + +quiet_cmd_alink_thin = AR($(TOOLSET)) $@ +cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^) + +quiet_cmd_link = LINK($(TOOLSET)) $@ +cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS) + +quiet_cmd_solink = SOLINK($(TOOLSET)) $@ +cmd_solink = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS) -Wl,DLL + +quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ +cmd_solink_module = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) -Wl,DLL + +""" + + # Header of toplevel Makefile. # This should go into the build tree, but it's easier to keep it here for now. SHARED_HEADER = ("""\ @@ -317,7 +336,7 @@ def CalculateGeneratorInputInfo(params): # We write to a dep file on the side first and then rename at the end # so we can't end up with a broken dep file. depfile = $(depsdir)/$(call replace_spaces,$@).d -DEPFLAGS = -MMD -MF $(depfile).raw +DEPFLAGS = %(makedep_args)s -MF $(depfile).raw # We have to fixup the deps output in a few ways. # (1) the file output should mention the proper .o file. @@ -630,6 +649,9 @@ def Sourceify(path): def QuoteSpaces(s, quote=r'\ '): return s.replace(' ', quote) +def SourceifyAndQuoteSpaces(path): + """Convert a path to its source directory form and quote spaces.""" + return QuoteSpaces(Sourceify(path)) # TODO: Avoid code duplication with _ValidateSourcesForMSVSProject in msvs.py. def _ValidateSourcesForOSX(spec, all_sources): @@ -657,9 +679,8 @@ def _ValidateSourcesForOSX(spec, all_sources): error += ' %s: %s\n' % (basename, ' '.join(files)) if error: - print('static library %s has several files with the same basename:\n' % - spec['target_name'] + error + 'libtool on OS X will generate' + - ' warnings for them.') + print(('static library %s has several files with the same basename:\n' % spec['target_name']) + + error + 'libtool on OS X will generate' + ' warnings for them.') raise GypError('Duplicate basenames in sources section, see list above') @@ -1755,8 +1776,8 @@ def WriteMakeRule(self, outputs, inputs, actions=None, comment=None, # - The multi-output rule will have an do-nothing recipe. # Hash the target name to avoid generating overlong filenames. 
- cmddigest = hashlib.sha1((command or self.target).encode("utf-8")).hexdigest() - intermediate = "%s.intermediate" % (cmddigest) + cmddigest = hashlib.sha1((command or self.target).encode('utf-8')).hexdigest() + intermediate = "%s.intermediate" % cmddigest self.WriteLn('%s: %s' % (' '.join(outputs), intermediate)) self.WriteLn('\t%s' % '@:') self.WriteLn('%s: %s' % ('.INTERMEDIATE', intermediate)) @@ -1956,7 +1977,7 @@ def WriteAutoRegenerationRule(params, root_makefile, makefile_name, "%(makefile_name)s: %(deps)s\n" "\t$(call do_cmd,regen_makefile)\n\n" % { 'makefile_name': makefile_name, - 'deps': ' '.join(Sourceify(bf) for bf in build_files), + 'deps': ' '.join(SourceifyAndQuoteSpaces(bf) for bf in build_files), 'cmd': gyp.common.EncodePOSIXShellList( [gyp_binary, '-fmake'] + gyp.RegenerateFlags(options) + @@ -2024,6 +2045,7 @@ def CalculateMakefilePath(build_file, base_name): flock_command= 'flock' copy_archive_arguments = '-af' + makedep_arguments = '-MMD' header_params = { 'default_target': default_target, 'builddir': builddir_name, @@ -2034,6 +2056,15 @@ def CalculateMakefilePath(build_file, base_name): 'extra_commands': '', 'srcdir': srcdir, 'copy_archive_args': copy_archive_arguments, + 'makedep_args': makedep_arguments, + 'CC.target': GetEnvironFallback(('CC_target', 'CC'), '$(CC)'), + 'AR.target': GetEnvironFallback(('AR_target', 'AR'), '$(AR)'), + 'CXX.target': GetEnvironFallback(('CXX_target', 'CXX'), '$(CXX)'), + 'LINK.target': GetEnvironFallback(('LINK_target', 'LINK'), '$(LINK)'), + 'CC.host': GetEnvironFallback(('CC_host', 'CC'), 'gcc'), + 'AR.host': GetEnvironFallback(('AR_host', 'AR'), 'ar'), + 'CXX.host': GetEnvironFallback(('CXX_host', 'CXX'), 'g++'), + 'LINK.host': GetEnvironFallback(('LINK_host', 'LINK'), '$(CXX.host)'), } if flavor == 'mac': flock_command = './gyp-mac-tool flock' @@ -2047,6 +2078,18 @@ def CalculateMakefilePath(build_file, base_name): header_params.update({ 'link_commands': LINK_COMMANDS_ANDROID, }) + elif flavor == 'zos': + copy_archive_arguments = '-fPR' + makedep_arguments = '-qmakedep=gcc' + header_params.update({ + 'copy_archive_args': copy_archive_arguments, + 'makedep_args': makedep_arguments, + 'link_commands': LINK_COMMANDS_OS390, + 'CC.target': GetEnvironFallback(('CC_target', 'CC'), 'njsc'), + 'CXX.target': GetEnvironFallback(('CXX_target', 'CXX'), 'njsc++'), + 'CC.host': GetEnvironFallback(('CC_host', 'CC'), 'njsc'), + 'CXX.host': GetEnvironFallback(('CXX_host', 'CXX'), 'njsc++'), + }) elif flavor == 'solaris': header_params.update({ 'flock': './gyp-flock-tool flock', @@ -2071,17 +2114,6 @@ def CalculateMakefilePath(build_file, base_name): 'flock_index': 2, }) - header_params.update({ - 'CC.target': GetEnvironFallback(('CC_target', 'CC'), '$(CC)'), - 'AR.target': GetEnvironFallback(('AR_target', 'AR'), '$(AR)'), - 'CXX.target': GetEnvironFallback(('CXX_target', 'CXX'), '$(CXX)'), - 'LINK.target': GetEnvironFallback(('LINK_target', 'LINK'), '$(LINK)'), - 'CC.host': GetEnvironFallback(('CC_host', 'CC'), 'gcc'), - 'AR.host': GetEnvironFallback(('AR_host', 'AR'), 'ar'), - 'CXX.host': GetEnvironFallback(('CXX_host', 'CXX'), 'g++'), - 'LINK.host': GetEnvironFallback(('LINK_host', 'LINK'), '$(CXX.host)'), - }) - build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0]) make_global_settings_array = data[build_file].get('make_global_settings', []) wrappers = {} diff --git a/tools/gyp/pylib/gyp/generator/msvs.py b/tools/gyp/pylib/gyp/generator/msvs.py index 1aed4ca8aa7..933042c7113 100644 --- a/tools/gyp/pylib/gyp/generator/msvs.py +++ 
b/tools/gyp/pylib/gyp/generator/msvs.py @@ -12,6 +12,8 @@ import subprocess import sys +from collections import OrderedDict + import gyp.common import gyp.easy_xml as easy_xml import gyp.generator.ninja as ninja_generator @@ -25,15 +27,7 @@ from gyp.common import GypError from gyp.common import OrderedSet -# TODO: Remove once bots are on 2.7, http://crbug.com/241769 -def _import_OrderedDict(): - import collections - try: - return collections.OrderedDict - except AttributeError: - import gyp.ordered_dict - return gyp.ordered_dict.OrderedDict -OrderedDict = _import_OrderedDict() +PY3 = bytes != str # Regular expression for validating Visual Studio GUIDs. If the GUID @@ -90,6 +84,7 @@ def _import_OrderedDict(): 'msvs_enable_winrt', 'msvs_requires_importlibrary', 'msvs_enable_winphone', + 'msvs_enable_marmasm', 'msvs_application_type_revision', 'msvs_target_platform_version', 'msvs_target_platform_minversion', @@ -126,6 +121,8 @@ def _GetDomainAndUserName(): call = subprocess.Popen(['net', 'config', 'Workstation'], stdout=subprocess.PIPE) config = call.communicate()[0] + if PY3: + config = config.decode('utf-8') username_re = re.compile(r'^User name\s+(\S+)', re.MULTILINE) username_match = username_re.search(config) if username_match: @@ -167,7 +164,7 @@ def _FixPath(path): Returns: The path with all slashes made into backslashes. """ - if fixpath_prefix and path and not os.path.isabs(path) and not path[0] == '$': + if fixpath_prefix and path and not os.path.isabs(path) and not path[0] == '$' and not _IsWindowsAbsPath(path): path = os.path.join(fixpath_prefix, path) path = path.replace('/', '\\') path = _NormalizedSource(path) @@ -176,6 +173,15 @@ def _FixPath(path): return path +def _IsWindowsAbsPath(path): + """ + On Cygwin systems Python needs a little help determining if a path is an absolute Windows path or not, so that + it does not treat those as relative, which results in bad paths like: + '..\C:\\some_source_code_file.cc' + """ + return path.startswith('c:') or path.startswith('C:') + + def _FixPaths(paths): """Fix each of the paths of the list.""" return [_FixPath(i) for i in paths] @@ -297,6 +303,9 @@ def _ConfigFullName(config_name, config_data): def _ConfigWindowsTargetPlatformVersion(config_data, version): + target_ver = config_data.get('msvs_windows_target_platform_version') + if target_ver and re.match(r'^\d+', target_ver): + return target_ver config_ver = config_data.get('msvs_windows_sdk_version') vers = [config_ver] if config_ver else version.compatible_sdks for ver in vers: @@ -775,8 +784,8 @@ def _Replace(match): # the VCProj but cause the same problem on the final command-line. Moving # the item to the end of the list does works, but that's only possible if # there's only one such item. Let's just warn the user. 
- print('Warning: MSVS may misinterpret the odd number of ' - 'quotes in ' + s, file=sys.stderr) + print('Warning: MSVS may misinterpret the odd number of ' + + 'quotes in ' + s, file=sys.stderr) return s @@ -996,8 +1005,8 @@ def _ValidateSourcesForMSVSProject(spec, version): error += ' %s: %s\n' % (basename, ' '.join(files)) if error: - print('static library %s has several files with the same basename:\n' % - spec['target_name'] + error + 'MSVC08 cannot handle that.') + print('static library %s has several files with the same basename:\n' % spec['target_name'] + + error + 'MSVC08 cannot handle that.') raise GypError('Duplicate basenames in sources section, see list above') @@ -1913,6 +1922,8 @@ def _InitNinjaFlavor(params, target_list, target_dicts): configuration = '$(Configuration)' if params.get('target_arch') == 'x64': configuration += '_x64' + if params.get('target_arch') == 'arm64': + configuration += '_arm64' spec['msvs_external_builder_out_dir'] = os.path.join( gyp.common.RelativePath(params['options'].toplevel_dir, gyp_dir), ninja_generator.ComputeOutputDir(params), @@ -2163,7 +2174,7 @@ def _MapFileToMsBuildSourceType(source, rule_dependencies, if ext in extension_to_rule_name: group = 'rule' element = extension_to_rule_name[ext] - elif ext in ['.cc', '.cpp', '.c', '.cxx']: + elif ext in ['.cc', '.cpp', '.c', '.cxx', '.mm']: group = 'compile' element = 'ClCompile' elif ext in ['.h', '.hxx']: @@ -3106,7 +3117,7 @@ def _FinalizeMSBuildSettings(spec, configuration): _ToolAppend(msbuild_settings, 'ResourceCompile', 'AdditionalIncludeDirectories', resource_include_dirs) # Add in libraries, note that even for empty libraries, we want this - # set, to prevent inheriting default libraries from the enviroment. + # set, to prevent inheriting default libraries from the environment. 
_ToolSetOrAppend(msbuild_settings, 'Link', 'AdditionalDependencies', libraries) _ToolAppend(msbuild_settings, 'Link', 'AdditionalLibraryDirectories', @@ -3411,7 +3422,8 @@ def _GenerateMSBuildProject(project, options, version, generator_flags): content += _GetMSBuildLocalProperties(project.msbuild_toolset) content += import_cpp_props_section content += import_masm_props_section - content += import_marmasm_props_section + if spec.get('msvs_enable_marmasm'): + content += import_marmasm_props_section content += _GetMSBuildExtensions(props_files_of_rules) content += _GetMSBuildPropertySheets(configurations) content += macro_section @@ -3424,7 +3436,8 @@ def _GenerateMSBuildProject(project, options, version, generator_flags): content += _GetMSBuildProjectReferences(project) content += import_cpp_targets_section content += import_masm_targets_section - content += import_marmasm_targets_section + if spec.get('msvs_enable_marmasm'): + content += import_marmasm_targets_section content += _GetMSBuildExtensionTargets(targets_files_of_rules) if spec.get('msvs_external_builder'): diff --git a/tools/gyp/pylib/gyp/generator/ninja.py b/tools/gyp/pylib/gyp/generator/ninja.py index 75743e770d1..d5006bf84a0 100644 --- a/tools/gyp/pylib/gyp/generator/ninja.py +++ b/tools/gyp/pylib/gyp/generator/ninja.py @@ -744,7 +744,7 @@ def cygwin_munge(path): elif var == 'name': extra_bindings.append(('name', cygwin_munge(basename))) else: - assert var == None, repr(var) + assert var is None, repr(var) outputs = [self.GypPathToNinja(o, env) for o in outputs] if self.flavor == 'win': @@ -1880,7 +1880,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, # - The priority from low to high is gcc/g++, the 'make_global_settings' in # gyp, the environment variable. # - If there is no 'make_global_settings' for CC.host/CXX.host or - # 'CC_host'/'CXX_host' enviroment variable, cc_host/cxx_host should be set + # 'CC_host'/'CXX_host' environment variable, cc_host/cxx_host should be set # to cc/cxx. if flavor == 'win': ar = 'lib.exe' @@ -2321,15 +2321,22 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, 'stamp', description='STAMP $out', command='%s gyp-win-tool stamp $out' % sys.executable) - master_ninja.rule( - 'copy', - description='COPY $in $out', - command='%s gyp-win-tool recursive-mirror $in $out' % sys.executable) else: master_ninja.rule( 'stamp', description='STAMP $out', command='${postbuilds}touch $out') + if flavor == 'win': + master_ninja.rule( + 'copy', + description='COPY $in $out', + command='%s gyp-win-tool recursive-mirror $in $out' % sys.executable) + elif flavor == 'zos': + master_ninja.rule( + 'copy', + description='COPY $in $out', + command='rm -rf $out && cp -fRP $in $out') + else: master_ninja.rule( 'copy', description='COPY $in $out', diff --git a/tools/gyp/pylib/gyp/generator/xcode.py b/tools/gyp/pylib/gyp/generator/xcode.py index 9242324196d..4917ba77b9d 100644 --- a/tools/gyp/pylib/gyp/generator/xcode.py +++ b/tools/gyp/pylib/gyp/generator/xcode.py @@ -541,7 +541,7 @@ def ExpandXcodeVariables(string, expansions): """ matches = _xcode_variable_re.findall(string) - if matches == None: + if matches is None: return string matches.reverse() @@ -1010,7 +1010,7 @@ def GenerateOutput(target_list, target_dicts, data, params): actions.append(action) if len(concrete_outputs_all) > 0: - # TODO(mark): There's a possibilty for collision here. Consider + # TODO(mark): There's a possibility for collision here. Consider # target "t" rule "A_r" and target "t_A" rule "r". 
makefile_name = '%s.make' % re.sub( '[^a-zA-Z0-9_]', '_' , '%s_%s' % (target_name, rule['rule_name'])) diff --git a/tools/gyp/pylib/gyp/input.py b/tools/gyp/pylib/gyp/input.py index 6db204e4010..1f40abb0695 100644 --- a/tools/gyp/pylib/gyp/input.py +++ b/tools/gyp/pylib/gyp/input.py @@ -23,6 +23,7 @@ from gyp.common import GypError from gyp.common import OrderedSet +PY3 = bytes != str # A list of types that are treated as linkable. linkable_types = [ @@ -157,7 +158,7 @@ def GetIncludedBuildFiles(build_file_path, aux_data, included=None): in the list will be relative to the current directory. """ - if included == None: + if included is None: included = [] if build_file_path in included: @@ -222,7 +223,15 @@ def LoadOneBuildFile(build_file_path, data, aux_data, includes, return data[build_file_path] if os.path.exists(build_file_path): - build_file_contents = open(build_file_path).read() + # Open the build file for read ('r') with universal-newlines mode ('U'), + # to make sure platform-specific newlines ('\r\n' or '\r') are converted to '\n', + # which would otherwise make eval() fail + if sys.platform == 'zos': + # On z/OS, universal-newlines mode treats the file as an ASCII file. But since + # node-gyp produces EBCDIC files, do not use that mode. + build_file_contents = open(build_file_path, 'r').read() + else: + build_file_contents = open(build_file_path, 'rU').read() else: raise GypError("%s not found (cwd: %s)" % (build_file_path, os.getcwd())) @@ -231,7 +240,7 @@ if check: build_file_data = CheckedEval(build_file_contents) else: - build_file_data = eval(build_file_contents, {'__builtins__': None}, + build_file_data = eval(build_file_contents, {'__builtins__': {}}, None) except SyntaxError as e: e.filename = build_file_path @@ -700,9 +709,6 @@ def FixupPlatformCommand(cmd): def ExpandVariables(input, phase, variables, build_file): # Look for the pattern that gets expanded into variables - def to_utf8(s): - return s if isinstance(s, str) else s.decode('utf-8') - if phase == PHASE_EARLY: variable_re = early_variable_re expansion_symbol = '<' @@ -906,8 +912,9 @@ def to_utf8(s): (e, contents, build_file)) p_stdout, p_stderr = p.communicate('') - p_stdout = to_utf8(p_stdout) - p_stderr = to_utf8(p_stderr) + if PY3: + p_stdout = p_stdout.decode('utf-8') + p_stderr = p_stderr.decode('utf-8') if p.wait() != 0 or p_stderr: sys.stderr.write(p_stderr) @@ -1061,7 +1068,7 @@ def EvalCondition(condition, conditions_key, phase, variables, build_file): else: false_dict = None i = i + 2 - if result == None: + if result is None: result = EvalSingleCondition( cond_expr, true_dict, false_dict, phase, variables, build_file) @@ -1072,7 +1079,7 @@ def EvalSingleCondition( cond_expr, true_dict, false_dict, phase, variables, build_file): """Returns true_dict if cond_expr evaluates to true, and false_dict otherwise.""" - # Do expansions on the condition itself. Since the conditon can naturally + # Do expansions on the condition itself. Since the condition can naturally # contain variable references without needing to resort to GYP expansion # syntax, this is of dubious value for variables, but someone might want to # use a command expansion directly inside a condition.
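The `PY3 = bytes != str` flag added above is a compact Python 3 probe: Python 2 aliases `bytes` to `str`, so the comparison is only true on Python 3. The decode-after-communicate pattern this patch repeats across input.py, msvs_emulation.py, and win_tool.py looks roughly like the following standalone sketch (the helper name and the command are illustrative, not taken from gyp):

```python
import subprocess
import sys

PY3 = bytes != str  # False on Python 2 (bytes is str), True on Python 3


def run_and_capture(cmd):
    """Run `cmd` and return its stdout as native text on Python 2 and 3."""
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, _ = proc.communicate()
    if PY3:
        # communicate() returns bytes on Python 3; decode before callers
        # apply text operations such as splitlines() to the result.
        out = out.decode('utf-8')
    return out


print(run_and_capture([sys.executable, '-c', 'print("hello")']))
```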
@@ -1089,7 +1096,7 @@ def EvalSingleCondition( else: ast_code = compile(cond_expr_expanded, '', 'eval') cached_conditions_asts[cond_expr_expanded] = ast_code - env = {'__builtins__': None, 'v': StrictVersion} + env = {'__builtins__': {}, 'v': StrictVersion} if eval(ast_code, env, variables): return true_dict return false_dict @@ -1178,7 +1185,7 @@ def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key): continue if the_dict_key == 'variables' and variable_name in the_dict: # If the variable is set without a % in the_dict, and the_dict is a - # variables dict (making |variables| a varaibles sub-dict of a + # variables dict (making |variables| a variables sub-dict of a # variables dict), use the_dict's definition. value = the_dict[variable_name] else: @@ -1608,7 +1615,7 @@ def Visit(node, path): def DirectDependencies(self, dependencies=None): """Returns a list of just direct dependencies.""" - if dependencies == None: + if dependencies is None: dependencies = [] for dependency in self.dependencies: @@ -1636,7 +1643,7 @@ def _AddImportedDependencies(self, targets, dependencies=None): public entry point. """ - if dependencies == None: + if dependencies is None: dependencies = [] index = 0 @@ -1870,7 +1877,7 @@ def VerifyNoGYPFileCircularDependencies(targets): continue dependency_node = dependency_nodes.get(dependency_build_file) if not dependency_node: - raise GypError("Dependancy '%s' not found" % dependency_build_file) + raise GypError("Dependency '%s' not found" % dependency_build_file) if dependency_node not in build_file_node.dependencies: build_file_node.dependencies.append(dependency_node) dependency_node.dependents.append(build_file_node) @@ -2040,7 +2047,7 @@ def MakePathRelative(to_file, fro_file, item): gyp.common.RelativePath(os.path.dirname(fro_file), os.path.dirname(to_file)), item)).replace('\\', '/') - if item[-1] == '/': + if item.endswith('/'): ret += '/' return ret @@ -2288,7 +2295,7 @@ def SetUpConfigurations(target, target_dict): merged_configurations[configuration]) # Now drop all the abstract ones. - for configuration in target_dict['configurations'].keys(): + for configuration in list(target_dict['configurations']): old_configuration_dict = target_dict['configurations'][configuration] if old_configuration_dict.get('abstract'): del target_dict['configurations'][configuration] @@ -2531,8 +2538,8 @@ def ValidateSourcesInTarget(target, target_dict, build_file, error += ' %s: %s\n' % (basename, ' '.join(files)) if error: - print('static library %s has several files with the same basename:\n' % - target + error + 'libtool on Mac cannot handle that. Use ' + print('static library %s has several files with the same basename:\n' % target + + error + 'libtool on Mac cannot handle that. 
Use ' '--no-duplicate-basename-check to disable this validation.') raise GypError('Duplicate basenames in sources section, see list above') diff --git a/tools/gyp/pylib/gyp/mac_tool.py b/tools/gyp/pylib/gyp/mac_tool.py index c4c4a6df130..781a8633bc2 100755 --- a/tools/gyp/pylib/gyp/mac_tool.py +++ b/tools/gyp/pylib/gyp/mac_tool.py @@ -478,8 +478,7 @@ def _FindProvisioningProfile(self, profile, bundle_identifier): profiles_dir = os.path.join( os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') if not os.path.isdir(profiles_dir): - print('cannot find mobile provisioning for %s' % bundle_identifier, - file=sys.stderr) + print('cannot find mobile provisioning for %s' % (bundle_identifier), file=sys.stderr) sys.exit(1) provisioning_profiles = None if profile: @@ -500,8 +499,7 @@ def _FindProvisioningProfile(self, profile, bundle_identifier): valid_provisioning_profiles[app_id_pattern] = ( profile_path, profile_data, team_identifier) if not valid_provisioning_profiles: - print('cannot find mobile provisioning for %s' % bundle_identifier, - file=sys.stderr) + print('cannot find mobile provisioning for %s' % (bundle_identifier), file=sys.stderr) sys.exit(1) # If the user has multiple provisioning profiles installed that can be # used for ${bundle_identifier}, pick the most specific one (ie. the diff --git a/tools/gyp/pylib/gyp/msvs_emulation.py b/tools/gyp/pylib/gyp/msvs_emulation.py index e130b53271c..d42e2e47b98 100644 --- a/tools/gyp/pylib/gyp/msvs_emulation.py +++ b/tools/gyp/pylib/gyp/msvs_emulation.py @@ -16,6 +16,7 @@ import gyp.MSVSUtil import gyp.MSVSVersion +PY3 = bytes != str windows_quoter_regex = re.compile(r'(\\*)"') @@ -130,7 +131,10 @@ def _FindDirectXInstallation(): # Setup params to pass to and attempt to launch reg.exe. cmd = ['reg.exe', 'query', r'HKLM\Software\Microsoft\DirectX', '/s'] p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - for line in p.communicate()[0].splitlines(): + stdout = p.communicate()[0] + if PY3: + stdout = stdout.decode('utf-8') + for line in stdout.splitlines(): if 'InstallPath' in line: dxsdk_dir = line.split(' ')[3] + "\\" @@ -241,7 +245,11 @@ def GetExtension(self): def GetVSMacroEnv(self, base_to_build=None, config=None): """Get a dict of variables mapping internal VS macro names to their gyp equivalents.""" - target_platform = 'Win32' if self.GetArch(config) == 'x86' else 'x64' + target_arch = self.GetArch(config) + if target_arch == 'x86': + target_platform = 'Win32' + else: + target_platform = target_arch target_name = self.spec.get('product_prefix', '') + \ self.spec.get('product_name', self.spec['target_name']) target_dir = base_to_build + '\\' if base_to_build else '' @@ -304,7 +312,7 @@ def GetArch(self, config): if not platform: # If no specific override, use the configuration's. platform = configuration_platform # Map from platform to architecture. 
- return {'Win32': 'x86', 'x64': 'x64'}.get(platform, 'x86') + return {'Win32': 'x86', 'x64': 'x64', 'ARM64': 'arm64'}.get(platform, 'x86') def _TargetConfig(self, config): """Returns the target-specific configuration.""" @@ -379,7 +387,7 @@ def GetCompilerPdbName(self, config, expand_special): return pdbname def GetMapFileName(self, config, expand_special): - """Gets the explicitly overriden map file name for a target or returns None + """Gets the explicitly overridden map file name for a target or returns None if it's not set.""" config = self._TargetConfig(config) map_file = self._Setting(('VCLinkerTool', 'MapFileName'), config) @@ -575,7 +583,10 @@ def GetLdflags(self, config, gyp_to_build_path, expand_special, 'VCLinkerTool', append=ldflags) self._GetDefFileAsLdflags(ldflags, gyp_to_build_path) ld('GenerateDebugInformation', map={'true': '/DEBUG'}) - ld('TargetMachine', map={'1': 'X86', '17': 'X64', '3': 'ARM'}, + # TODO: These 'map' values come from the machineTypeOption enum, + # which does not yet have an official value for ARM64 in VS2017. + # Verify the ARM64 value once machineTypeOption is updated. + ld('TargetMachine', map={'1': 'X86', '17': 'X64', '3': 'ARM', '18': 'ARM64'}, prefix='/MACHINE:') ldflags.extend(self._GetAdditionalLibraryDirectories( 'VCLinkerTool', config, gyp_to_build_path)) @@ -872,7 +883,9 @@ def midl(name, default=None): ('iid', iid), ('proxy', proxy)] # TODO(scottmg): Are there configuration settings to set these flags? - target_platform = 'win32' if self.GetArch(config) == 'x86' else 'x64' + target_platform = self.GetArch(config) + if target_platform == 'x86': + target_platform = 'win32' flags = ['/char', 'signed', '/env', target_platform, '/Oicf'] return outdir, output, variables, flags @@ -1045,6 +1058,8 @@ def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags, popen = subprocess.Popen( args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) variables, _ = popen.communicate() + if PY3: + variables = variables.decode('utf-8') if popen.returncode != 0: raise Exception('"%s" failed with error %d' % (args, popen.returncode)) env = _ExtractImportantEnvironment(variables) @@ -1066,6 +1081,8 @@ def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags, 'for', '%i', 'in', '(cl.exe)', 'do', '@echo', 'LOC:%~$PATH:i')) popen = subprocess.Popen(args, shell=True, stdout=subprocess.PIPE) output, _ = popen.communicate() + if PY3: + output = output.decode('utf-8') cl_paths[arch] = _ExtractCLPath(output) return cl_paths diff --git a/tools/gyp/pylib/gyp/ordered_dict.py b/tools/gyp/pylib/gyp/ordered_dict.py deleted file mode 100644 index 6fe9c1f6c7c..00000000000 --- a/tools/gyp/pylib/gyp/ordered_dict.py +++ /dev/null @@ -1,289 +0,0 @@ -# Unmodified from http://code.activestate.com/recipes/576693/ -# other than to add MIT license header (as specified on page, but not in code). -# Linked from Python documentation here: -# http://docs.python.org/2/library/collections.html#collections.OrderedDict -# -# This should be deleted once Py2.7 is available on all bots, see -# http://crbug.com/241769. -# -# Copyright (c) 2009 Raymond Hettinger.
-# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -# THE SOFTWARE. - -# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy. -# Passes Python2.7's test suite and incorporates all the latest updates. - -try: - from thread import get_ident as _get_ident -except ImportError: - from dummy_thread import get_ident as _get_ident - -try: - from _abcoll import KeysView, ValuesView, ItemsView -except ImportError: - pass - - -class OrderedDict(dict): - 'Dictionary that remembers insertion order' - # An inherited dict maps keys to values. - # The inherited dict provides __getitem__, __len__, __contains__, and get. - # The remaining methods are order-aware. - # Big-O running times for all methods are the same as for regular dictionaries. - - # The internal self.__map dictionary maps keys to links in a doubly linked list. - # The circular doubly linked list starts and ends with a sentinel element. - # The sentinel element never gets deleted (this simplifies the algorithm). - # Each link is stored as a list of length three: [PREV, NEXT, KEY]. - - def __init__(self, *args, **kwds): - '''Initialize an ordered dictionary. Signature is the same as for - regular dictionaries, but keyword arguments are not recommended - because their insertion order is arbitrary. - - ''' - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - try: - self.__root - except AttributeError: - self.__root = root = [] # sentinel node - root[:] = [root, root, None] - self.__map = {} - self.__update(*args, **kwds) - - def __setitem__(self, key, value, dict_setitem=dict.__setitem__): - 'od.__setitem__(i, y) <==> od[i]=y' - # Setting a new item creates a new link which goes at the end of the linked - # list, and the inherited dictionary is updated with the new key/value pair. - if key not in self: - root = self.__root - last = root[0] - last[1] = root[0] = self.__map[key] = [last, root, key] - dict_setitem(self, key, value) - - def __delitem__(self, key, dict_delitem=dict.__delitem__): - 'od.__delitem__(y) <==> del od[y]' - # Deleting an existing item uses self.__map to find the link which is - # then removed by updating the links in the predecessor and successor nodes. 
- dict_delitem(self, key) - link_prev, link_next, key = self.__map.pop(key) - link_prev[1] = link_next - link_next[0] = link_prev - - def __iter__(self): - 'od.__iter__() <==> iter(od)' - root = self.__root - curr = root[1] - while curr is not root: - yield curr[2] - curr = curr[1] - - def __reversed__(self): - 'od.__reversed__() <==> reversed(od)' - root = self.__root - curr = root[0] - while curr is not root: - yield curr[2] - curr = curr[0] - - def clear(self): - 'od.clear() -> None. Remove all items from od.' - try: - for node in self.__map.itervalues(): - del node[:] - root = self.__root - root[:] = [root, root, None] - self.__map.clear() - except AttributeError: - pass - dict.clear(self) - - def popitem(self, last=True): - '''od.popitem() -> (k, v), return and remove a (key, value) pair. - Pairs are returned in LIFO order if last is true or FIFO order if false. - - ''' - if not self: - raise KeyError('dictionary is empty') - root = self.__root - if last: - link = root[0] - link_prev = link[0] - link_prev[1] = root - root[0] = link_prev - else: - link = root[1] - link_next = link[1] - root[1] = link_next - link_next[0] = root - key = link[2] - del self.__map[key] - value = dict.pop(self, key) - return key, value - - # -- the following methods do not depend on the internal structure -- - - def keys(self): - 'od.keys() -> list of keys in od' - return list(self) - - def values(self): - 'od.values() -> list of values in od' - return [self[key] for key in self] - - def items(self): - 'od.items() -> list of (key, value) pairs in od' - return [(key, self[key]) for key in self] - - def iterkeys(self): - 'od.iterkeys() -> an iterator over the keys in od' - return iter(self) - - def itervalues(self): - 'od.itervalues -> an iterator over the values in od' - for k in self: - yield self[k] - - def iteritems(self): - 'od.iteritems -> an iterator over the (key, value) items in od' - for k in self: - yield (k, self[k]) - - # Suppress 'OrderedDict.update: Method has no argument': - # pylint: disable=E0211 - def update(*args, **kwds): - '''od.update(E, **F) -> None. Update od from dict/iterable E and F. - - If E is a dict instance, does: for k in E: od[k] = E[k] - If E has a .keys() method, does: for k in E.keys(): od[k] = E[k] - Or if E is an iterable of items, does: for k, v in E: od[k] = v - In either case, this is followed by: for k, v in F.items(): od[k] = v - - ''' - if len(args) > 2: - raise TypeError('update() takes at most 2 positional ' - 'arguments (%d given)' % (len(args),)) - elif not args: - raise TypeError('update() takes at least 1 argument (0 given)') - self = args[0] - # Make progressively weaker assumptions about "other" - other = () - if len(args) == 2: - other = args[1] - if isinstance(other, dict): - for key in other: - self[key] = other[key] - elif hasattr(other, 'keys'): - for key in other.keys(): - self[key] = other[key] - else: - for key, value in other: - self[key] = value - for key, value in kwds.items(): - self[key] = value - - __update = update # let subclasses override update without breaking __init__ - - __marker = object() - - def pop(self, key, default=__marker): - '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value. - If key is not found, d is returned if given, otherwise KeyError is raised.
- - ''' - if key in self: - result = self[key] - del self[key] - return result - if default is self.__marker: - raise KeyError(key) - return default - - def setdefault(self, key, default=None): - 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' - if key in self: - return self[key] - self[key] = default - return default - - def __repr__(self, _repr_running={}): - 'od.__repr__() <==> repr(od)' - call_key = id(self), _get_ident() - if call_key in _repr_running: - return '...' - _repr_running[call_key] = 1 - try: - if not self: - return '%s()' % (self.__class__.__name__,) - return '%s(%r)' % (self.__class__.__name__, self.items()) - finally: - del _repr_running[call_key] - - def __reduce__(self): - 'Return state information for pickling' - items = [[k, self[k]] for k in self] - inst_dict = vars(self).copy() - for k in vars(OrderedDict()): - inst_dict.pop(k, None) - if inst_dict: - return (self.__class__, (items,), inst_dict) - return self.__class__, (items,) - - def copy(self): - 'od.copy() -> a shallow copy of od' - return self.__class__(self) - - @classmethod - def fromkeys(cls, iterable, value=None): - '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S - and values equal to v (which defaults to None). - - ''' - d = cls() - for key in iterable: - d[key] = value - return d - - def __eq__(self, other): - '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive - while comparison to a regular mapping is order-insensitive. - - ''' - if isinstance(other, OrderedDict): - return len(self)==len(other) and self.items() == other.items() - return dict.__eq__(self, other) - - def __ne__(self, other): - return not self == other - - # -- the following methods are only used in Python 2.7 -- - - def viewkeys(self): - "od.viewkeys() -> a set-like object providing a view on od's keys" - return KeysView(self) - - def viewvalues(self): - "od.viewvalues() -> an object providing a view on od's values" - return ValuesView(self) - - def viewitems(self): - "od.viewitems() -> a set-like object providing a view on od's items" - return ItemsView(self) - diff --git a/tools/gyp/pylib/gyp/win_tool.py b/tools/gyp/pylib/gyp/win_tool.py index ab6db1c4e04..cfdacb0d7cc 100755 --- a/tools/gyp/pylib/gyp/win_tool.py +++ b/tools/gyp/pylib/gyp/win_tool.py @@ -20,6 +20,7 @@ import sys BASE_DIR = os.path.dirname(os.path.abspath(__file__)) +PY3 = bytes != str # A regex matching an argument corresponding to the output filename passed to # link.exe. @@ -132,6 +133,8 @@ def ExecLinkWrapper(self, arch, use_separate_mspdbsrv, *args): link = subprocess.Popen(args, shell=sys.platform == 'win32', env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) out, _ = link.communicate() + if PY3: + out = out.decode('utf-8') for line in out.splitlines(): if (not line.startswith(' Creating library ') and not line.startswith('Generating code') and @@ -223,6 +226,8 @@ def ExecManifestWrapper(self, arch, *args): popen = subprocess.Popen(args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) out, _ = popen.communicate() + if PY3: + out = out.decode('utf-8') for line in out.splitlines(): if line and 'manifest authoring warning 81010002' not in line: print(line) @@ -255,6 +260,8 @@ def ExecMidlWrapper(self, arch, outdir, tlb, h, dlldata, iid, proxy, idl, popen = subprocess.Popen(args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) out, _ = popen.communicate() + if PY3: + out = out.decode('utf-8') # Filter junk out of stdout, and write filtered versions. 
Output we want # to filter is pairs of lines that look like this: # Processing C:\Program Files (x86)\Microsoft SDKs\...\include\objidl.idl @@ -274,6 +281,8 @@ def ExecAsmWrapper(self, arch, *args): popen = subprocess.Popen(args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) out, _ = popen.communicate() + if PY3: + out = out.decode('utf-8') for line in out.splitlines(): if (not line.startswith('Copyright (C) Microsoft Corporation') and not line.startswith('Microsoft (R) Macro Assembler') and @@ -289,6 +298,8 @@ def ExecRcWrapper(self, arch, *args): popen = subprocess.Popen(args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) out, _ = popen.communicate() + if PY3: + out = out.decode('utf-8') for line in out.splitlines(): if (not line.startswith('Microsoft (R) Windows (R) Resource Compiler') and not line.startswith('Copyright (C) Microsoft Corporation') and diff --git a/tools/gyp/pylib/gyp/xcode_emulation.py b/tools/gyp/pylib/gyp/xcode_emulation.py index 905bec7be34..c3daba5fb82 100644 --- a/tools/gyp/pylib/gyp/xcode_emulation.py +++ b/tools/gyp/pylib/gyp/xcode_emulation.py @@ -854,7 +854,7 @@ def GetLdflags(self, configname, product_dir, gyp_to_build_path, arch=None): product_dir: The directory where products such static and dynamic libraries are placed. This is added to the library search path. gyp_to_build_path: A function that converts paths relative to the - current gyp file to paths relative to the build direcotry. + current gyp file to paths relative to the build directory. """ self.configname = configname ldflags = [] @@ -1002,7 +1002,7 @@ def GetPerTargetSetting(self, setting, default=None): def _GetStripPostbuilds(self, configname, output_binary, quiet): """Returns a list of shell commands that contain the shell commands - neccessary to strip this target's binary. These should be run as postbuilds + necessary to strip this target's binary. These should be run as postbuilds before the actual postbuilds run.""" self.configname = configname @@ -1037,7 +1037,7 @@ def _GetStripPostbuilds(self, configname, output_binary, quiet): def _GetDebugInfoPostbuilds(self, configname, output, output_binary, quiet): """Returns a list of shell commands that contain the shell commands - neccessary to massage this target's debug information. These should be run + necessary to massage this target's debug information. These should be run as postbuilds before the actual postbuilds run.""" self.configname = configname @@ -1173,7 +1173,7 @@ def _AdjustLibrary(self, library, config_name=None): # "/usr/lib" libraries, is do "-L/usr/lib -lname" which is dependent on the # library order and cause collision when building Chrome. # - # Instead substitude ".tbd" to ".dylib" in the generated project when the + # Instead substitute ".tbd" to ".dylib" in the generated project when the # following conditions are both true: # - library is referenced in the gyp file as "$(SDKROOT)/**/*.dylib", # - the ".dylib" file does not exists but a ".tbd" file do. @@ -1476,7 +1476,7 @@ def GetStdout(cmdlist): def MergeGlobalXcodeSettingsToSpec(global_dict, spec): """Merges the global xcode_settings dictionary into each configuration of the target represented by spec. For keys that are both in the global and the local - xcode_settings dict, the local key gets precendence. + xcode_settings dict, the local key gets precedence. 
""" # The xcode generator special-cases global xcode_settings and does something # that amounts to merging in the global xcode_settings into each local @@ -1522,7 +1522,7 @@ def GetMacBundleResources(product_dir, xcode_settings, resources): output = dest # The make generator doesn't support it, so forbid it everywhere - # to keep the generators more interchangable. + # to keep the generators more interchangeable. assert ' ' not in res, ( "Spaces in resource filenames not supported (%s)" % res) @@ -1564,14 +1564,14 @@ def GetMacInfoPlist(product_dir, xcode_settings, gyp_path_to_build_path): relative to the build directory. xcode_settings: The XcodeSettings of the current target. gyp_to_build_path: A function that converts paths relative to the - current gyp file to paths relative to the build direcotry. + current gyp file to paths relative to the build directory. """ info_plist = xcode_settings.GetPerTargetSetting('INFOPLIST_FILE') if not info_plist: return None, None, [], {} # The make generator doesn't support it, so forbid it everywhere - # to keep the generators more interchangable. + # to keep the generators more interchangeable. assert ' ' not in info_plist, ( "Spaces in Info.plist filenames not supported (%s)" % info_plist) diff --git a/tools/gyp/pylib/gyp/xcodeproj_file.py b/tools/gyp/pylib/gyp/xcodeproj_file.py index 0534f51fe5c..1e950dce8f0 100644 --- a/tools/gyp/pylib/gyp/xcodeproj_file.py +++ b/tools/gyp/pylib/gyp/xcodeproj_file.py @@ -220,7 +220,7 @@ class XCObject(object): an empty string ("", in the case of property_type str) or list ([], in the case of is_list True) from being set for the property. - default: Optional. If is_requried is True, default may be set + default: Optional. If is_required is True, default may be set to provide a default value for objects that do not supply their own value. If is_required is True and default is not provided, users of the class must supply their own diff --git a/tools/gyp/tools/pretty_gyp.py b/tools/gyp/tools/pretty_gyp.py index d01c692edcf..633048a59ad 100755 --- a/tools/gyp/tools/pretty_gyp.py +++ b/tools/gyp/tools/pretty_gyp.py @@ -18,7 +18,7 @@ # Regex to remove quoted strings when we're counting braces. # It takes into account quoted quotes, and makes sure that the quotes match. # NOTE: It does not handle quotes that span more than one line, or -# cases where an escaped quote is preceeded by an escaped backslash. +# cases where an escaped quote is preceded by an escaped backslash. QUOTE_RE_STR = r'(?P[\'"])(.*?)(? The files in this directory were written for the Node.js v0.12 effort. 
diff --git a/tools/lint-md.js b/tools/lint-md.js index 495f9536db6..e6eecf265c5 100644 --- a/tools/lint-md.js +++ b/tools/lint-md.js @@ -40838,62 +40838,10 @@ var remark = unified_1() .use(remarkStringify) .freeze(); -const _from = "remark@^11.0.2"; -const _id = "remark@11.0.2"; -const _inBundle = false; -const _integrity = "sha512-bh+eJgn8wgmbHmIBOuwJFdTVRVpl3fcVP6HxmpPWO0ULGP9Qkh6INJh0N5Uy7GqlV7DQYGoqaKiEIpM5LLvJ8w=="; -const _location = "/remark"; -const _phantomChildren = { -}; -const _requested = { - type: "range", - registry: true, - raw: "remark@^11.0.2", - name: "remark", - escapedName: "remark", - rawSpec: "^11.0.2", - saveSpec: null, - fetchSpec: "^11.0.2" -}; -const _requiredBy = [ - "/" -]; -const _resolved = "https://registry.npmjs.org/remark/-/remark-11.0.2.tgz"; -const _shasum = "12b90ea100ac3362b1976fa87a6e4e0ab5968202"; -const _spec = "remark@^11.0.2"; -const _where = "/mnt/c/orgs/nodejs/node-runtime/tools/node-lint-md-cli-rollup"; -const author = { - name: "Titus Wormer", - email: "tituswormer@gmail.com", - url: "https://wooorm.com" -}; -const bugs = { - url: "https://github.com/remarkjs/remark/issues" -}; -const bundleDependencies = false; -const contributors = [ - { - name: "Titus Wormer", - email: "tituswormer@gmail.com", - url: "https://wooorm.com" - } -]; -const dependencies = { - "remark-parse": "^7.0.0", - "remark-stringify": "^7.0.0", - unified: "^8.2.0" -}; -const deprecated$1 = false; +const name$1 = "remark"; +const version$1 = "11.0.2"; const description = "Markdown processor powered by plugins"; -const files = [ - "index.js", - "types/index.d.ts" -]; -const funding = { - type: "opencollective", - url: "https://opencollective.com/unified" -}; -const homepage = "https://remark.js.org"; +const license = "MIT"; const keywords = [ "unified", "remark", @@ -40907,83 +40855,77 @@ const keywords = [ "stringify", "process" ]; -const license = "MIT"; -const name$1 = "remark"; -const repository = { - type: "git", - url: "https://github.com/remarkjs/remark/tree/master/packages/remark" +const homepage = "https://remark.js.org"; +const repository = "https://github.com/remarkjs/remark/tree/master/packages/remark"; +const bugs = "https://github.com/remarkjs/remark/issues"; +const funding = { + type: "opencollective", + url: "https://opencollective.com/unified" +}; +const author = "Titus Wormer (https://wooorm.com)"; +const contributors = [ + "Titus Wormer (https://wooorm.com)" +]; +const files = [ + "index.js", + "types/index.d.ts" +]; +const types = "types/index.d.ts"; +const dependencies = { + "remark-parse": "^7.0.0", + "remark-stringify": "^7.0.0", + unified: "^8.2.0" }; const scripts = { test: "tape test.js" }; -const types = "types/index.d.ts"; -const version$1 = "11.0.2"; const xo = false; +const _resolved = "https://registry.npmjs.org/remark/-/remark-11.0.2.tgz"; +const _integrity = "sha512-bh+eJgn8wgmbHmIBOuwJFdTVRVpl3fcVP6HxmpPWO0ULGP9Qkh6INJh0N5Uy7GqlV7DQYGoqaKiEIpM5LLvJ8w=="; +const _from = "remark@11.0.2"; var _package = { - _from: _from, - _id: _id, - _inBundle: _inBundle, - _integrity: _integrity, - _location: _location, - _phantomChildren: _phantomChildren, - _requested: _requested, - _requiredBy: _requiredBy, - _resolved: _resolved, - _shasum: _shasum, - _spec: _spec, - _where: _where, - author: author, - bugs: bugs, - bundleDependencies: bundleDependencies, - contributors: contributors, - dependencies: dependencies, - deprecated: deprecated$1, + name: name$1, + version: version$1, description: description, - files: files, - funding: funding, - homepage: 
homepage, - keywords: keywords, license: license, - name: name$1, + keywords: keywords, + homepage: homepage, repository: repository, - scripts: scripts, + bugs: bugs, + funding: funding, + author: author, + contributors: contributors, + files: files, types: types, - version: version$1, - xo: xo + dependencies: dependencies, + scripts: scripts, + xo: xo, + _resolved: _resolved, + _integrity: _integrity, + _from: _from }; var _package$1 = /*#__PURE__*/Object.freeze({ __proto__: null, - _from: _from, - _id: _id, - _inBundle: _inBundle, - _integrity: _integrity, - _location: _location, - _phantomChildren: _phantomChildren, - _requested: _requested, - _requiredBy: _requiredBy, - _resolved: _resolved, - _shasum: _shasum, - _spec: _spec, - _where: _where, - author: author, - bugs: bugs, - bundleDependencies: bundleDependencies, - contributors: contributors, - dependencies: dependencies, - deprecated: deprecated$1, + name: name$1, + version: version$1, description: description, - files: files, - funding: funding, - homepage: homepage, - keywords: keywords, license: license, - name: name$1, + keywords: keywords, + homepage: homepage, repository: repository, - scripts: scripts, + bugs: bugs, + funding: funding, + author: author, + contributors: contributors, + files: files, types: types, - version: version$1, + dependencies: dependencies, + scripts: scripts, xo: xo, + _resolved: _resolved, + _integrity: _integrity, + _from: _from, 'default': _package }); @@ -41001,7 +40943,7 @@ const dependencies$1 = { "markdown-extensions": "^1.1.1", remark: "^11.0.2", "remark-lint": "^6.0.5", - "remark-preset-lint-node": "^1.12.0", + "remark-preset-lint-node": "^1.13.0", "unified-args": "^7.1.0" }; const main = "dist/index.js"; @@ -42476,14 +42418,19 @@ var plur = (word, plural, count) => { return Math.abs(count) === 1 ? 
word : plural; }; -var unistUtilPosition = createCommonjsModule(function (module, exports) { +var start$1 = factory$8('start'); +var end = factory$8('end'); -var position = exports; +var unistUtilPosition = position$1; -position.start = factory('start'); -position.end = factory('end'); +position$1.start = start$1; +position$1.end = end; -function factory(type) { +function position$1(node) { + return {start: start$1(node), end: end(node)} +} + +function factory$8(type) { point.displayName = type; return point @@ -42498,7 +42445,6 @@ function factory(type) { } } } -}); var unistUtilGenerated = generated; @@ -42519,7 +42465,7 @@ var remarkLintListItemBulletIndent = unifiedLintRule( listItemBulletIndent ); -var start$1 = unistUtilPosition.start; +var start$2 = unistUtilPosition.start; function listItemBulletIndent(tree, file) { var contents = String(file); @@ -42536,8 +42482,8 @@ function listItemBulletIndent(tree, file) { var reason; if (!unistUtilGenerated(item)) { - final = start$1(item.children[0]); - indent = contents.slice(start$1(item).offset, final.offset).match(/^\s*/)[0] + final = start$2(item.children[0]); + indent = contents.slice(start$2(item).offset, final.offset).match(/^\s*/)[0] .length; if (indent !== 0) { @@ -42558,7 +42504,7 @@ function listItemBulletIndent(tree, file) { var remarkLintListItemIndent = unifiedLintRule('remark-lint:list-item-indent', listItemIndent); -var start$2 = unistUtilPosition.start; +var start$3 = unistUtilPosition.start; var styles = {'tab-size': true, mixed: true, space: true}; @@ -42586,7 +42532,7 @@ function listItemIndent(tree, file, pref) { function visitItem(item) { var head = item.children[0]; - var final = start$2(head); + var final = start$3(head); var marker; var bulletSize; var style; @@ -42594,7 +42540,7 @@ function listItemIndent(tree, file, pref) { var reason; marker = contents - .slice(start$2(item).offset, final.offset) + .slice(start$3(item).offset, final.offset) .replace(/\[[x ]?]\s*$/i, ''); bulletSize = marker.trimRight().length; @@ -42645,8 +42591,8 @@ var remarkLintNoAutoLinkWithoutProtocol = unifiedLintRule( noAutoLinkWithoutProtocol ); -var start$3 = unistUtilPosition.start; -var end = unistUtilPosition.end; +var start$4 = unistUtilPosition.start; +var end$1 = unistUtilPosition.end; // Protocol expression. // See: . 
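The unist-util-position rewrite above boils down to an accessor factory: one helper stamps out the `start` and `end` getters, each of which tolerates nodes that carry no position (generated nodes). A rough Python rendering of the pattern, purely for illustration since the bundled code is JavaScript:

```python
def point_accessor(side):
    """Build a getter for a node's `side` ('start' or 'end') position point."""
    def point(node):
        # Fall back to empty values when the node has no position information,
        # which is how generated (non-source) nodes are represented.
        pos = ((node or {}).get('position') or {}).get(side) or {}
        return {'line': pos.get('line'),
                'column': pos.get('column'),
                'offset': pos.get('offset')}
    return point

start = point_accessor('start')
end = point_accessor('end')

node = {'type': 'text', 'position': {'start': {'line': 1, 'column': 1, 'offset': 0}}}
print(start(node))  # {'line': 1, 'column': 1, 'offset': 0}
print(end(node))    # all None: this node has no end point recorded
```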
@@ -42664,8 +42610,8 @@ function noAutoLinkWithoutProtocol(tree, file) { children = node.children; if ( - start$3(node).column === start$3(children[0]).column - 1 && - end(node).column === end(children[children.length - 1]).column + 1 && + start$4(node).column === start$4(children[0]).column - 1 && + end$1(node).column === end$1(children[children.length - 1]).column + 1 && !protocol$2.test(mdastUtilToString(node)) ) { file.message(reason, node); @@ -42730,8 +42676,8 @@ function noBlockquoteWithoutMarker(tree, file) { var remarkLintNoLiteralUrls = unifiedLintRule('remark-lint:no-literal-urls', noLiteralURLs); -var start$4 = unistUtilPosition.start; -var end$1 = unistUtilPosition.end; +var start$5 = unistUtilPosition.start; +var end$2 = unistUtilPosition.end; var mailto$3 = 'mailto:'; var reason$2 = 'Don’t use literal URLs without angle brackets'; @@ -42744,8 +42690,8 @@ function noLiteralURLs(tree, file) { if ( !unistUtilGenerated(node) && - start$4(node).column === start$4(children[0]).column && - end$1(node).column === end$1(children[children.length - 1]).column && + start$5(node).column === start$5(children[0]).column && + end$2(node).column === end$2(children[children.length - 1]).column && (node.url === mailto$3 + value || node.url === value) ) { file.message(reason$2, node); @@ -42758,7 +42704,7 @@ var remarkLintOrderedListMarkerStyle = unifiedLintRule( orderedListMarkerStyle ); -var start$5 = unistUtilPosition.start; +var start$6 = unistUtilPosition.start; var styles$1 = { ')': true, @@ -42793,7 +42739,7 @@ function orderedListMarkerStyle(tree, file, pref) { if (!unistUtilGenerated(child)) { marker = contents - .slice(start$5(child).offset, start$5(child.children[0]).offset) + .slice(start$6(child).offset, start$6(child.children[0]).offset) .replace(/\s|\d/g, '') .replace(/\[[x ]?]\s*$/i, ''); @@ -42914,8 +42860,8 @@ var remarkLintNoHeadingContentIndent = unifiedLintRule( noHeadingContentIndent ); -var start$6 = unistUtilPosition.start; -var end$2 = unistUtilPosition.end; +var start$7 = unistUtilPosition.start; +var end$3 = unistUtilPosition.end; function noHeadingContentIndent(tree, file) { var contents = String(file); @@ -42943,7 +42889,7 @@ function noHeadingContentIndent(tree, file) { type = mdastUtilHeadingStyle(node, 'atx'); if (type === 'atx' || type === 'atx-closed') { - initial = start$6(node); + initial = start$7(node); index = initial.offset; char = contents.charAt(index); @@ -42957,7 +42903,7 @@ function noHeadingContentIndent(tree, file) { } index = depth + (index - initial.offset); - head = start$6(children[0]).column; + head = start$7(children[0]).column; // Ignore empty headings. if (!head) { @@ -42975,15 +42921,15 @@ function noHeadingContentIndent(tree, file) { plur('space', diff) + ' before this heading’s content'; - file.message(reason, start$6(children[0])); + file.message(reason, start$7(children[0])); } } // Closed ATX-heading always must have a space between their content and the // final hashes, thus, there is no `add x spaces`. 
if (type === 'atx-closed') { - final = end$2(children[children.length - 1]); - diff = end$2(node).column - final.column - 1 - depth; + final = end$3(children[children.length - 1]); + diff = end$3(node).column - final.column - 1 - depth; if (diff) { reason = @@ -43213,8 +43159,8 @@ var remarkLintCheckboxCharacterStyle = unifiedLintRule( checkboxCharacterStyle ); -var start$7 = unistUtilPosition.start; -var end$3 = unistUtilPosition.end; +var start$8 = unistUtilPosition.start; +var end$4 = unistUtilPosition.end; var checked = {x: true, X: true}; var unchecked = {' ': true, '\t': true}; @@ -43259,8 +43205,8 @@ function checkboxCharacterStyle(tree, file, pref) { } type = types$1[node.checked]; - initial = start$7(node).offset; - final = (node.children.length === 0 ? end$3(node) : start$7(node.children[0])) + initial = start$8(node).offset; + final = (node.children.length === 0 ? end$4(node) : start$8(node.children[0])) .offset; // For a checkbox to be parsed, it must be followed by a whitespace. @@ -43298,8 +43244,8 @@ var remarkLintCheckboxContentIndent = unifiedLintRule( checkboxContentIndent ); -var start$8 = unistUtilPosition.start; -var end$4 = unistUtilPosition.end; +var start$9 = unistUtilPosition.start; +var end$5 = unistUtilPosition.end; var reason$9 = 'Checkboxes should be followed by a single character'; @@ -43319,9 +43265,9 @@ function checkboxContentIndent(tree, file) { return } - initial = start$8(node).offset; + initial = start$9(node).offset; /* istanbul ignore next - hard to test, couldn’t find a case. */ - final = (node.children.length === 0 ? end$4(node) : start$8(node.children[0])) + final = (node.children.length === 0 ? end$5(node) : start$9(node.children[0])) .offset; while (/[^\S\n]/.test(contents.charAt(final))) { @@ -43343,8 +43289,8 @@ function checkboxContentIndent(tree, file) { var remarkLintCodeBlockStyle = unifiedLintRule('remark-lint:code-block-style', codeBlockStyle); -var start$9 = unistUtilPosition.start; -var end$5 = unistUtilPosition.end; +var start$a = unistUtilPosition.start; +var end$6 = unistUtilPosition.end; var styles$2 = {null: true, fenced: true, indented: true}; @@ -43377,8 +43323,8 @@ function codeBlockStyle(tree, file, pref) { // Get the style of `node`. 
function check(node) { - var initial = start$9(node).offset; - var final = end$5(node).offset; + var initial = start$a(node).offset; + var final = end$6(node).offset; if (unistUtilGenerated(node)) { return null @@ -43415,8 +43361,8 @@ function definitionSpacing(tree, file) { var remarkLintFencedCodeFlag = unifiedLintRule('remark-lint:fenced-code-flag', fencedCodeFlag); -var start$a = unistUtilPosition.start; -var end$6 = unistUtilPosition.end; +var start$b = unistUtilPosition.start; +var end$7 = unistUtilPosition.end; var fence$2 = /^ {0,3}([~`])\1{2,}/; var reasonInvalid = 'Invalid code-language flag'; @@ -43447,7 +43393,7 @@ function fencedCodeFlag(tree, file, pref) { file.message(reasonInvalid, node); } } else { - value = contents.slice(start$a(node).offset, end$6(node).offset); + value = contents.slice(start$b(node).offset, end$7(node).offset); if (!allowEmpty && fence$2.test(value)) { file.message(reasonMissing, node); @@ -43520,7 +43466,7 @@ function fileExtension(tree, file, pref) { var remarkLintFinalDefinition = unifiedLintRule('remark-lint:final-definition', finalDefinition); -var start$b = unistUtilPosition.start; +var start$c = unistUtilPosition.start; function finalDefinition(tree, file) { var last = null; @@ -43528,7 +43474,7 @@ function finalDefinition(tree, file) { unistUtilVisit(tree, visitor, true); function visitor(node) { - var line = start$b(node).line; + var line = start$c(node).line; // Ignore generated nodes. if (node.type === 'root' || unistUtilGenerated(node)) { @@ -43609,8 +43555,8 @@ function headingStyle(tree, file, pref) { var remarkLintMaximumLineLength = unifiedLintRule('remark-lint:maximum-line-length', maximumLineLength); -var start$c = unistUtilPosition.start; -var end$7 = unistUtilPosition.end; +var start$d = unistUtilPosition.start; +var end$8 = unistUtilPosition.end; function maximumLineLength(tree, file, pref) { var style = typeof pref === 'number' && !isNaN(pref) ? pref : 80; @@ -43650,8 +43596,8 @@ function maximumLineLength(tree, file, pref) { return } - initial = start$c(node); - final = end$7(node); + initial = start$d(node); + final = end$8(node); // No whitelisting when starting after the border, or ending before it. if (initial.column > style || final.column < style) { @@ -43661,7 +43607,7 @@ function maximumLineLength(tree, file, pref) { // No whitelisting when there’s whitespace after the link. 
if ( next && - start$c(next).line === initial.line && + start$d(next).line === initial.line && (!next.value || /^(.+?[ \t].+?)/.test(next.value)) ) { return @@ -43673,7 +43619,7 @@ function maximumLineLength(tree, file, pref) { function ignore(node) { /* istanbul ignore else - Hard to test, as we only run this case on `position: true` */ if (!unistUtilGenerated(node)) { - whitelist(start$c(node).line - 1, end$7(node).line); + whitelist(start$d(node).line - 1, end$8(node).line); } } @@ -43794,7 +43740,7 @@ function noFileNameOuterDashes(tree, file) { var remarkLintNoHeadingIndent = unifiedLintRule('remark-lint:no-heading-indent', noHeadingIndent); -var start$d = unistUtilPosition.start; +var start$e = unistUtilPosition.start; function noHeadingIndent(tree, file) { var contents = String(file); @@ -43813,7 +43759,7 @@ function noHeadingIndent(tree, file) { return } - initial = start$d(node); + initial = start$e(node); begin = initial.offset; index = begin - 1; @@ -43839,7 +43785,7 @@ function noHeadingIndent(tree, file) { } } -var start$e = unistUtilPosition.start; +var start$f = unistUtilPosition.start; @@ -43862,7 +43808,7 @@ function noMultipleToplevelHeadings(tree, file, pref) { node ); } else { - duplicate = unistUtilStringifyPosition(start$e(node)); + duplicate = unistUtilStringifyPosition(start$f(node)); } } } @@ -43987,10 +43933,214 @@ function noTrailingSpaces(ast, file) { } } +var convert_1$1 = convert$2; + +function convert$2(test) { + if (typeof test === 'string') { + return typeFactory$1(test) + } + + if (test === null || test === undefined) { + return ok$2 + } + + if (typeof test === 'object') { + return ('length' in test ? anyFactory$1 : matchesFactory$1)(test) + } + + if (typeof test === 'function') { + return test + } + + throw new Error('Expected function, string, or object as test') +} + +function convertAll$1(tests) { + var results = []; + var length = tests.length; + var index = -1; + + while (++index < length) { + results[index] = convert$2(tests[index]); + } + + return results +} + +// Utility assert each property in `test` is represented in `node`, and each +// values are strictly equal. +function matchesFactory$1(test) { + return matches + + function matches(node) { + var key; + + for (key in test) { + if (node[key] !== test[key]) { + return false + } + } + + return true + } +} + +function anyFactory$1(tests) { + var checks = convertAll$1(tests); + var length = checks.length; + + return matches + + function matches() { + var index = -1; + + while (++index < length) { + if (checks[index].apply(this, arguments)) { + return true + } + } + + return false + } +} + +// Utility to convert a string into a function which checks a given node’s type +// for said string. +function typeFactory$1(test) { + return type + + function type(node) { + return Boolean(node && node.type === test) + } +} + +// Utility to return true. +function ok$2() { + return true +} + +var unistUtilVisitParents$1 = visitParents$1; + + + +var CONTINUE$2 = true; +var SKIP$2 = 'skip'; +var EXIT$2 = false; + +visitParents$1.CONTINUE = CONTINUE$2; +visitParents$1.SKIP = SKIP$2; +visitParents$1.EXIT = EXIT$2; + +function visitParents$1(tree, test, visitor, reverse) { + var is; + + if (typeof test === 'function' && typeof visitor !== 'function') { + reverse = visitor; + visitor = test; + test = null; + } + + is = convert_1$1(test); + + one(tree, null, []); + + // Visit a single node. 
+ function one(node, index, parents) { + var result = []; + var subresult; + + if (!test || is(node, index, parents[parents.length - 1] || null)) { + result = toResult$1(visitor(node, parents)); + + if (result[0] === EXIT$2) { + return result + } + } + + if (node.children && result[0] !== SKIP$2) { + subresult = toResult$1(all(node.children, parents.concat(node))); + return subresult[0] === EXIT$2 ? subresult : result + } + + return result + } + + // Visit children in `parent`. + function all(children, parents) { + var min = -1; + var step = reverse ? -1 : 1; + var index = (reverse ? children.length : min) + step; + var result; + + while (index > min && index < children.length) { + result = one(children[index], index, parents); + + if (result[0] === EXIT$2) { + return result + } + + index = typeof result[1] === 'number' ? result[1] : index + step; + } + } +} + +function toResult$1(value) { + if (value !== null && typeof value === 'object' && 'length' in value) { + return value + } + + if (typeof value === 'number') { + return [CONTINUE$2, value] + } + + return [value] +} + +var unistUtilVisit$1 = visit$1; + + + +var CONTINUE$3 = unistUtilVisitParents$1.CONTINUE; +var SKIP$3 = unistUtilVisitParents$1.SKIP; +var EXIT$3 = unistUtilVisitParents$1.EXIT; + +visit$1.CONTINUE = CONTINUE$3; +visit$1.SKIP = SKIP$3; +visit$1.EXIT = EXIT$3; + +function visit$1(tree, test, visitor, reverse) { + if (typeof test === 'function' && typeof visitor !== 'function') { + reverse = visitor; + visitor = test; + test = null; + } + + unistUtilVisitParents$1(tree, test, overload, reverse); + + function overload(node, parents) { + var parent = parents[parents.length - 1]; + var index = parent ? parent.children.indexOf(node) : null; + return visitor(node, index, parent) + } +} + var remarkLintProhibitedStrings = unifiedLintRule('remark-lint:prohibited-strings', prohibitedStrings); function testProhibited(val, content) { - const re = new RegExp(`(\\.|@[a-z0-9/-]*)?\\b(${val.no})\\b(\\.\\w)?`, 'g'); + let regexpString = '(\\.|@[a-z0-9/-]*)?'; + + // If it starts with a letter, make sure it is a word break. + if (/^\b/.test(val.no)) { + regexpString += '\\b'; + } + regexpString += `(${val.no})`; + + // If it ends with a letter, make sure it is a word break. 
+ if (/\b$/.test(val.no)) { + regexpString += '\\b'; + } + regexpString += '(\\.\\w)?'; + const re = new RegExp(regexpString, 'g'); let result = null; while (result = re.exec(content)) { @@ -44003,7 +44153,7 @@ function testProhibited(val, content) { } function prohibitedStrings(ast, file, strings) { - unistUtilVisit(ast, 'text', checkText); + unistUtilVisit$1(ast, 'text', checkText); function checkText(node) { const content = node.value; @@ -44024,8 +44174,8 @@ var rule = unifiedLintRule; var remarkLintRuleStyle = rule('remark-lint:rule-style', ruleStyle); -var start$f = unistUtilPosition.start; -var end$8 = unistUtilPosition.end; +var start$g = unistUtilPosition.start; +var end$9 = unistUtilPosition.end; function ruleStyle(tree, file, pref) { var contents = String(file); @@ -44041,8 +44191,8 @@ function ruleStyle(tree, file, pref) { unistUtilVisit(tree, 'thematicBreak', visitor); function visitor(node) { - var initial = start$f(node).offset; - var final = end$8(node).offset; + var initial = start$g(node).offset; + var final = end$9(node).offset; var rule; if (!unistUtilGenerated(node)) { @@ -44095,8 +44245,8 @@ function strongMarker(tree, file, pref) { var remarkLintTableCellPadding = unifiedLintRule('remark-lint:table-cell-padding', tableCellPadding); -var start$g = unistUtilPosition.start; -var end$9 = unistUtilPosition.end; +var start$h = unistUtilPosition.start; +var end$a = unistUtilPosition.end; var styles$3 = {null: true, padded: true, compact: true}; @@ -44144,8 +44294,8 @@ function tableCellPadding(tree, file, pref) { next = cells[column + 1]; fence = contents.slice( - cell ? end$9(cell).offset : start$g(row).offset, - next ? start$g(next).offset : end$9(row).offset + cell ? end$a(cell).offset : start$h(row).offset, + next ? start$h(next).offset : end$a(row).offset ); pos = fence.indexOf('|'); @@ -44222,13 +44372,13 @@ function tableCellPadding(tree, file, pref) { } function size(node) { - return end$9(node).offset - start$g(node).offset + return end$a(node).offset - start$h(node).offset } var remarkLintTablePipes = unifiedLintRule('remark-lint:table-pipes', tablePipes); -var start$h = unistUtilPosition.start; -var end$a = unistUtilPosition.end; +var start$i = unistUtilPosition.start; +var end$b = unistUtilPosition.end; var reasonStart = 'Missing initial pipe in table fence'; var reasonEnd = 'Missing final pipe in table fence'; @@ -44256,15 +44406,15 @@ function tablePipes(tree, file) { cells = row.children; head = cells[0]; tail = cells[cells.length - 1]; - initial = contents.slice(start$h(row).offset, start$h(head).offset); - final = contents.slice(end$a(tail).offset, end$a(row).offset); + initial = contents.slice(start$i(row).offset, start$i(head).offset); + final = contents.slice(end$b(tail).offset, end$b(row).offset); if (initial.indexOf('|') === -1) { - file.message(reasonStart, start$h(row)); + file.message(reasonStart, start$i(row)); } if (final.indexOf('|') === -1) { - file.message(reasonEnd, end$a(row)); + file.message(reasonEnd, end$b(row)); } } } @@ -44276,7 +44426,7 @@ var remarkLintUnorderedListMarkerStyle = unifiedLintRule( unorderedListMarkerStyle ); -var start$i = unistUtilPosition.start; +var start$j = unistUtilPosition.start; var styles$4 = { '-': true, @@ -44312,7 +44462,7 @@ function unorderedListMarkerStyle(tree, file, pref) { if (!unistUtilGenerated(child)) { marker = contents - .slice(start$i(child).offset, start$i(child.children[0]).offset) + .slice(start$j(child).offset, start$j(child.children[0]).offset) .replace(/\[[x ]?]\s*$/i, '') .replace(/\s/g, 
''); @@ -44372,8 +44522,9 @@ var plugins$2 = [ { no: "hostname", yes: "host name" }, { no: "[Jj]avascript", yes: "JavaScript" }, { no: "Node", yes: "Node.js" }, - { no: "Node.JS", yes: "Node.js" }, - { no: "node.js", yes: "Node.js" }, + { no: "Node\\.JS", yes: "Node.js" }, + { no: "node\\.js", yes: "Node.js" }, + { no: "Node\\.js's?", yes: "the Node.js" }, { no: "[Nn]ote that", yes: "" }, { no: "Rfc", yes: "RFC" }, { no: "[Rr][Ff][Cc]\\d+", yes: "RFC " }, diff --git a/tools/make-v8.sh b/tools/make-v8.sh index 37bc57f4994..e4024330b16 100755 --- a/tools/make-v8.sh +++ b/tools/make-v8.sh @@ -28,7 +28,7 @@ if [[ "$ARCH" == "s390x" ]] || [[ "$ARCH" == "ppc64le" ]]; then g++ --version gcc --version export PKG_CONFIG_PATH=$BUILD_TOOLS/pkg-config - gn gen -v out.gn/$BUILD_ARCH_TYPE --args="is_component_build=false is_debug=false use_goma=false goma_dir=\"None\" use_custom_libcxx=false v8_target_cpu=\"$TARGET_ARCH\" target_cpu=\"$TARGET_ARCH\"" + gn gen -v out.gn/$BUILD_ARCH_TYPE --args="is_component_build=false is_debug=false use_goma=false goma_dir=\"None\" use_custom_libcxx=false v8_target_cpu=\"$TARGET_ARCH\" target_cpu=\"$TARGET_ARCH\" v8_enable_backtrace=true" ninja -v -C out.gn/$BUILD_ARCH_TYPE d8 cctest inspector-test else PATH=~/_depot_tools:$PATH tools/dev/v8gen.py $BUILD_ARCH_TYPE --no-goma $V8_BUILD_OPTIONS diff --git a/tools/msvs/msi/product.wxs b/tools/msvs/msi/product.wxs index f008ea7f9ad..8a278637e60 100755 --- a/tools/msvs/msi/product.wxs +++ b/tools/msvs/msi/product.wxs @@ -23,8 +23,8 @@ Compressed="yes" InstallScope="perMachine"/> - - <![CDATA[Installed OR (VersionNT >= 601)]]> + + <![CDATA[Installed OR (VersionNT >= 603) OR (VersionNT >= 602 AND MsiNTProductType <> 1)]]> diff --git a/tools/node-lint-md-cli-rollup/package-lock.json b/tools/node-lint-md-cli-rollup/package-lock.json index af4ac3812a0..db690e74132 100644 --- a/tools/node-lint-md-cli-rollup/package-lock.json +++ b/tools/node-lint-md-cli-rollup/package-lock.json @@ -301,17 +301,17 @@ "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" }, "fault": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/fault/-/fault-1.0.3.tgz", - "integrity": "sha512-sfFuP4X0hzrbGKjAUNXYvNqsZ5F6ohx/dZ9I0KQud/aiZNwg263r5L9yGB0clvXHCkzXh5W3t7RSHchggYIFmA==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/fault/-/fault-1.0.4.tgz", + "integrity": "sha512-CJ0HCB5tL5fYTEA7ToAq5+kTwd++Borf1/bifxd9iT70QcXr4MRrO3Llf8Ifs70q+SJcGHFtnIE/Nw6giCtECA==", "requires": { - "format": "^0.2.2" + "format": "^0.2.0" } }, "figures": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/figures/-/figures-3.1.0.tgz", - "integrity": "sha512-ravh8VRXqHuMvZt/d8GblBeqDMkdJMBdv/2KntFH+ra5MXkO7nxNKpzQ3n6QD/2da1kH0aWmNISdvhM7gl2gVg==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz", + "integrity": "sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==", "requires": { "escape-string-regexp": "^1.0.5" } @@ -477,9 +477,9 @@ "integrity": "sha512-zxQ9//Q3D/34poZf8fiy3m3XVpbQc7ren15iKqrTtLPwkPD/t3Scy9Imp63FujULGxuK0ZlCwoo5xNpktFgbOA==" }, "is-hidden": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/is-hidden/-/is-hidden-1.1.2.tgz", - "integrity": "sha512-kytBeNVW2QTIqZdJBDKIjP+EkUTzDT07rsc111w/gxqR6wK3ODkOswcpxgED6HU6t7fEhOxqojVZ2a2kU9rj+A==" + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/is-hidden/-/is-hidden-1.1.3.tgz", + "integrity":
"sha512-FFzhGKA9h59OFxeaJl0W5ILTYetI8WsdqdofKr69uLKZdV6hbDKxj8vkpG3L9uS/6Q/XYh1tkXm6xwRGFweETA==" }, "is-module": { "version": "1.0.0", @@ -613,9 +613,9 @@ } }, "minimist": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz", - "integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ=" + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", + "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==" }, "ms": { "version": "2.1.2", @@ -708,9 +708,9 @@ } }, "readable-stream": { - "version": "3.5.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.5.0.tgz", - "integrity": "sha512-gSz026xs2LfxBPudDuI41V1lka8cxg64E66SGe78zJlsUofOg/yqwezdIcdfwik6B4h8LFmWPA9ef9X3FiNFLA==", + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", "requires": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", @@ -1150,12 +1150,38 @@ } }, "remark-lint-prohibited-strings": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/remark-lint-prohibited-strings/-/remark-lint-prohibited-strings-1.2.0.tgz", - "integrity": "sha512-k3Sa0Kk+OJHMnsaRmLzq85BomgVOHbDBq3s5v4BJ6bVNWwYM9KrunNb0iAGomM7l+HfosYoa9Q31xfCuwsWZ4A==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/remark-lint-prohibited-strings/-/remark-lint-prohibited-strings-1.2.1.tgz", + "integrity": "sha512-i3LatoJn/eHkgawdi3eoynikQa5zIEDX+GYcvu4ns5LsOvIrT8WcuvgYQ2kbEFbV0KTy7yBAGLJ9040xs1ssXA==", "requires": { "unified-lint-rule": "^1.0.2", - "unist-util-visit": "^1.2.0" + "unist-util-visit": "^2.0.0" + }, + "dependencies": { + "unist-util-is": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-4.0.2.tgz", + "integrity": "sha512-Ofx8uf6haexJwI1gxWMGg6I/dLnF2yE+KibhD3/diOqY2TinLcqHXCV6OI5gFVn3xQqDH+u0M625pfKwIwgBKQ==" + }, + "unist-util-visit": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-2.0.2.tgz", + "integrity": "sha512-HoHNhGnKj6y+Sq+7ASo2zpVdfdRifhTgX2KTU3B/sO/TTlZchp7E3S4vjRzDJ7L60KmrCPsQkVK3lEF3cz36XQ==", + "requires": { + "@types/unist": "^2.0.0", + "unist-util-is": "^4.0.0", + "unist-util-visit-parents": "^3.0.0" + } + }, + "unist-util-visit-parents": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-3.0.2.tgz", + "integrity": "sha512-yJEfuZtzFpQmg1OSCyS9M5NJRrln/9FbYosH3iW0MG402QbdbaB8ZESwUv9RO6nRfLAKvWcMxCwdLWOov36x/g==", + "requires": { + "@types/unist": "^2.0.0", + "unist-util-is": "^4.0.0" + } + } } }, "remark-lint-rule-style": { @@ -1246,9 +1272,9 @@ } }, "remark-preset-lint-node": { - "version": "1.12.0", - "resolved": "https://registry.npmjs.org/remark-preset-lint-node/-/remark-preset-lint-node-1.12.0.tgz", - "integrity": "sha512-Un9RH6cSLgI/fECdgFh9cxRjYVtnwmxsRPwJIsKjX9aOIVM0ohRCPeJ/Sh4nhBtL7PUnF2qMsIwt9b8OlL9HnA==", + "version": "1.13.0", + "resolved": "https://registry.npmjs.org/remark-preset-lint-node/-/remark-preset-lint-node-1.13.0.tgz", + "integrity": "sha512-UNAoY4wl672d0qE+LM5rA0ILOTJN+siNGj3/qa5Zvl7nMIUwqMcz0G266Ck6OL6GOrpys/e4EOrkXiitEdEqNA==", "requires": { "remark-lint": "^6.0.5", "remark-lint-blockquote-indentation": "^1.0.3", @@ -1275,7 +1301,7 @@ "remark-lint-no-table-indentation": "^1.0.4", "remark-lint-no-tabs": 
"^1.0.3", "remark-lint-no-trailing-spaces": "^2.0.1", - "remark-lint-prohibited-strings": "^1.2.0", + "remark-lint-prohibited-strings": "^1.2.1", "remark-lint-rule-style": "^1.0.3", "remark-lint-strong-marker": "^1.0.3", "remark-lint-table-cell-padding": "^1.0.4", @@ -1618,9 +1644,9 @@ "integrity": "sha512-sVZZX3+kspVNmLWBPAB6r+7D9ZgAFPNWm66f7YNb420RlQSbn+n8rG8dGZSkrER7ZIXGQYNm5pqC3v3HopH24A==" }, "unist-util-position": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-3.0.4.tgz", - "integrity": "sha512-tWvIbV8goayTjobxDIr4zVTyG+Q7ragMSMeKC3xnPl9xzIc0+she8mxXLM3JVNDDsfARPbCd3XdzkyLdo7fF3g==" + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-3.1.0.tgz", + "integrity": "sha512-w+PkwCbYSFw8vpgWD0v7zRCl1FpY3fjDSQ3/N/wNd9Ffa4gPi8+4keqt99N3XW6F99t/mUzp2xAhNmfKWp95QA==" }, "unist-util-remove-position": { "version": "1.1.4", diff --git a/tools/node-lint-md-cli-rollup/package.json b/tools/node-lint-md-cli-rollup/package.json index 67cad50ce21..be155b49e95 100644 --- a/tools/node-lint-md-cli-rollup/package.json +++ b/tools/node-lint-md-cli-rollup/package.json @@ -13,7 +13,7 @@ "markdown-extensions": "^1.1.1", "remark": "^11.0.2", "remark-lint": "^6.0.5", - "remark-preset-lint-node": "^1.12.0", + "remark-preset-lint-node": "^1.13.0", "unified-args": "^7.1.0" }, "main": "dist/index.js", diff --git a/tools/node-lint-md-cli-rollup/rollup.config.js b/tools/node-lint-md-cli-rollup/rollup.config.js index 73770d8836a..49b9817ca58 100644 --- a/tools/node-lint-md-cli-rollup/rollup.config.js +++ b/tools/node-lint-md-cli-rollup/rollup.config.js @@ -34,8 +34,7 @@ module.exports = { if (normID === '/node_modules/unified-args/lib/options.js') { return code.replace('\'./schema\'', '\'./schema.json\''); } - if (normID === '/node_modules/chokidar/lib/fsevents-handler.js' && - process.platform !== 'darwin') { + if (normID === '/node_modules/chokidar/lib/fsevents-handler.js') { return code.replace( 'fsevents = require(\'fsevents\');', 'fsevents = undefined;' ); diff --git a/tools/osx-codesign.sh b/tools/osx-codesign.sh index 6a954c737fa..7ca80ca7462 100644 --- a/tools/osx-codesign.sh +++ b/tools/osx-codesign.sh @@ -8,4 +8,13 @@ if [ "X$SIGN" == "X" ]; then exit 0 fi -codesign -s "$SIGN" "$PKGDIR"/bin/node +# All macOS executable binaries in the bundle must be codesigned with the +# hardened runtime enabled. 
diff --git a/tools/osx-entitlements.plist b/tools/osx-entitlements.plist
new file mode 100644
index 00000000000..555c10f7ff8
--- /dev/null
+++ b/tools/osx-entitlements.plist
@@ -0,0 +1,16 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+  <key>com.apple.security.cs.allow-jit</key>
+  <true/>
+  <key>com.apple.security.cs.allow-unsigned-executable-memory</key>
+  <true/>
+  <key>com.apple.security.cs.disable-executable-page-protection</key>
+  <true/>
+  <key>com.apple.security.cs.allow-dyld-environment-variables</key>
+  <true/>
+  <key>com.apple.security.cs.disable-library-validation</key>
+  <true/>
+</dict>
+</plist>
diff --git a/tools/osx-gon-config.json.tmpl b/tools/osx-gon-config.json.tmpl
new file mode 100644
index 00000000000..3ea16465fc1
--- /dev/null
+++ b/tools/osx-gon-config.json.tmpl
@@ -0,0 +1,12 @@
+{
+  "notarize": [{
+    "path": "node-{{pkgid}}.pkg",
+    "bundle_id": "org.nodejs.pkg.{{pkgid}}",
+    "staple": true
+  }],
+
+  "apple_id": {
+    "username": "{{appleid}}",
+    "password": "@env:NOTARIZATION_PASSWORD"
+  }
+}
diff --git a/tools/osx-notarize.sh b/tools/osx-notarize.sh
new file mode 100755
index 00000000000..97bb0912722
--- /dev/null
+++ b/tools/osx-notarize.sh
@@ -0,0 +1,37 @@
+#!/bin/bash
+
+# Uses gon, from https://github.com/mitchellh/gon, to notarize a generated node-<pkgid>.pkg file
+# with Apple for installation on macOS Catalina and later as validated by Gatekeeper.
+
+set -e
+
+gon_version="0.2.2"
+gon_exe="${HOME}/.gon/gon_${gon_version}"
+
+__dirname="$(CDPATH= cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+pkgid="$1"
+
+if [ "X${pkgid}" == "X" ]; then
+  echo "Usage: $0 <pkgid>"
+  exit 1
+fi
+
+if [ "X$NOTARIZATION_ID" == "X" ]; then
+  echo "No NOTARIZATION_ID environment var. Skipping notarization."
+  exit 0
+fi
+
+set -x
+
+mkdir -p "${HOME}/.gon/"
+
+if [ ! -f "${gon_exe}" ]; then
+  curl -sL "https://github.com/mitchellh/gon/releases/download/v${gon_version}/gon_${gon_version}_macos.zip" -o "${gon_exe}.zip"
+  (cd "${HOME}/.gon/" && rm -f gon && unzip "${gon_exe}.zip" && mv gon "${gon_exe}")
+fi
+
+cat tools/osx-gon-config.json.tmpl \
+  | sed -e "s/{{appleid}}/${NOTARIZATION_ID}/" -e "s/{{pkgid}}/${pkgid}/" \
+  > gon-config.json
+
+"${gon_exe}" -log-level=info gon-config.json
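
Taken together, the release flow these scripts assume looks roughly like the following. The identities, package id, and ordering here are illustrative assumptions, not commands taken from the actual Node.js release tooling:

    export SIGN="Developer ID Application: Example (TEAMID)"  # signing identity (assumed)
    export NOTARIZATION_ID="releaser@example.com"             # Apple ID consumed by gon
    export NOTARIZATION_PASSWORD="app-specific-password"      # read via @env: in the template

    sh tools/osx-codesign.sh           # hardened-runtime signature on bin/node
    # ... the .pkg installer is built here, e.g. node-v13.9.0.pkg ...
    sh tools/osx-notarize.sh v13.9.0   # renders gon-config.json and submits the pkg

Writing gon-config.json into the working directory is also why the patch adds /gon-config.json to .gitignore: the rendered file embeds the Apple ID and should never be committed.
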