diff --git a/.github/dictionary.txt b/.github/dictionary.txt index 71e5ed28d0..f82d7a7920 100644 --- a/.github/dictionary.txt +++ b/.github/dictionary.txt @@ -14,3 +14,9 @@ additionals SECG Certicom RSAES +dialback +chacha +peerStore +xxhandshake +zerolen +connmanager \ No newline at end of file diff --git a/packages/connection-encrypter-noise/.aegir.js b/packages/connection-encrypter-noise/.aegir.js new file mode 100644 index 0000000000..95c3276d23 --- /dev/null +++ b/packages/connection-encrypter-noise/.aegir.js @@ -0,0 +1,7 @@ + +/** @type {import('aegir').PartialOptions} */ +export default { + docs: { + entryPoint: "src/index.ts" + } +} diff --git a/packages/connection-encrypter-noise/README.md b/packages/connection-encrypter-noise/README.md new file mode 100644 index 0000000000..177b73b941 --- /dev/null +++ b/packages/connection-encrypter-noise/README.md @@ -0,0 +1,95 @@ +# @chainsafe/libp2p-noise + +![npm](https://img.shields.io/npm/v/@chainsafe/libp2p-noise) +[![](https://img.shields.io/github/actions/workflow/status/ChainSafe/js-libp2p-noise/js-test-and-release.yml?branch=master)](https://github.com/ChainSafe/js-libp2p-noise/actions) +[![](https://img.shields.io/badge/project-libp2p-yellow.svg?style=flat-square)](https://libp2p.io/) +![](https://img.shields.io/github/issues-raw/ChainSafe/js-libp2p-noise) +[![License Apache 2.0](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0) +[![License MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) +![](https://img.shields.io/badge/npm-%3E%3D7.0.0-orange.svg?style=flat-square) +![](https://img.shields.io/badge/Node.js-%3E%3D16.0.0-orange.svg?style=flat-square) +![](https://img.shields.io/badge/browsers-last%202%20versions%2C%20not%20ie%20%3C%3D11-orange) +[![Twitter](https://img.shields.io/twitter/follow/ChainSafeth.svg?label=Twitter)](https://twitter.com/ChainSafeth) +[![Discord](https://img.shields.io/discord/593655374469660673.svg?label=Discord\&logo=discord)](https://discord.gg/Q6A3YA2) + +> Noise libp2p handshake for js-libp2p + +# About + + + +This repository contains TypeScript implementation of noise protocol, an encryption protocol used in libp2p. + +## Usage + +Install with `yarn add @chainsafe/libp2p-noise` or `npm i @chainsafe/libp2p-noise`. + +Example of using default noise configuration and passing it to the libp2p config: + +```ts +import {createLibp2p} from "libp2p" +import {noise} from "@chainsafe/libp2p-noise" + +//custom noise configuration, pass it instead of `noise()` +//x25519 private key +const n = noise({ staticNoiseKey }); + +const libp2p = await createLibp2p({ + connectionEncrypters: [noise()], + //... other options +}) +``` + +See the [NoiseInit](https://github.com/ChainSafe/js-libp2p-noise/blob/master/src/noise.ts#L22-L30) interface for noise configuration options. + +## API + +This module exposes an implementation of the [ConnectionEncrypter](https://libp2p.github.io/js-libp2p/interfaces/_libp2p_interface.ConnectionEncrypter.html) interface. + +## Bring your own crypto + +You can provide a custom crypto implementation (instead of the default, based on [@noble](https://paulmillr.com/noble/)) by adding a `crypto` field to the init argument passed to the `Noise` factory. 
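
As a rough illustration, here is a minimal sketch of wiring in a custom implementation. It assumes you start from the package's exported `pureJsCrypto` object and only override the primitives you care about; the `hashSHA256` override below simply delegates to `@noble/hashes` (which the pure-JS implementation already uses), so treat it as a placeholder for your own primitive, not a recommendation.

```ts
import { noise, pureJsCrypto } from '@chainsafe/libp2p-noise'
import { sha256 } from '@noble/hashes/sha2'
import { createLibp2p } from 'libp2p'
import type { ICryptoInterface } from '@chainsafe/libp2p-noise'

// Start from the bundled pure-JS crypto and replace only selected primitives.
// Anything not overridden falls back to the pure-JS implementation.
const myCrypto: ICryptoInterface = {
  ...pureJsCrypto,
  hashSHA256 (data) {
    // Placeholder override: reuses @noble/hashes here, swap in your own SHA-256.
    return sha256(data.subarray())
  }
}

const libp2p = await createLibp2p({
  connectionEncrypters: [noise({ crypto: myCrypto })]
  // ... other options
})
```
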
+ +The implementation must conform to the `ICryptoInterface`, defined in + +# Install + +```console +$ npm i @chainsafe/libp2p-noise +``` + +## Browser ` +``` + +# API Docs + +- + +# License + +Licensed under either of + +- Apache 2.0, ([LICENSE-APACHE](https://github.com/ChainSafe/js-libp2p-noise/LICENSE-APACHE) / ) +- MIT ([LICENSE-MIT](https://github.com/ChainSafe/js-libp2p-noise/LICENSE-MIT) / ) + +# Contribution + +Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions. diff --git a/packages/connection-encrypter-noise/package.json b/packages/connection-encrypter-noise/package.json new file mode 100644 index 0000000000..fcfe5e0ef6 --- /dev/null +++ b/packages/connection-encrypter-noise/package.json @@ -0,0 +1,188 @@ +{ + "name": "@chainsafe/libp2p-noise", + "version": "16.1.4", + "description": "Noise libp2p handshake for js-libp2p", + "author": "ChainSafe ", + "license": "Apache-2.0 OR MIT", + "homepage": "https://github.com/ChainSafe/js-libp2p-noise#readme", + "repository": { + "type": "git", + "url": "git+https://github.com/ChainSafe/js-libp2p-noise.git" + }, + "bugs": { + "url": "https://github.com/ChainSafe/js-libp2p-noise/issues" + }, + "publishConfig": { + "access": "public", + "provenance": true + }, + "keywords": [ + "crypto", + "libp2p", + "noise" + ], + "type": "module", + "types": "./dist/src/index.d.ts", + "files": [ + "src", + "dist", + "!dist/test", + "!**/*.tsbuildinfo" + ], + "exports": { + ".": { + "types": "./dist/src/index.d.ts", + "import": "./dist/src/index.js" + } + }, + "release": { + "branches": [ + "master" + ], + "plugins": [ + [ + "@semantic-release/commit-analyzer", + { + "preset": "conventionalcommits", + "releaseRules": [ + { + "breaking": true, + "release": "major" + }, + { + "revert": true, + "release": "patch" + }, + { + "type": "feat", + "release": "minor" + }, + { + "type": "fix", + "release": "patch" + }, + { + "type": "docs", + "release": "patch" + }, + { + "type": "test", + "release": "patch" + }, + { + "type": "deps", + "release": "patch" + }, + { + "scope": "no-release", + "release": false + } + ] + } + ], + [ + "@semantic-release/release-notes-generator", + { + "preset": "conventionalcommits", + "presetConfig": { + "types": [ + { + "type": "feat", + "section": "Features" + }, + { + "type": "fix", + "section": "Bug Fixes" + }, + { + "type": "chore", + "section": "Trivial Changes" + }, + { + "type": "docs", + "section": "Documentation" + }, + { + "type": "deps", + "section": "Dependencies" + }, + { + "type": "test", + "section": "Tests" + } + ] + } + } + ], + "@semantic-release/changelog", + "@semantic-release/npm", + "@semantic-release/github", + [ + "@semantic-release/git", + { + "assets": [ + "CHANGELOG.md", + "package.json" + ] + } + ] + ] + }, + "scripts": { + "bench": "node benchmarks/benchmark.js", + "clean": "aegir clean", + "dep-check": "aegir dep-check", + "build": "aegir build", + "lint": "aegir lint", + "lint:fix": "aegir lint --fix", + "test": "aegir test", + "test:node": "aegir test -t node", + "test:browser": "aegir test -t browser -t webworker", + "test:electron-main": "aegir test -t electron-main", + "test:interop": "aegir test -t node -f dist/test/interop.js", + "docs": "aegir docs", + "proto:gen": "protons ./src/proto/payload.proto", + "prepublish": "npm run build", + "release": "aegir release" + }, + "dependencies": { + "@chainsafe/as-chacha20poly1305": 
"^0.1.0", + "@chainsafe/as-sha256": "^1.0.0", + "@libp2p/crypto": "^5.0.0", + "@libp2p/interface": "^2.9.0", + "@libp2p/peer-id": "^5.0.0", + "@libp2p/utils": "^6.7.1", + "@noble/ciphers": "^1.1.3", + "@noble/curves": "^1.1.0", + "@noble/hashes": "^1.3.1", + "protons-runtime": "^5.5.0", + "uint8arraylist": "^2.4.3", + "uint8arrays": "^5.0.0", + "wherearewe": "^2.0.1" + }, + "devDependencies": { + "@chainsafe/libp2p-yamux": "^7.0.0", + "@libp2p/daemon-client": "^9.0.0", + "@libp2p/daemon-server": "^8.0.0", + "@libp2p/interface-compliance-tests": "^6.0.0", + "@libp2p/interop": "^13.0.0", + "@libp2p/logger": "^5.0.0", + "@libp2p/tcp": "^10.0.0", + "@multiformats/multiaddr": "^12.1.0", + "@types/sinon": "^17.0.1", + "aegir": "^47.0.18", + "execa": "^9.3.0", + "go-libp2p": "^1.0.3", + "iso-random-stream": "^2.0.2", + "libp2p": "^2.0.0", + "mkdirp": "^3.0.0", + "multiformats": "^13.2.2", + "p-defer": "^4.0.0", + "protons": "^7.6.0", + "sinon": "^21.0.0", + "sinon-ts": "^2.0.0" + }, + "browser": { + "./dist/src/crypto/index.js": "./dist/src/crypto/index.browser.js" + } +} diff --git a/packages/connection-encrypter-noise/src/constants.ts b/packages/connection-encrypter-noise/src/constants.ts new file mode 100644 index 0000000000..952bb74448 --- /dev/null +++ b/packages/connection-encrypter-noise/src/constants.ts @@ -0,0 +1,5 @@ +export const NOISE_MSG_MAX_LENGTH_BYTES = 65535 +export const NOISE_MSG_MAX_LENGTH_BYTES_WITHOUT_TAG = NOISE_MSG_MAX_LENGTH_BYTES - 16 + +export const DUMP_SESSION_KEYS = Boolean(globalThis.process?.env?.DUMP_SESSION_KEYS) +export const CHACHA_TAG_LENGTH = 16 diff --git a/packages/connection-encrypter-noise/src/crypto.ts b/packages/connection-encrypter-noise/src/crypto.ts new file mode 100644 index 0000000000..75c0f57813 --- /dev/null +++ b/packages/connection-encrypter-noise/src/crypto.ts @@ -0,0 +1,27 @@ +import type { ICrypto, KeyPair } from './types.js' +import type { Uint8ArrayList } from 'uint8arraylist' + +/** Underlying crypto implementation, meant to be overridable */ +export interface ICryptoInterface { + hashSHA256(data: Uint8Array | Uint8ArrayList): Uint8Array + + getHKDF(ck: Uint8Array, ikm: Uint8Array): [Uint8Array, Uint8Array, Uint8Array] + + generateX25519KeyPair(): KeyPair + generateX25519KeyPairFromSeed(seed: Uint8Array): KeyPair + generateX25519SharedKey(privateKey: Uint8Array | Uint8ArrayList, publicKey: Uint8Array | Uint8ArrayList): Uint8Array + + chaCha20Poly1305Encrypt(plaintext: Uint8Array | Uint8ArrayList, nonce: Uint8Array, ad: Uint8Array, k: Uint8Array): Uint8ArrayList | Uint8Array + chaCha20Poly1305Decrypt(ciphertext: Uint8Array | Uint8ArrayList, nonce: Uint8Array, ad: Uint8Array, k: Uint8Array, dst?: Uint8Array): Uint8ArrayList | Uint8Array +} + +export function wrapCrypto (crypto: ICryptoInterface): ICrypto { + return { + generateKeypair: crypto.generateX25519KeyPair, + dh: (keypair, publicKey) => crypto.generateX25519SharedKey(keypair.privateKey, publicKey).subarray(0, 32), + encrypt: crypto.chaCha20Poly1305Encrypt, + decrypt: crypto.chaCha20Poly1305Decrypt, + hash: crypto.hashSHA256, + hkdf: crypto.getHKDF + } +} diff --git a/packages/connection-encrypter-noise/src/crypto/index.browser.ts b/packages/connection-encrypter-noise/src/crypto/index.browser.ts new file mode 100644 index 0000000000..39c9ea7d01 --- /dev/null +++ b/packages/connection-encrypter-noise/src/crypto/index.browser.ts @@ -0,0 +1,3 @@ +import { pureJsCrypto } from './js.js' + +export const defaultCrypto = pureJsCrypto diff --git 
a/packages/connection-encrypter-noise/src/crypto/index.ts b/packages/connection-encrypter-noise/src/crypto/index.ts new file mode 100644 index 0000000000..6d3639bb9e --- /dev/null +++ b/packages/connection-encrypter-noise/src/crypto/index.ts @@ -0,0 +1,219 @@ +import crypto from 'node:crypto' +import { newInstance, ChaCha20Poly1305 } from '@chainsafe/as-chacha20poly1305' +import { digest } from '@chainsafe/as-sha256' +import { Uint8ArrayList } from 'uint8arraylist' +import { isElectronMain } from 'wherearewe' +import { pureJsCrypto } from './js.js' +import type { ICryptoInterface } from '../crypto.js' +import type { KeyPair } from '../types.js' + +const ctx = newInstance() +const asImpl = new ChaCha20Poly1305(ctx) +const CHACHA_POLY1305 = 'chacha20-poly1305' +const PKCS8_PREFIX = Buffer.from([0x30, 0x2e, 0x02, 0x01, 0x00, 0x30, 0x05, 0x06, 0x03, 0x2b, 0x65, 0x6e, 0x04, 0x22, 0x04, 0x20]) +const X25519_PREFIX = Buffer.from([0x30, 0x2a, 0x30, 0x05, 0x06, 0x03, 0x2b, 0x65, 0x6e, 0x03, 0x21, 0x00]) +const nodeCrypto: Pick = { + hashSHA256 (data) { + const hash = crypto.createHash('sha256') + + if (data instanceof Uint8Array) { + return hash.update(data).digest() + } + + for (const buf of data) { + hash.update(buf) + } + + return hash.digest() + }, + + chaCha20Poly1305Encrypt (plaintext, nonce, ad, k) { + const cipher = crypto.createCipheriv(CHACHA_POLY1305, k, nonce, { + authTagLength: 16 + }) + cipher.setAAD(ad, { plaintextLength: plaintext.byteLength }) + + if (plaintext instanceof Uint8Array) { + const updated = cipher.update(plaintext) + const final = cipher.final() + const tag = cipher.getAuthTag() + + return Buffer.concat([updated, final, tag], updated.byteLength + final.byteLength + tag.byteLength) + } + + const output = new Uint8ArrayList() + + for (const buf of plaintext) { + output.append(cipher.update(buf)) + } + + const final = cipher.final() + + if (final.byteLength > 0) { + output.append(final) + } + + output.append(cipher.getAuthTag()) + + return output + }, + + chaCha20Poly1305Decrypt (ciphertext, nonce, ad, k, _dst) { + const authTag = ciphertext.subarray(ciphertext.length - 16) + const decipher = crypto.createDecipheriv(CHACHA_POLY1305, k, nonce, { + authTagLength: 16 + }) + + let text: Uint8Array | Uint8ArrayList + + if (ciphertext instanceof Uint8Array) { + text = ciphertext.subarray(0, ciphertext.length - 16) + } else { + text = ciphertext.sublist(0, ciphertext.length - 16) + } + + decipher.setAAD(ad, { + plaintextLength: text.byteLength + }) + decipher.setAuthTag(authTag) + + if (text instanceof Uint8Array) { + const output = decipher.update(text) + const final = decipher.final() + + if (final.byteLength > 0) { + return Buffer.concat([output, final], output.byteLength + final.byteLength) + } + + return output + } + + const output = new Uint8ArrayList() + + for (const buf of text) { + output.append(decipher.update(buf)) + } + + const final = decipher.final() + + if (final.byteLength > 0) { + output.append(final) + } + + return output + } +} + +const asCrypto: Pick = { + hashSHA256 (data) { + return digest(data.subarray()) + }, + chaCha20Poly1305Encrypt (plaintext, nonce, ad, k) { + return asImpl.seal(k, nonce, plaintext.subarray(), ad) + }, + chaCha20Poly1305Decrypt (ciphertext, nonce, ad, k, dst) { + const plaintext = asImpl.open(k, nonce, ciphertext.subarray(), ad, dst) + if (!plaintext) { + throw new Error('Invalid chacha20poly1305 decryption') + } + return plaintext + } +} + +// benchmarks show that for chacha20poly1305 +// the as implementation is faster for smaller 
payloads(<1200) +// and the node implementation is faster for larger payloads +export const defaultCrypto: ICryptoInterface = { + ...pureJsCrypto, + hashSHA256 (data) { + return nodeCrypto.hashSHA256(data) + }, + chaCha20Poly1305Encrypt (plaintext, nonce, ad, k) { + if (plaintext.byteLength < 1200) { + return asCrypto.chaCha20Poly1305Encrypt(plaintext, nonce, ad, k) + } + return nodeCrypto.chaCha20Poly1305Encrypt(plaintext, nonce, ad, k) + }, + chaCha20Poly1305Decrypt (ciphertext, nonce, ad, k, dst) { + if (ciphertext.byteLength < 1200) { + return asCrypto.chaCha20Poly1305Decrypt(ciphertext, nonce, ad, k, dst) + } + return nodeCrypto.chaCha20Poly1305Decrypt(ciphertext, nonce, ad, k, dst) + }, + generateX25519KeyPair (): KeyPair { + const { publicKey, privateKey } = crypto.generateKeyPairSync('x25519', { + publicKeyEncoding: { + type: 'spki', + format: 'der' + }, + privateKeyEncoding: { + type: 'pkcs8', + format: 'der' + } + }) + + return { + publicKey: publicKey.subarray(X25519_PREFIX.length), + privateKey: privateKey.subarray(PKCS8_PREFIX.length) + } + }, + generateX25519KeyPairFromSeed (seed: Uint8Array): KeyPair { + const privateKey = crypto.createPrivateKey({ + key: Buffer.concat([ + PKCS8_PREFIX, + seed + ], PKCS8_PREFIX.byteLength + seed.byteLength), + type: 'pkcs8', + format: 'der' + }) + + const publicKey = crypto.createPublicKey(privateKey) + .export({ + type: 'spki', + format: 'der' + }).subarray(X25519_PREFIX.length) + + return { + publicKey, + privateKey: seed + } + }, + generateX25519SharedKey (privateKey: Uint8Array | Uint8ArrayList, publicKey: Uint8Array | Uint8ArrayList): Uint8Array { + if (publicKey instanceof Uint8Array) { + publicKey = Buffer.concat([ + X25519_PREFIX, + publicKey + ], X25519_PREFIX.byteLength + publicKey.byteLength) + } else { + publicKey = new Uint8ArrayList(X25519_PREFIX, publicKey).subarray() + } + + if (privateKey instanceof Uint8Array) { + privateKey = Buffer.concat([ + PKCS8_PREFIX, + privateKey + ], PKCS8_PREFIX.byteLength + privateKey.byteLength) + } else { + privateKey = new Uint8ArrayList(PKCS8_PREFIX, privateKey).subarray() + } + + return crypto.diffieHellman({ + publicKey: crypto.createPublicKey({ + key: Buffer.from(publicKey.buffer, publicKey.byteOffset, publicKey.byteLength), + type: 'spki', + format: 'der' + }), + privateKey: crypto.createPrivateKey({ + key: Buffer.from(privateKey.buffer, privateKey.byteOffset, privateKey.byteLength), + type: 'pkcs8', + format: 'der' + }) + }) + } +} + +// no chacha20-poly1305 in electron https://github.com/electron/electron/issues/24024 +if (isElectronMain) { + defaultCrypto.chaCha20Poly1305Encrypt = asCrypto.chaCha20Poly1305Encrypt + defaultCrypto.chaCha20Poly1305Decrypt = asCrypto.chaCha20Poly1305Decrypt +} diff --git a/packages/connection-encrypter-noise/src/crypto/js.ts b/packages/connection-encrypter-noise/src/crypto/js.ts new file mode 100644 index 0000000000..7f9ca1684c --- /dev/null +++ b/packages/connection-encrypter-noise/src/crypto/js.ts @@ -0,0 +1,56 @@ +import { chacha20poly1305 } from '@noble/ciphers/chacha' +import { x25519 } from '@noble/curves/ed25519' +import { extract, expand } from '@noble/hashes/hkdf' +import { sha256 } from '@noble/hashes/sha2' +import type { ICryptoInterface } from '../crypto.js' +import type { KeyPair } from '../types.js' +import type { Uint8ArrayList } from 'uint8arraylist' + +export const pureJsCrypto: ICryptoInterface = { + hashSHA256 (data: Uint8Array | Uint8ArrayList): Uint8Array { + return sha256(data.subarray()) + }, + + getHKDF (ck: Uint8Array, ikm: 
Uint8Array): [Uint8Array, Uint8Array, Uint8Array] { + const prk = extract(sha256, ikm, ck) + const okmU8Array = expand(sha256, prk, undefined, 96) + const okm = okmU8Array + + const k1 = okm.subarray(0, 32) + const k2 = okm.subarray(32, 64) + const k3 = okm.subarray(64, 96) + + return [k1, k2, k3] + }, + + generateX25519KeyPair (): KeyPair { + const secretKey = x25519.utils.randomSecretKey() + const publicKey = x25519.getPublicKey(secretKey) + + return { + publicKey, + privateKey: secretKey + } + }, + + generateX25519KeyPairFromSeed (seed: Uint8Array): KeyPair { + const publicKey = x25519.getPublicKey(seed) + + return { + publicKey, + privateKey: seed + } + }, + + generateX25519SharedKey (privateKey: Uint8Array | Uint8ArrayList, publicKey: Uint8Array | Uint8ArrayList): Uint8Array { + return x25519.getSharedSecret(privateKey.subarray(), publicKey.subarray()) + }, + + chaCha20Poly1305Encrypt (plaintext: Uint8Array | Uint8ArrayList, nonce: Uint8Array, ad: Uint8Array, k: Uint8Array): Uint8Array { + return chacha20poly1305(k, nonce, ad).encrypt(plaintext.subarray()) + }, + + chaCha20Poly1305Decrypt (ciphertext: Uint8Array | Uint8ArrayList, nonce: Uint8Array, ad: Uint8Array, k: Uint8Array, dst?: Uint8Array): Uint8Array { + return chacha20poly1305(k, nonce, ad).decrypt(ciphertext.subarray(), dst) + } +} diff --git a/packages/connection-encrypter-noise/src/encoder.ts b/packages/connection-encrypter-noise/src/encoder.ts new file mode 100644 index 0000000000..cbc2741ebf --- /dev/null +++ b/packages/connection-encrypter-noise/src/encoder.ts @@ -0,0 +1,24 @@ +import { allocUnsafe as uint8ArrayAllocUnsafe } from 'uint8arrays/alloc' +import type { Uint8ArrayList } from 'uint8arraylist' + +export const uint16BEEncode = (value: number): Uint8Array => { + const target = uint8ArrayAllocUnsafe(2) + target[0] = value >> 8 + target[1] = value + return target +} +uint16BEEncode.bytes = 2 + +export const uint16BEDecode = (data: Uint8Array | Uint8ArrayList): number => { + if (data.length < 2) { throw RangeError('Could not decode int16BE') } + + if (data instanceof Uint8Array) { + let value = 0 + value += data[0] << 8 + value += data[1] + return value + } + + return data.getUint16(0) +} +uint16BEDecode.bytes = 2 diff --git a/packages/connection-encrypter-noise/src/errors.ts b/packages/connection-encrypter-noise/src/errors.ts new file mode 100644 index 0000000000..6d526dae04 --- /dev/null +++ b/packages/connection-encrypter-noise/src/errors.ts @@ -0,0 +1,10 @@ +export class InvalidCryptoExchangeError extends Error { + public code: string + + constructor (message = 'Invalid crypto exchange') { + super(message) + this.code = InvalidCryptoExchangeError.code + } + + static readonly code = 'ERR_INVALID_CRYPTO_EXCHANGE' +} diff --git a/packages/connection-encrypter-noise/src/index.ts b/packages/connection-encrypter-noise/src/index.ts new file mode 100644 index 0000000000..3d288b0dc8 --- /dev/null +++ b/packages/connection-encrypter-noise/src/index.ts @@ -0,0 +1,58 @@ +/** + * @packageDocumentation + * + * This repository contains TypeScript implementation of noise protocol, an encryption protocol used in libp2p. + * + * ## Usage + * + * Install with `yarn add @chainsafe/libp2p-noise` or `npm i @chainsafe/libp2p-noise`. 
+ * + * Example of using default noise configuration and passing it to the libp2p config: + * + * ```ts + * import {createLibp2p} from "libp2p" + * import {noise} from "@chainsafe/libp2p-noise" + * + * //custom noise configuration, pass it instead of `noise()` + * //x25519 private key + * const n = noise({ staticNoiseKey }); + * + * const libp2p = await createLibp2p({ + * connectionEncrypters: [noise()], + * //... other options + * }) + * ``` + * + * See the [NoiseInit](https://github.com/ChainSafe/js-libp2p-noise/blob/master/src/noise.ts#L22-L30) interface for noise configuration options. + * + * ## API + * + * This module exposes an implementation of the [ConnectionEncrypter](https://libp2p.github.io/js-libp2p/interfaces/_libp2p_interface.ConnectionEncrypter.html) interface. + * + * ## Bring your own crypto + * + * You can provide a custom crypto implementation (instead of the default, based on [@noble](https://paulmillr.com/noble/)) by adding a `crypto` field to the init argument passed to the `Noise` factory. + * + * The implementation must conform to the `ICryptoInterface`, defined in + */ + +import { Noise } from './noise.js' +import type { NoiseInit, NoiseExtensions } from './noise.js' +import type { KeyPair } from './types.js' +import type { ComponentLogger, ConnectionEncrypter, Metrics, PeerId, PrivateKey, Upgrader } from '@libp2p/interface' + +export { pureJsCrypto } from './crypto/js.js' +export type { ICryptoInterface } from './crypto.js' +export type { NoiseInit, NoiseExtensions, KeyPair } + +export interface NoiseComponents { + peerId: PeerId + privateKey: PrivateKey + logger: ComponentLogger + upgrader: Upgrader + metrics?: Metrics +} + +export function noise (init: NoiseInit = {}): (components: NoiseComponents) => ConnectionEncrypter { + return (components: NoiseComponents) => new Noise(components, init) +} diff --git a/packages/connection-encrypter-noise/src/logger.ts b/packages/connection-encrypter-noise/src/logger.ts new file mode 100644 index 0000000000..6a0a00672d --- /dev/null +++ b/packages/connection-encrypter-noise/src/logger.ts @@ -0,0 +1,65 @@ +import { toString as uint8ArrayToString } from 'uint8arrays/to-string' +import { DUMP_SESSION_KEYS } from './constants.js' +import type { CipherState } from './protocol.js' +import type { KeyPair } from './types.js' +import type { Logger } from '@libp2p/interface' +import type { Uint8ArrayList } from 'uint8arraylist' + +export function logLocalStaticKeys (s: KeyPair | undefined, keyLogger: Logger): void { + if (!keyLogger.enabled || !DUMP_SESSION_KEYS) { + return + } + + if (s) { + keyLogger(`LOCAL_STATIC_PUBLIC_KEY ${uint8ArrayToString(s.publicKey, 'hex')}`) + keyLogger(`LOCAL_STATIC_PRIVATE_KEY ${uint8ArrayToString(s.privateKey, 'hex')}`) + } else { + keyLogger('Missing local static keys.') + } +} + +export function logLocalEphemeralKeys (e: KeyPair | undefined, keyLogger: Logger): void { + if (!keyLogger.enabled || !DUMP_SESSION_KEYS) { + return + } + + if (e) { + keyLogger(`LOCAL_PUBLIC_EPHEMERAL_KEY ${uint8ArrayToString(e.publicKey, 'hex')}`) + keyLogger(`LOCAL_PRIVATE_EPHEMERAL_KEY ${uint8ArrayToString(e.privateKey, 'hex')}`) + } else { + keyLogger('Missing local ephemeral keys.') + } +} + +export function logRemoteStaticKey (rs: Uint8Array | Uint8ArrayList | undefined, keyLogger: Logger): void { + if (!keyLogger.enabled || !DUMP_SESSION_KEYS) { + return + } + + if (rs) { + keyLogger(`REMOTE_STATIC_PUBLIC_KEY ${uint8ArrayToString(rs.subarray(), 'hex')}`) + } else { + keyLogger('Missing remote static public key.') + } 
+} + +export function logRemoteEphemeralKey (re: Uint8Array | Uint8ArrayList | undefined, keyLogger: Logger): void { + if (!keyLogger.enabled || !DUMP_SESSION_KEYS) { + return + } + + if (re) { + keyLogger(`REMOTE_EPHEMERAL_PUBLIC_KEY ${uint8ArrayToString(re.subarray(), 'hex')}`) + } else { + keyLogger('Missing remote ephemeral keys.') + } +} + +export function logCipherState (cs1: CipherState, cs2: CipherState, keyLogger: Logger): void { + if (!keyLogger.enabled || !DUMP_SESSION_KEYS) { + return + } + + keyLogger(`CIPHER_STATE_1 ${cs1.n.getUint64()} ${cs1.k && uint8ArrayToString(cs1.k, 'hex')}`) + keyLogger(`CIPHER_STATE_2 ${cs2.n.getUint64()} ${cs2.k && uint8ArrayToString(cs2.k, 'hex')}`) +} diff --git a/packages/connection-encrypter-noise/src/metrics.ts b/packages/connection-encrypter-noise/src/metrics.ts new file mode 100644 index 0000000000..3733d41921 --- /dev/null +++ b/packages/connection-encrypter-noise/src/metrics.ts @@ -0,0 +1,32 @@ +import type { Counter, Metrics } from '@libp2p/interface' + +export type MetricsRegistry = Record + +export function registerMetrics (metrics: Metrics): MetricsRegistry { + return { + xxHandshakeSuccesses: metrics.registerCounter( + 'libp2p_noise_xxhandshake_successes_total', { + help: 'Total count of noise xxHandshakes successes_' + }), + + xxHandshakeErrors: metrics.registerCounter( + 'libp2p_noise_xxhandshake_error_total', { + help: 'Total count of noise xxHandshakes errors' + }), + + encryptedPackets: metrics.registerCounter( + 'libp2p_noise_encrypted_packets_total', { + help: 'Total count of noise encrypted packets successfully' + }), + + decryptedPackets: metrics.registerCounter( + 'libp2p_noise_decrypted_packets_total', { + help: 'Total count of noise decrypted packets' + }), + + decryptErrors: metrics.registerCounter( + 'libp2p_noise_decrypt_errors_total', { + help: 'Total count of noise decrypt errors' + }) + } +} diff --git a/packages/connection-encrypter-noise/src/noise.ts b/packages/connection-encrypter-noise/src/noise.ts new file mode 100644 index 0000000000..66acbbbe25 --- /dev/null +++ b/packages/connection-encrypter-noise/src/noise.ts @@ -0,0 +1,237 @@ +import { publicKeyFromProtobuf } from '@libp2p/crypto/keys' +import { InvalidCryptoExchangeError, serviceCapabilities } from '@libp2p/interface' +import { peerIdFromPublicKey } from '@libp2p/peer-id' +import { lpStream } from '@libp2p/utils' +import { alloc as uint8ArrayAlloc } from 'uint8arrays/alloc' +import { NOISE_MSG_MAX_LENGTH_BYTES } from './constants.js' +import { defaultCrypto } from './crypto/index.js' +import { wrapCrypto } from './crypto.js' +import { uint16BEDecode, uint16BEEncode } from './encoder.js' +import { registerMetrics } from './metrics.js' +import { performHandshakeInitiator, performHandshakeResponder } from './performHandshake.js' +import { toMessageStream } from './utils.ts' +import type { ICryptoInterface } from './crypto.js' +import type { NoiseComponents } from './index.js' +import type { MetricsRegistry } from './metrics.js' +import type { HandshakeResult, ICrypto, INoiseConnection, INoiseExtensions, KeyPair } from './types.js' +import type { MultiaddrConnection, SecuredConnection, PrivateKey, PublicKey, StreamMuxerFactory, SecureConnectionOptions, Logger, MessageStream } from '@libp2p/interface' +import type { LengthPrefixedStream } from '@libp2p/utils' + +export interface NoiseExtensions { + webtransportCerthashes: Uint8Array[] +} + +export interface NoiseInit { + /** + * x25519 private key, reuse for faster handshakes + */ + staticNoiseKey?: Uint8Array 
+ extensions?: Partial + crypto?: ICryptoInterface + prologueBytes?: Uint8Array +} + +export class Noise implements INoiseConnection { + public protocol = '/noise' + public crypto: ICrypto + + private readonly prologue: Uint8Array + private readonly staticKey: KeyPair + private readonly extensions?: NoiseExtensions + private readonly metrics?: MetricsRegistry + private readonly components: NoiseComponents + private readonly log: Logger + + constructor (components: NoiseComponents, init: NoiseInit = {}) { + const { staticNoiseKey, extensions, crypto, prologueBytes } = init + const { metrics } = components + + this.components = components + this.log = components.logger.forComponent('libp2p:noise') + const _crypto = crypto ?? defaultCrypto + this.crypto = wrapCrypto(_crypto) + this.extensions = { + webtransportCerthashes: [], + ...extensions + } + this.metrics = metrics ? registerMetrics(metrics) : undefined + + if (staticNoiseKey) { + // accepts x25519 private key of length 32 + this.staticKey = _crypto.generateX25519KeyPairFromSeed(staticNoiseKey) + } else { + this.staticKey = _crypto.generateX25519KeyPair() + } + this.prologue = prologueBytes ?? uint8ArrayAlloc(0) + } + + readonly [Symbol.toStringTag] = '@chainsafe/libp2p-noise' + + readonly [serviceCapabilities]: string[] = [ + '@libp2p/connection-encryption', + '@chainsafe/libp2p-noise' + ] + + /** + * Encrypt outgoing data to the remote party (handshake as initiator) + * + * @param connection - streaming iterable duplex that will be encrypted + * @param options + * @param options.remotePeer - PeerId of the remote peer. Used to validate the integrity of the remote peer + * @param options.signal - Used to abort the operation + */ + async secureOutbound (connection: Stream, options?: SecureConnectionOptions): Promise> { + const log = connection.log?.newScope('noise') ?? this.log + const wrappedConnection = lpStream(connection, { + lengthEncoder: uint16BEEncode, + lengthDecoder: uint16BEDecode, + maxDataLength: NOISE_MSG_MAX_LENGTH_BYTES + }) + + const handshake = await this.performHandshakeInitiator( + wrappedConnection, + this.components.privateKey, + log, + options?.remotePeer?.publicKey, + options + ) + const publicKey = publicKeyFromProtobuf(handshake.payload.identityKey) + + return { + connection: toMessageStream(wrappedConnection.unwrap(), handshake, this.metrics), + remoteExtensions: handshake.payload.extensions, + remotePeer: peerIdFromPublicKey(publicKey), + streamMuxer: options?.skipStreamMuxerNegotiation === true ? undefined : this.getStreamMuxer(handshake.payload.extensions?.streamMuxers) + } + } + + private getStreamMuxer (protocols?: string[]): StreamMuxerFactory | undefined { + if (protocols == null || protocols.length === 0) { + return + } + + const streamMuxers = this.components.upgrader.getStreamMuxers() + + if (streamMuxers != null) { + for (const protocol of protocols) { + const streamMuxer = streamMuxers.get(protocol) + + if (streamMuxer != null) { + return streamMuxer + } + } + } + + if (protocols.length) { + throw new InvalidCryptoExchangeError('Early muxer negotiation was requested but the initiator and responder had no common muxers') + } + } + + /** + * Decrypt incoming data (handshake as responder). + * + * @param connection - streaming iterable duplex that will be encrypted + * @param options + * @param options.remotePeer - PeerId of the remote peer. 
Used to validate the integrity of the remote peer + * @param options.signal - Used to abort the operation + */ + async secureInbound (connection: Stream, options?: SecureConnectionOptions): Promise> { + const log = connection.log?.newScope('noise') ?? this.log + const wrappedConnection = lpStream(connection, { + lengthEncoder: uint16BEEncode, + lengthDecoder: uint16BEDecode, + maxDataLength: NOISE_MSG_MAX_LENGTH_BYTES + }) + + const handshake = await this.performHandshakeResponder( + wrappedConnection, + this.components.privateKey, + log, + options?.remotePeer?.publicKey, + options + ) + const publicKey = publicKeyFromProtobuf(handshake.payload.identityKey) + + return { + connection: toMessageStream(wrappedConnection.unwrap(), handshake, this.metrics), + remoteExtensions: handshake.payload.extensions, + remotePeer: peerIdFromPublicKey(publicKey), + streamMuxer: options?.skipStreamMuxerNegotiation === true ? undefined : this.getStreamMuxer(handshake.payload.extensions?.streamMuxers) + } + } + + /** + * Perform XX handshake as initiator. + */ + private async performHandshakeInitiator ( + connection: LengthPrefixedStream, + // TODO: pass private key in noise constructor via Components + privateKey: PrivateKey, + log: Logger, + remoteIdentityKey?: PublicKey, + options?: SecureConnectionOptions + ): Promise { + let result: HandshakeResult + const streamMuxers = options?.skipStreamMuxerNegotiation === true ? [] : [...this.components.upgrader.getStreamMuxers().keys()] + + try { + result = await performHandshakeInitiator({ + connection, + privateKey, + remoteIdentityKey, + log: log.newScope('xxhandshake'), + crypto: this.crypto, + prologue: this.prologue, + s: this.staticKey, + extensions: { + streamMuxers, + webtransportCerthashes: [], + ...this.extensions + } + }, options) + this.metrics?.xxHandshakeSuccesses.increment() + } catch (e: unknown) { + this.metrics?.xxHandshakeErrors.increment() + throw e + } + + return result + } + + /** + * Perform XX handshake as responder. + */ + private async performHandshakeResponder ( + connection: LengthPrefixedStream, + privateKey: PrivateKey, + log: Logger, + remoteIdentityKey?: PublicKey, + options?: SecureConnectionOptions + ): Promise { + let result: HandshakeResult + const streamMuxers = options?.skipStreamMuxerNegotiation === true ? [] : [...this.components.upgrader.getStreamMuxers().keys()] + + try { + result = await performHandshakeResponder({ + connection, + privateKey, + remoteIdentityKey, + log: log.newScope('xxhandshake'), + crypto: this.crypto, + prologue: this.prologue, + s: this.staticKey, + extensions: { + streamMuxers, + webtransportCerthashes: [], + ...this.extensions + } + }, options) + this.metrics?.xxHandshakeSuccesses.increment() + } catch (e: unknown) { + this.metrics?.xxHandshakeErrors.increment() + throw e + } + + return result + } +} diff --git a/packages/connection-encrypter-noise/src/nonce.ts b/packages/connection-encrypter-noise/src/nonce.ts new file mode 100644 index 0000000000..39c50b4d6c --- /dev/null +++ b/packages/connection-encrypter-noise/src/nonce.ts @@ -0,0 +1,49 @@ +import { alloc as uint8ArrayAlloc } from 'uint8arrays/alloc' + +export const MIN_NONCE = 0 +// For performance reasons, the nonce is represented as a JS `number` +// Although JS `number` can safely represent integers up to 2 ** 53 - 1, we choose to only use +// 4 bytes to store the data for performance reason. 
+// This is a slight deviation from the noise spec, which describes the max nonce as 2 ** 64 - 2 +// The effect is that this implementation will need a new handshake to be performed after fewer messages are exchanged than other implementations with full uint64 nonces. +// this MAX_NONCE is still a large number of messages, so the practical effect of this is negligible. +export const MAX_NONCE = 0xffffffff + +const ERR_MAX_NONCE = 'Cipher state has reached maximum n, a new handshake must be performed' + +/** + * The nonce is an uint that's increased over time. + * Maintaining different representations help improve performance. + */ +export class Nonce { + private n: number + private readonly bytes: Uint8Array + private readonly view: DataView + + constructor (n = MIN_NONCE) { + this.n = n + this.bytes = uint8ArrayAlloc(12) + this.view = new DataView(this.bytes.buffer, this.bytes.byteOffset, this.bytes.byteLength) + this.view.setUint32(4, n, true) + } + + increment (): void { + this.n++ + // Even though we're treating the nonce as 8 bytes, RFC7539 specifies 12 bytes for a nonce. + this.view.setUint32(4, this.n, true) + } + + getBytes (): Uint8Array { + return this.bytes + } + + getUint64 (): number { + return this.n + } + + assertValue (): void { + if (this.n > MAX_NONCE) { + throw new Error(ERR_MAX_NONCE) + } + } +} diff --git a/packages/connection-encrypter-noise/src/performHandshake.ts b/packages/connection-encrypter-noise/src/performHandshake.ts new file mode 100644 index 0000000000..dc197ba978 --- /dev/null +++ b/packages/connection-encrypter-noise/src/performHandshake.ts @@ -0,0 +1,91 @@ +import { + logLocalStaticKeys, + logLocalEphemeralKeys, + logRemoteEphemeralKey, + logRemoteStaticKey, + logCipherState +} from './logger.js' +import { ZEROLEN, XXHandshakeState } from './protocol.js' +import { createHandshakePayload, decodeHandshakePayload } from './utils.js' +import type { HandshakeResult, HandshakeParams } from './types.js' +import type { AbortOptions } from '@libp2p/interface' + +export async function performHandshakeInitiator (init: HandshakeParams, options?: AbortOptions): Promise { + const { log, connection, crypto, privateKey, prologue, s, remoteIdentityKey, extensions } = init + + const payload = await createHandshakePayload(privateKey, s.publicKey, extensions) + const xx = new XXHandshakeState({ + crypto, + protocolName: 'Noise_XX_25519_ChaChaPoly_SHA256', + initiator: true, + prologue, + s + }) + + logLocalStaticKeys(xx.s, log) + log.trace('Stage 0 - Initiator starting to send first message.') + await connection.write(xx.writeMessageA(ZEROLEN), options) + log.trace('Stage 0 - Initiator finished sending first message.') + logLocalEphemeralKeys(xx.e, log) + + log.trace('Stage 1 - Initiator waiting to receive first message from responder...') + const plaintext = xx.readMessageB(await connection.read(options)) + log.trace('Stage 1 - Initiator received the message.') + logRemoteEphemeralKey(xx.re, log) + logRemoteStaticKey(xx.rs, log) + + log.trace("Initiator going to check remote's signature...") + const receivedPayload = await decodeHandshakePayload(plaintext, xx.rs, remoteIdentityKey) + log.trace('All good with the signature!') + + log.trace('Stage 2 - Initiator sending third handshake message.') + await connection.write(xx.writeMessageC(payload), options) + log.trace('Stage 2 - Initiator sent message with signed payload.') + + const [cs1, cs2] = xx.ss.split() + logCipherState(cs1, cs2, log) + + return { + payload: receivedPayload, + encrypt: (plaintext) => 
cs1.encryptWithAd(ZEROLEN, plaintext), + decrypt: (ciphertext, dst) => cs2.decryptWithAd(ZEROLEN, ciphertext, dst) + } +} + +export async function performHandshakeResponder (init: HandshakeParams, options?: AbortOptions): Promise { + const { log, connection, crypto, privateKey, prologue, s, remoteIdentityKey, extensions } = init + + const payload = await createHandshakePayload(privateKey, s.publicKey, extensions) + const xx = new XXHandshakeState({ + crypto, + protocolName: 'Noise_XX_25519_ChaChaPoly_SHA256', + initiator: false, + prologue, + s + }) + + logLocalStaticKeys(xx.s, log) + log.trace('Stage 0 - Responder waiting to receive first message.') + xx.readMessageA(await connection.read(options)) + log.trace('Stage 0 - Responder received first message.') + logRemoteEphemeralKey(xx.re, log) + + log.trace('Stage 1 - Responder sending out first message with signed payload and static key.') + await connection.write(xx.writeMessageB(payload), options) + log.trace('Stage 1 - Responder sent the second handshake message with signed payload.') + logLocalEphemeralKeys(xx.e, log) + + log.trace('Stage 2 - Responder waiting for third handshake message...') + const plaintext = xx.readMessageC(await connection.read(options)) + log.trace('Stage 2 - Responder received the message, finished handshake.') + const receivedPayload = await decodeHandshakePayload(plaintext, xx.rs, remoteIdentityKey) + + const [cs1, cs2] = xx.ss.split() + logCipherState(cs1, cs2, log) + + return { + payload: receivedPayload, + encrypt: (plaintext) => cs2.encryptWithAd(ZEROLEN, plaintext), + decrypt: (ciphertext, dst) => cs1.decryptWithAd(ZEROLEN, ciphertext, dst) + } +} diff --git a/packages/connection-encrypter-noise/src/proto/payload.proto b/packages/connection-encrypter-noise/src/proto/payload.proto new file mode 100644 index 0000000000..5ee5490673 --- /dev/null +++ b/packages/connection-encrypter-noise/src/proto/payload.proto @@ -0,0 +1,12 @@ +syntax = "proto3"; + +message NoiseExtensions { + repeated bytes webtransport_certhashes = 1; + repeated string stream_muxers = 2; +} + +message NoiseHandshakePayload { + bytes identity_key = 1; + bytes identity_sig = 2; + optional NoiseExtensions extensions = 4; +} diff --git a/packages/connection-encrypter-noise/src/proto/payload.ts b/packages/connection-encrypter-noise/src/proto/payload.ts new file mode 100644 index 0000000000..e5a79ff374 --- /dev/null +++ b/packages/connection-encrypter-noise/src/proto/payload.ts @@ -0,0 +1,170 @@ +import { decodeMessage, encodeMessage, MaxLengthError, message } from 'protons-runtime' +import { alloc as uint8ArrayAlloc } from 'uint8arrays/alloc' +import type { Codec, DecodeOptions } from 'protons-runtime' +import type { Uint8ArrayList } from 'uint8arraylist' + +export interface NoiseExtensions { + webtransportCerthashes: Uint8Array[] + streamMuxers: string[] +} + +export namespace NoiseExtensions { + let _codec: Codec + + export const codec = (): Codec => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork() + } + + if (obj.webtransportCerthashes != null) { + for (const value of obj.webtransportCerthashes) { + w.uint32(10) + w.bytes(value) + } + } + + if (obj.streamMuxers != null) { + for (const value of obj.streamMuxers) { + w.uint32(18) + w.string(value) + } + } + + if (opts.lengthDelimited !== false) { + w.ldelim() + } + }, (reader, length, opts = {}) => { + const obj: any = { + webtransportCerthashes: [], + streamMuxers: [] + } + + const end = length == null ? 
reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: { + if (opts.limits?.webtransportCerthashes != null && obj.webtransportCerthashes.length === opts.limits.webtransportCerthashes) { + throw new MaxLengthError('Decode error - map field "webtransportCerthashes" had too many elements') + } + + obj.webtransportCerthashes.push(reader.bytes()) + break + } + case 2: { + if (opts.limits?.streamMuxers != null && obj.streamMuxers.length === opts.limits.streamMuxers) { + throw new MaxLengthError('Decode error - map field "streamMuxers" had too many elements') + } + + obj.streamMuxers.push(reader.string()) + break + } + default: { + reader.skipType(tag & 7) + break + } + } + } + + return obj + }) + } + + return _codec + } + + export const encode = (obj: Partial): Uint8Array => { + return encodeMessage(obj, NoiseExtensions.codec()) + } + + export const decode = (buf: Uint8Array | Uint8ArrayList, opts?: DecodeOptions): NoiseExtensions => { + return decodeMessage(buf, NoiseExtensions.codec(), opts) + } +} + +export interface NoiseHandshakePayload { + identityKey: Uint8Array + identitySig: Uint8Array + extensions?: NoiseExtensions +} + +export namespace NoiseHandshakePayload { + let _codec: Codec + + export const codec = (): Codec => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork() + } + + if ((obj.identityKey != null && obj.identityKey.byteLength > 0)) { + w.uint32(10) + w.bytes(obj.identityKey) + } + + if ((obj.identitySig != null && obj.identitySig.byteLength > 0)) { + w.uint32(18) + w.bytes(obj.identitySig) + } + + if (obj.extensions != null) { + w.uint32(34) + NoiseExtensions.codec().encode(obj.extensions, w) + } + + if (opts.lengthDelimited !== false) { + w.ldelim() + } + }, (reader, length, opts = {}) => { + const obj: any = { + identityKey: uint8ArrayAlloc(0), + identitySig: uint8ArrayAlloc(0) + } + + const end = length == null ? 
reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: { + obj.identityKey = reader.bytes() + break + } + case 2: { + obj.identitySig = reader.bytes() + break + } + case 4: { + obj.extensions = NoiseExtensions.codec().decode(reader, reader.uint32(), { + limits: opts.limits?.extensions + }) + break + } + default: { + reader.skipType(tag & 7) + break + } + } + } + + return obj + }) + } + + return _codec + } + + export const encode = (obj: Partial): Uint8Array => { + return encodeMessage(obj, NoiseHandshakePayload.codec()) + } + + export const decode = (buf: Uint8Array | Uint8ArrayList, opts?: DecodeOptions): NoiseHandshakePayload => { + return decodeMessage(buf, NoiseHandshakePayload.codec(), opts) + } +} diff --git a/packages/connection-encrypter-noise/src/protocol.ts b/packages/connection-encrypter-noise/src/protocol.ts new file mode 100644 index 0000000000..a71ac3b177 --- /dev/null +++ b/packages/connection-encrypter-noise/src/protocol.ts @@ -0,0 +1,313 @@ +import { Uint8ArrayList } from 'uint8arraylist' +import { fromString as uint8ArrayFromString } from 'uint8arrays' +import { alloc as uint8ArrayAlloc } from 'uint8arrays/alloc' +import { InvalidCryptoExchangeError } from './errors.js' +import { Nonce } from './nonce.js' +import type { ICipherState, ISymmetricState, IHandshakeState, KeyPair, ICrypto } from './types.js' + +// Code in this file is a direct translation of a subset of the noise protocol https://noiseprotocol.org/noise.html, +// agnostic to libp2p's usage of noise + +export const ZEROLEN = uint8ArrayAlloc(0) + +interface ICipherStateWithKey extends ICipherState { + k: Uint8Array +} + +export class CipherState implements ICipherState { + public k?: Uint8Array + public n: Nonce + private readonly crypto: ICrypto + + constructor (crypto: ICrypto, k: Uint8Array | undefined = undefined, n = 0) { + this.crypto = crypto + this.k = k + this.n = new Nonce(n) + } + + public hasKey (): this is ICipherStateWithKey { + return Boolean(this.k) + } + + public encryptWithAd (ad: Uint8Array, plaintext: Uint8Array | Uint8ArrayList): Uint8Array | Uint8ArrayList { + if (!this.hasKey()) { + return plaintext + } + + this.n.assertValue() + const e = this.crypto.encrypt(plaintext, this.n.getBytes(), ad, this.k) + this.n.increment() + + return e + } + + public decryptWithAd (ad: Uint8Array, ciphertext: Uint8Array | Uint8ArrayList, dst?: Uint8Array): Uint8Array | Uint8ArrayList { + if (!this.hasKey()) { + return ciphertext + } + + this.n.assertValue() + const plaintext = this.crypto.decrypt(ciphertext, this.n.getBytes(), ad, this.k, dst) + this.n.increment() + + return plaintext + } +} + +export class SymmetricState implements ISymmetricState { + public cs: CipherState + public ck: Uint8Array + public h: Uint8Array + private readonly crypto: ICrypto + + constructor (crypto: ICrypto, protocolName: string) { + this.crypto = crypto + + const protocolNameBytes = uint8ArrayFromString(protocolName, 'utf-8') + this.h = hashProtocolName(crypto, protocolNameBytes) + + this.ck = this.h + this.cs = new CipherState(crypto) + } + + public mixKey (ikm: Uint8Array): void { + const [ck, tempK] = this.crypto.hkdf(this.ck, ikm) + this.ck = ck + this.cs = new CipherState(this.crypto, tempK) + } + + public mixHash (data: Uint8Array | Uint8ArrayList): void { + this.h = this.crypto.hash(new Uint8ArrayList(this.h, data)) + } + + public encryptAndHash (plaintext: Uint8Array | Uint8ArrayList): Uint8Array | Uint8ArrayList { + const ciphertext = 
this.cs.encryptWithAd(this.h, plaintext) + this.mixHash(ciphertext) + return ciphertext + } + + public decryptAndHash (ciphertext: Uint8Array | Uint8ArrayList): Uint8Array | Uint8ArrayList { + const plaintext = this.cs.decryptWithAd(this.h, ciphertext) + this.mixHash(ciphertext) + return plaintext + } + + public split (): [CipherState, CipherState] { + const [tempK1, tempK2] = this.crypto.hkdf(this.ck, ZEROLEN) + return [new CipherState(this.crypto, tempK1), new CipherState(this.crypto, tempK2)] + } +} + +// const MESSAGE_PATTERNS = ['e', 's', 'ee', 'es', 'se', 'ss'] as const +// type MessagePattern = Array + +export interface HandshakeStateInit { + crypto: ICrypto + protocolName: string + initiator: boolean + prologue: Uint8Array + s?: KeyPair + e?: KeyPair + rs?: Uint8Array | Uint8ArrayList + re?: Uint8Array | Uint8ArrayList +} + +export abstract class AbstractHandshakeState implements IHandshakeState { + public ss: SymmetricState + public s?: KeyPair + public e?: KeyPair + public rs?: Uint8Array | Uint8ArrayList + public re?: Uint8Array | Uint8ArrayList + public initiator: boolean + protected readonly crypto: ICrypto + + constructor (init: HandshakeStateInit) { + const { crypto, protocolName, prologue, initiator, s, e, rs, re } = init + this.crypto = crypto + this.ss = new SymmetricState(crypto, protocolName) + this.ss.mixHash(prologue) + this.initiator = initiator + this.s = s + this.e = e + this.rs = rs + this.re = re + } + + protected writeE (): Uint8Array { + if (this.e) { + throw new Error('ephemeral keypair is already set') + } + const e = this.crypto.generateKeypair() + this.ss.mixHash(e.publicKey) + this.e = e + return e.publicKey + } + + protected writeS (): Uint8Array | Uint8ArrayList { + if (!this.s) { + throw new Error('static keypair is not set') + } + return this.ss.encryptAndHash(this.s.publicKey) + } + + protected writeEE (): void { + if (!this.e) { + throw new Error('ephemeral keypair is not set') + } + if (!this.re) { + throw new Error('remote ephemeral public key is not set') + } + this.ss.mixKey(this.crypto.dh(this.e, this.re)) + } + + protected writeES (): void { + if (this.initiator) { + if (!this.e) { + throw new Error('ephemeral keypair is not set') + } + if (!this.rs) { + throw new Error('remote static public key is not set') + } + this.ss.mixKey(this.crypto.dh(this.e, this.rs)) + } else { + if (!this.s) { + throw new Error('static keypair is not set') + } + if (!this.re) { + throw new Error('remote ephemeral public key is not set') + } + this.ss.mixKey(this.crypto.dh(this.s, this.re)) + } + } + + protected writeSE (): void { + if (this.initiator) { + if (!this.s) { + throw new Error('static keypair is not set') + } + if (!this.re) { + throw new Error('remote ephemeral public key is not set') + } + this.ss.mixKey(this.crypto.dh(this.s, this.re)) + } else { + if (!this.e) { + throw new Error('ephemeral keypair is not set') + } + if (!this.rs) { + throw new Error('remote static public key is not set') + } + this.ss.mixKey(this.crypto.dh(this.e, this.rs)) + } + } + + protected readE (message: Uint8ArrayList, offset = 0): void { + if (this.re) { + throw new Error('remote ephemeral public key is already set') + } + if (message.byteLength < offset + 32) { + throw new Error('message is not long enough') + } + this.re = message.sublist(offset, offset + 32) + this.ss.mixHash(this.re) + } + + protected readS (message: Uint8ArrayList, offset = 0): number { + if (this.rs) { + throw new Error('remote static public key is already set') + } + const cipherLength = 32 + 
(this.ss.cs.hasKey() ? 16 : 0) + if (message.byteLength < offset + cipherLength) { + throw new Error('message is not long enough') + } + const temp = message.sublist(offset, offset + cipherLength) + this.rs = this.ss.decryptAndHash(temp) + return cipherLength + } + + protected readEE (): void { + this.writeEE() + } + + protected readES (): void { + this.writeES() + } + + protected readSE (): void { + this.writeSE() + } +} + +/** + * A IHandshakeState that's optimized for the XX pattern + */ +export class XXHandshakeState extends AbstractHandshakeState { + // e + writeMessageA (payload: Uint8Array | Uint8ArrayList): Uint8Array | Uint8ArrayList { + return new Uint8ArrayList(this.writeE(), this.ss.encryptAndHash(payload)) + } + + // e, ee, s, es + writeMessageB (payload: Uint8Array | Uint8ArrayList): Uint8Array | Uint8ArrayList { + const e = this.writeE() + this.writeEE() + const encS = this.writeS() + this.writeES() + + return new Uint8ArrayList(e, encS, this.ss.encryptAndHash(payload)) + } + + // s, se + writeMessageC (payload: Uint8Array | Uint8ArrayList): Uint8Array | Uint8ArrayList { + const encS = this.writeS() + this.writeSE() + + return new Uint8ArrayList(encS, this.ss.encryptAndHash(payload)) + } + + // e + readMessageA (message: Uint8ArrayList): Uint8Array | Uint8ArrayList { + try { + this.readE(message) + + return this.ss.decryptAndHash(message.sublist(32)) + } catch (e) { + throw new InvalidCryptoExchangeError(`handshake stage 0 validation fail: ${(e as Error).message}`) + } + } + + // e, ee, s, es + readMessageB (message: Uint8ArrayList): Uint8Array | Uint8ArrayList { + try { + this.readE(message) + this.readEE() + const consumed = this.readS(message, 32) + this.readES() + + return this.ss.decryptAndHash(message.sublist(32 + consumed)) + } catch (e) { + throw new InvalidCryptoExchangeError(`handshake stage 1 validation fail: ${(e as Error).message}`) + } + } + + // s, se + readMessageC (message: Uint8ArrayList): Uint8Array | Uint8ArrayList { + try { + const consumed = this.readS(message) + this.readSE() + + return this.ss.decryptAndHash(message.sublist(consumed)) + } catch (e) { + throw new InvalidCryptoExchangeError(`handshake stage 2 validation fail: ${(e as Error).message}`) + } + } +} + +function hashProtocolName (crypto: ICrypto, protocolName: Uint8Array): Uint8Array { + if (protocolName.length <= 32) { + const h = uint8ArrayAlloc(32) + h.set(protocolName) + return h + } else { + return crypto.hash(protocolName) + } +} diff --git a/packages/connection-encrypter-noise/src/types.ts b/packages/connection-encrypter-noise/src/types.ts new file mode 100644 index 0000000000..2f0aa9233a --- /dev/null +++ b/packages/connection-encrypter-noise/src/types.ts @@ -0,0 +1,104 @@ +import type { Nonce } from './nonce.js' +import type { NoiseExtensions, NoiseHandshakePayload } from './proto/payload.js' +import type { ConnectionEncrypter, Logger, PrivateKey, PublicKey } from '@libp2p/interface' +import type { LengthPrefixedStream } from '@libp2p/utils' +import type { Uint8ArrayList } from 'uint8arraylist' + +/** + * Crypto functions defined by the noise protocol, abstracted from the + * underlying implementations + */ +export interface ICrypto { + generateKeypair(): KeyPair + dh(keypair: KeyPair, publicKey: Uint8Array | Uint8ArrayList): Uint8Array + encrypt(plaintext: Uint8Array | Uint8ArrayList, nonce: Uint8Array, ad: Uint8Array, k: Uint8Array): Uint8ArrayList | Uint8Array + decrypt(ciphertext: Uint8Array | Uint8ArrayList, nonce: Uint8Array, ad: Uint8Array, k: Uint8Array, dst?: Uint8Array): 
Uint8ArrayList | Uint8Array + hash(data: Uint8Array | Uint8ArrayList): Uint8Array + hkdf(ck: Uint8Array, ikm: Uint8Array): [Uint8Array, Uint8Array, Uint8Array] +} + +export interface HandshakeParams { + log: Logger + connection: LengthPrefixedStream + crypto: ICrypto + privateKey: PrivateKey + prologue: Uint8Array + /** static keypair */ + s: KeyPair + remoteIdentityKey?: PublicKey + extensions?: NoiseExtensions +} + +export interface HandshakeResult { + payload: NoiseHandshakePayload + encrypt (plaintext: Uint8Array | Uint8ArrayList): Uint8Array | Uint8ArrayList + decrypt (ciphertext: Uint8Array | Uint8ArrayList, dst?: Uint8Array): Uint8Array | Uint8ArrayList +} + +/** + * A CipherState object contains k and n variables, which it uses to encrypt and + * decrypt ciphertext. + * + * During the handshake phase each party has a single CipherState, but during + * the transport phase each party has two CipherState objects: one for sending, + * and one for receiving. + */ +export interface ICipherState { + /** + * A cipher key of 32 bytes (which may be empty). Empty is a special value + * which indicates k has not yet been initialized. */ + k?: Uint8Array + /** + * An 8-byte (64-bit) unsigned integer nonce. + * + * For performance reasons, the nonce is represented as a Nonce object + * The nonce is treated as a uint64, even though the underlying `number` only + * has 52 safely-available bits. + */ + n: Nonce +} + +/** + * A SymmetricState object contains a CipherState plus ck and h variables. It is + * so-named because it encapsulates all the "symmetric crypto" used by Noise. + * + * During the handshake phase each party has a single SymmetricState, which can + * be deleted once the handshake is finished. + */ +export interface ISymmetricState { + cs: ICipherState + /** A chaining key of 32 bytes. */ + ck: Uint8Array + /** A hash output of 32 bytes. */ + h: Uint8Array +} + +/** + * A HandshakeState object contains a SymmetricState plus DH variables (s, e, + * rs, re) and a variable representing the handshake pattern. + * + * During the handshake phase each party has a single HandshakeState, which can + * be deleted once the handshake is finished. 
+ */ +export interface IHandshakeState { + ss: ISymmetricState + /** The local static key pair */ + s?: KeyPair + /** The local ephemeral key pair */ + e?: KeyPair + /** The remote party's static public key */ + rs?: Uint8Array | Uint8ArrayList + /** The remote party's ephemeral public key */ + re?: Uint8Array | Uint8ArrayList +} + +export interface KeyPair { + publicKey: Uint8Array + privateKey: Uint8Array +} + +export interface INoiseExtensions { + webtransportCerthashes: Uint8Array[] +} + +export interface INoiseConnection extends ConnectionEncrypter { } diff --git a/packages/connection-encrypter-noise/src/utils.ts b/packages/connection-encrypter-noise/src/utils.ts new file mode 100644 index 0000000000..c0949517b9 --- /dev/null +++ b/packages/connection-encrypter-noise/src/utils.ts @@ -0,0 +1,225 @@ +import { publicKeyFromProtobuf, publicKeyToProtobuf } from '@libp2p/crypto/keys' +import { StreamMessageEvent, UnexpectedPeerError } from '@libp2p/interface' +import { AbstractMessageStream, LengthPrefixedDecoder } from '@libp2p/utils' +import { Uint8ArrayList } from 'uint8arraylist' +import { concat as uint8ArrayConcat } from 'uint8arrays/concat' +import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' +import { CHACHA_TAG_LENGTH, NOISE_MSG_MAX_LENGTH_BYTES, NOISE_MSG_MAX_LENGTH_BYTES_WITHOUT_TAG } from './constants.ts' +import { uint16BEEncode, uint16BEDecode } from './encoder.ts' +import { NoiseHandshakePayload } from './proto/payload.js' +import type { MetricsRegistry } from './metrics.ts' +import type { NoiseExtensions } from './proto/payload.js' +import type { HandshakeResult } from './types.ts' +import type { AbortOptions, MessageStream, PrivateKey, PublicKey } from '@libp2p/interface' +import type { SendResult } from '@libp2p/utils' + +export async function createHandshakePayload ( + privateKey: PrivateKey, + staticPublicKey: Uint8Array | Uint8ArrayList, + extensions?: NoiseExtensions +): Promise { + const identitySig = await privateKey.sign(getSignaturePayload(staticPublicKey)) + + return NoiseHandshakePayload.encode({ + identityKey: publicKeyToProtobuf(privateKey.publicKey), + identitySig, + extensions + }) +} + +export async function decodeHandshakePayload ( + payloadBytes: Uint8Array | Uint8ArrayList, + remoteStaticKey?: Uint8Array | Uint8ArrayList, + remoteIdentityKey?: PublicKey +): Promise { + try { + const payload = NoiseHandshakePayload.decode(payloadBytes) + const publicKey = publicKeyFromProtobuf(payload.identityKey) + + if (remoteIdentityKey?.equals(publicKey) === false) { + throw new Error(`Payload identity key ${publicKey} does not match expected remote identity key ${remoteIdentityKey}`) + } + + if (!remoteStaticKey) { + throw new Error('Remote static does not exist') + } + + const signaturePayload = getSignaturePayload(remoteStaticKey) + + if (!(await publicKey.verify(signaturePayload, payload.identitySig))) { + throw new Error('Invalid payload signature') + } + + return payload + } catch (e) { + throw new UnexpectedPeerError((e as Error).message) + } +} + +export function getSignaturePayload (publicKey: Uint8Array | Uint8ArrayList): Uint8Array | Uint8ArrayList { + const prefix = uint8ArrayFromString('noise-libp2p-static-key:') + + if (publicKey instanceof Uint8Array) { + return uint8ArrayConcat([prefix, publicKey], prefix.length + publicKey.length) + } + + publicKey.prepend(prefix) + + return publicKey +} + +class EncryptedMessageStream extends AbstractMessageStream { + private stream: MessageStream + private handshake: HandshakeResult + private 
metrics?: MetricsRegistry + private decoder: LengthPrefixedDecoder + + constructor (stream: MessageStream, handshake: HandshakeResult, metrics?: MetricsRegistry) { + super({ + log: stream.log, + inactivityTimeout: stream.inactivityTimeout, + maxPauseBufferLength: stream.maxPauseBufferLength, + direction: stream.direction + }) + + this.stream = stream + this.handshake = handshake + this.metrics = metrics + this.decoder = new LengthPrefixedDecoder({ + lengthDecoder: uint16BEDecode, + encodingLength: () => 2 + }) + + this.stream.addEventListener('message', (evt) => { + try { + for (const buf of this.decoder.decode(evt.data)) { + const decrypted = this.decrypt(buf) + this.dispatchEvent(new StreamMessageEvent(decrypted)) + } + } catch (err: any) { + this.abort(err) + } + }) + + this.stream.addEventListener('close', (evt) => { + if (evt.error != null) { + if (evt.local === true) { + this.abort(evt.error) + } else { + this.onRemoteReset() + } + } else { + this.onClosed() + } + }) + + this.stream.addEventListener('drain', () => { + this.safeDispatchEvent('drain') + }) + + this.stream.addEventListener('remoteCloseWrite', () => { + this.onRemoteCloseWrite() + }) + + this.stream.addEventListener('remoteCloseRead', () => { + this.onRemoteCloseRead() + }) + } + + encrypt (chunk: Uint8Array | Uint8ArrayList): Uint8ArrayList { + const output = new Uint8ArrayList() + + for (let i = 0; i < chunk.byteLength; i += NOISE_MSG_MAX_LENGTH_BYTES_WITHOUT_TAG) { + let end = i + NOISE_MSG_MAX_LENGTH_BYTES_WITHOUT_TAG + if (end > chunk.byteLength) { + end = chunk.byteLength + } + + let data: Uint8Array | Uint8ArrayList + + if (chunk instanceof Uint8Array) { + data = this.handshake.encrypt(chunk.subarray(i, end)) + } else { + data = this.handshake.encrypt(chunk.sublist(i, end)) + } + + this.metrics?.encryptedPackets.increment() + + output.append(uint16BEEncode(data.byteLength)) + output.append(data) + } + + return output + } + + decrypt (chunk: Uint8Array | Uint8ArrayList): Uint8ArrayList { + const output = new Uint8ArrayList() + + for (let i = 0; i < chunk.byteLength; i += NOISE_MSG_MAX_LENGTH_BYTES) { + let end = i + NOISE_MSG_MAX_LENGTH_BYTES + if (end > chunk.byteLength) { + end = chunk.byteLength + } + + if (end - CHACHA_TAG_LENGTH < i) { + throw new Error('Invalid chunk') + } + + let encrypted: Uint8Array | Uint8ArrayList + + if (chunk instanceof Uint8Array) { + encrypted = chunk.subarray(i, end) + } else { + encrypted = chunk.sublist(i, end) + } + + // memory allocation is not cheap so reuse the encrypted Uint8Array + // see https://github.com/ChainSafe/js-libp2p-noise/pull/242#issue-1422126164 + // this is ok because chacha20 reads bytes one by one and don't reread after that + // it's also tested in https://github.com/ChainSafe/as-chacha20poly1305/pull/1/files#diff-25252846b58979dcaf4e41d47b3eadd7e4f335e7fb98da6c049b1f9cd011f381R48 + const dst = chunk.subarray(i, end - CHACHA_TAG_LENGTH) + try { + const plaintext = this.handshake.decrypt(encrypted, dst) + this.metrics?.decryptedPackets.increment() + + output.append(plaintext) + } catch (e) { + this.metrics?.decryptErrors.increment() + throw e + } + } + + return output + } + + sendPause (): void { + this.stream.pause() + } + + sendResume (): void { + this.stream.resume() + } + + async sendCloseWrite (options?: AbortOptions): Promise { + return this.stream.closeWrite(options) + } + + async sendCloseRead (options?: AbortOptions): Promise { + return this.stream.closeRead(options) + } + + sendReset (err: Error): void { + this.stream.abort(err) + } + + sendData 
(data: Uint8ArrayList): SendResult { + return { + sentBytes: data.byteLength, + canSendMore: this.stream.send(this.encrypt(data)) + } + } +} + +export function toMessageStream (connection: MessageStream, handshake: HandshakeResult, metrics?: MetricsRegistry): MessageStream { + return new EncryptedMessageStream(connection, handshake, metrics) +} diff --git a/packages/connection-encrypter-noise/test/compliance.spec.ts b/packages/connection-encrypter-noise/test/compliance.spec.ts new file mode 100644 index 0000000000..fc458a5357 --- /dev/null +++ b/packages/connection-encrypter-noise/test/compliance.spec.ts @@ -0,0 +1,26 @@ +import { generateKeyPair } from '@libp2p/crypto/keys' +import tests from '@libp2p/interface-compliance-tests/connection-encryption' +import { defaultLogger } from '@libp2p/logger' +import { peerIdFromPrivateKey } from '@libp2p/peer-id' +import { stubInterface } from 'sinon-ts' +import { Noise } from '../src/noise.js' +import type { Upgrader } from '@libp2p/interface' + +describe('spec compliance tests', function () { + tests({ + async setup (opts) { + const privateKey = opts?.privateKey ?? await generateKeyPair('Ed25519') + const peerId = peerIdFromPrivateKey(privateKey) + + return new Noise({ + privateKey, + peerId, + logger: defaultLogger(), + upgrader: stubInterface({ + getStreamMuxers: () => new Map() + }) + }) + }, + async teardown () {} + }) +}) diff --git a/packages/connection-encrypter-noise/test/fixtures/peer.ts b/packages/connection-encrypter-noise/test/fixtures/peer.ts new file mode 100644 index 0000000000..a044c31c4f --- /dev/null +++ b/packages/connection-encrypter-noise/test/fixtures/peer.ts @@ -0,0 +1,47 @@ +import { generateKeyPair, privateKeyFromProtobuf } from '@libp2p/crypto/keys' +import { peerIdFromPrivateKey } from '@libp2p/peer-id' +import { base64pad } from 'multiformats/bases/base64' +import type { PeerId, PrivateKey } from '@libp2p/interface' + +// ed25519 keys +const peers = [{ + id: '12D3KooWH45PiqBjfnEfDfCD6TqJrpqTBJvQDwGHvjGpaWwms46D', + privKey: 'CAESYBtKXrMwawAARmLScynQUuSwi/gGSkwqDPxi15N3dqDHa4T4iWupkMe5oYGwGH3Hyfvd/QcgSTqg71oYZJadJ6prhPiJa6mQx7mhgbAYfcfJ+939ByBJOqDvWhhklp0nqg==', + pubKey: 'CAESIGuE+IlrqZDHuaGBsBh9x8n73f0HIEk6oO9aGGSWnSeq' +}, { + id: '12D3KooWP63uzL78BRMpkQ7augMdNi1h3VBrVWZucKjyhzGVaSi1', + privKey: 'CAESYPxO3SHyfc2578hDmfkGGBY255JjiLuVavJWy+9ivlpsxSyVKf36ipyRGL6szGzHuFs5ceEuuGVrPMg/rW2Ch1bFLJUp/fqKnJEYvqzMbMe4Wzlx4S64ZWs8yD+tbYKHVg==', + pubKey: 'CAESIMUslSn9+oqckRi+rMxsx7hbOXHhLrhlazzIP61tgodW' +}, { + id: '12D3KooWF85R7CM2Wikdtb2sjwnd24e1tgojf3MEWwizmVB8PA6U', + privKey: 'CAESYNXoQ5CnooE939AEqE2JJGPqvhoFJn0xP+j9KwjfOfDkTtPyfn2kJ1gn3uOYTcmoHFU1bbETNtRVuPMi1fmDmqFO0/J+faQnWCfe45hNyagcVTVtsRM21FW48yLV+YOaoQ==', + pubKey: 'CAESIE7T8n59pCdYJ97jmE3JqBxVNW2xEzbUVbjzItX5g5qh' +}, { + id: '12D3KooWPCofiCjhdtezP4eMnqBjjutFZNHjV39F5LWNrCvaLnzT', + privKey: 'CAESYLhUut01XPu+yIPbtZ3WnxOd26FYuTMRn/BbdFYsZE2KxueKRlo9yIAxmFReoNFUKztUU4G2aUiTbqDQaA6i0MDG54pGWj3IgDGYVF6g0VQrO1RTgbZpSJNuoNBoDqLQwA==', + pubKey: 'CAESIMbnikZaPciAMZhUXqDRVCs7VFOBtmlIk26g0GgOotDA' +}] + +export async function createPeerIdsFromFixtures (length: number): Promise> { + return Promise.all( + Array.from({ length }).map(async (_, i) => { + const privateKey = privateKeyFromProtobuf(base64pad.decode(`M${peers[i].privKey}`)) + + return { + privateKey, + peerId: peerIdFromPrivateKey(privateKey) + } + }) + ) +} + +export async function createPeerIds (length: number): Promise { + const peerIds: PeerId[] = [] + for (let i = 0; i < length; i++) { + const privateKey = 
await generateKeyPair('Ed25519') + const id = peerIdFromPrivateKey(privateKey) + peerIds.push(id) + } + + return peerIds +} diff --git a/packages/connection-encrypter-noise/test/index.spec.ts b/packages/connection-encrypter-noise/test/index.spec.ts new file mode 100644 index 0000000000..5d45cb7984 --- /dev/null +++ b/packages/connection-encrypter-noise/test/index.spec.ts @@ -0,0 +1,90 @@ +import { generateKeyPair } from '@libp2p/crypto/keys' +import { defaultLogger } from '@libp2p/logger' +import { peerIdFromPrivateKey } from '@libp2p/peer-id' +import { lpStream, multiaddrConnectionPair } from '@libp2p/utils' +import { expect } from 'aegir/chai' +import sinon from 'sinon' +import { stubInterface } from 'sinon-ts' +import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' +import { noise } from '../src/index.js' +import { Noise } from '../src/noise.js' +import type { Metrics, Upgrader } from '@libp2p/interface' + +function createCounterSpy (): ReturnType { + return sinon.spy({ + increment: () => {}, + reset: () => {} + }) +} + +describe('Index', () => { + it('should expose class with tag and required functions', async () => { + const privateKey = await generateKeyPair('Ed25519') + const peerId = peerIdFromPrivateKey(privateKey) + + const noiseInstance = noise()({ + privateKey, + peerId, + logger: defaultLogger(), + upgrader: stubInterface({ + getStreamMuxers: () => new Map() + }) + }) + expect(noiseInstance.protocol).to.equal('/noise') + expect(typeof (noiseInstance.secureInbound)).to.equal('function') + expect(typeof (noiseInstance.secureOutbound)).to.equal('function') + }) + + it('should collect metrics', async () => { + const metricsRegistry = new Map>() + const metrics = { + registerCounter: (name: string) => { + const counter = createCounterSpy() + metricsRegistry.set(name, counter) + return counter + } + } + + const privateKeyInit = await generateKeyPair('Ed25519') + const peerIdInit = peerIdFromPrivateKey(privateKeyInit) + const noiseInit = new Noise({ + privateKey: privateKeyInit, + peerId: peerIdInit, + logger: defaultLogger(), + metrics: metrics as any as Metrics, + upgrader: stubInterface({ + getStreamMuxers: () => new Map() + }) + }) + + const privateKeyResp = await generateKeyPair('Ed25519') + const peerIdResp = peerIdFromPrivateKey(privateKeyResp) + const noiseResp = new Noise({ + privateKey: privateKeyResp, + peerId: peerIdResp, + logger: defaultLogger(), + upgrader: stubInterface({ + getStreamMuxers: () => new Map() + }) + }) + + const [inboundConnection, outboundConnection] = multiaddrConnectionPair() + const [outbound, inbound] = await Promise.all([ + noiseInit.secureOutbound(outboundConnection, { + remotePeer: peerIdResp + }), + noiseResp.secureInbound(inboundConnection, { + remotePeer: peerIdInit + }) + ]) + const wrappedInbound = lpStream(inbound.connection) + const wrappedOutbound = lpStream(outbound.connection) + + await wrappedOutbound.write(uint8ArrayFromString('test')) + await wrappedInbound.read() + expect(metricsRegistry.get('libp2p_noise_xxhandshake_successes_total')?.increment.callCount).to.equal(1) + expect(metricsRegistry.get('libp2p_noise_xxhandshake_error_total')?.increment.callCount).to.equal(0) + expect(metricsRegistry.get('libp2p_noise_encrypted_packets_total')?.increment.callCount).to.equal(1) + expect(metricsRegistry.get('libp2p_noise_decrypt_errors_total')?.increment.callCount).to.equal(0) + }) +}) diff --git a/packages/connection-encrypter-noise/test/interop.ts b/packages/connection-encrypter-noise/test/interop.ts new file mode 
100644 index 0000000000..2eb703ed0e --- /dev/null +++ b/packages/connection-encrypter-noise/test/interop.ts @@ -0,0 +1,113 @@ +import fs from 'fs' +import { yamux } from '@chainsafe/libp2p-yamux' +import { privateKeyFromProtobuf } from '@libp2p/crypto/keys' +import { createClient } from '@libp2p/daemon-client' +import { createServer } from '@libp2p/daemon-server' +import { connectInteropTests } from '@libp2p/interop' +import { logger } from '@libp2p/logger' +import { tcp } from '@libp2p/tcp' +import { multiaddr } from '@multiformats/multiaddr' +import { execa } from 'execa' +import { path as p2pd } from 'go-libp2p' +import { createLibp2p } from 'libp2p' +import pDefer from 'p-defer' +import { noise } from '../src/index.js' +import type { PrivateKey } from '@libp2p/interface' +import type { SpawnOptions, Daemon, DaemonFactory } from '@libp2p/interop' +import type { Libp2pOptions } from 'libp2p' + +async function createGoPeer (options: SpawnOptions): Promise { + const controlPort = Math.floor(Math.random() * (50000 - 10000 + 1)) + 10000 + const apiAddr = multiaddr(`/ip4/0.0.0.0/tcp/${controlPort}`) + + const log = logger(`go-libp2p:${controlPort}`) + + const opts = [ + `-listen=${apiAddr.toString()}`, + '-hostAddrs=/ip4/0.0.0.0/tcp/0' + ] + + if (options.encryption === 'noise') { + opts.push('-noise=true') + } + + if (options.key != null) { + opts.push(`-id=${options.key}`) + } + + const deferred = pDefer() + const proc = execa(p2pd(), opts) + + proc.stdout?.on('data', (buf: Buffer) => { + const str = buf.toString() + log(str) + + // daemon has started + if (str.includes('Control socket:')) { + deferred.resolve() + } + }) + + proc.stderr?.on('data', (buf) => { + log.error(buf.toString()) + }) + + await deferred.promise + + return { + client: createClient(apiAddr), + stop: async () => { + proc.kill() + } + } +} + +async function createJsPeer (options: SpawnOptions): Promise { + let privateKey: PrivateKey | undefined + + if (options.key != null) { + const keyFile = fs.readFileSync(options.key) + privateKey = privateKeyFromProtobuf(keyFile) + } + + const opts: Libp2pOptions = { + privateKey, + addresses: { + listen: ['/ip4/0.0.0.0/tcp/0'] + }, + transports: [tcp()], + streamMuxers: [yamux()], + connectionEncrypters: [noise()] + } + + const node = await createLibp2p(opts) + const server = createServer(multiaddr('/ip4/0.0.0.0/tcp/0'), node as any) + await server.start() + + return { + client: createClient(server.getMultiaddr()), + stop: async () => { + await server.stop() + await node.stop() + } + } +} + +async function main (): Promise { + const factory: DaemonFactory = { + async spawn (options: SpawnOptions) { + if (options.type === 'go') { + return createGoPeer(options) + } + + return createJsPeer(options) + } + } + + connectInteropTests(factory) +} + +main().catch(err => { + console.error(err) // eslint-disable-line no-console + process.exit(1) +}) diff --git a/packages/connection-encrypter-noise/test/muxers.spec.ts b/packages/connection-encrypter-noise/test/muxers.spec.ts new file mode 100644 index 0000000000..3ccd6e6d4b --- /dev/null +++ b/packages/connection-encrypter-noise/test/muxers.spec.ts @@ -0,0 +1,167 @@ +import { defaultLogger } from '@libp2p/logger' +import { multiaddrConnectionPair } from '@libp2p/utils' +import { expect } from 'aegir/chai' +import { stubInterface } from 'sinon-ts' +import { Noise } from '../src/noise.js' +import { createPeerIdsFromFixtures } from './fixtures/peer.js' +import type { StreamMuxerFactory, Upgrader, SecureConnectionOptions, SecuredConnection, PeerId, 
PrivateKey } from '@libp2p/interface' +import type { StubbedInstance } from 'sinon-ts' + +describe('early muxer selection', () => { + let initUpgrader: StubbedInstance + let respUpgrader: StubbedInstance + let remotePeer: { peerId: PeerId, privateKey: PrivateKey } + let localPeer: { peerId: PeerId, privateKey: PrivateKey } + + beforeEach(async () => { + [localPeer, remotePeer] = await createPeerIdsFromFixtures(2) + + initUpgrader = stubInterface() + respUpgrader = stubInterface() + }) + + async function testMuxerNegotiation (outboundOpts?: SecureConnectionOptions, inboundOpts?: SecureConnectionOptions): Promise<[SecuredConnection, SecuredConnection]> { + const noiseInit = new Noise({ + ...localPeer, + logger: defaultLogger(), + upgrader: initUpgrader + }) + const noiseResp = new Noise({ + ...remotePeer, + logger: defaultLogger(), + upgrader: respUpgrader + }) + + const [inboundConnection, outboundConnection] = multiaddrConnectionPair() + + return Promise.all([ + noiseInit.secureOutbound(outboundConnection, { + remotePeer: remotePeer.peerId, + ...inboundOpts + }), + noiseResp.secureInbound(inboundConnection, { + remotePeer: localPeer.peerId, + ...outboundOpts + }) + ]) + } + + it('should negotiate early stream muxer', async () => { + const commonMuxer = '/common/muxer' + + initUpgrader.getStreamMuxers.returns(new Map([ + ['/other/muxer', stubInterface()], + [commonMuxer, stubInterface({ + protocol: commonMuxer + })] + ])) + respUpgrader.getStreamMuxers.returns(new Map([ + [commonMuxer, stubInterface({ + protocol: commonMuxer + })], + ['/another/muxer', stubInterface()] + ])) + + const [securedInbound, securedOutbound] = await testMuxerNegotiation() + + expect(securedInbound).to.have.nested.property('streamMuxer.protocol', commonMuxer) + expect(securedOutbound).to.have.nested.property('streamMuxer.protocol', commonMuxer) + }) + + it('should fail to negotiate early muxer when there are no common muxers', async () => { + initUpgrader.getStreamMuxers.returns(new Map([ + ['/other/muxer', stubInterface()], + ['/yet/other/muxer', stubInterface()] + ])) + respUpgrader.getStreamMuxers.returns(new Map([ + ['/another/muxer', stubInterface()], + ['/yet/another/muxer', stubInterface()] + ])) + + await expect(testMuxerNegotiation()).to.eventually.be.rejectedWith(/no common muxers/) + }) + + it('should not negotiate early muxer when no muxers are sent', async () => { + initUpgrader.getStreamMuxers.returns(new Map([])) + respUpgrader.getStreamMuxers.returns(new Map([])) + + const [securedInbound, securedOutbound] = await testMuxerNegotiation() + + expect(securedInbound).to.have.property('streamMuxer', undefined) + expect(securedOutbound).to.have.property('streamMuxer', undefined) + }) + + it('should skip selecting stream muxers', async () => { + const commonMuxer = '/common/muxer' + + initUpgrader.getStreamMuxers.returns(new Map([ + ['/other/muxer', stubInterface()], + [commonMuxer, stubInterface({ + protocol: commonMuxer + })] + ])) + respUpgrader.getStreamMuxers.returns(new Map([ + [commonMuxer, stubInterface({ + protocol: commonMuxer + })], + ['/another/muxer', stubInterface()] + ])) + + const [securedInbound, securedOutbound] = await testMuxerNegotiation({ + skipStreamMuxerNegotiation: true + }, { + skipStreamMuxerNegotiation: true + }) + + expect(securedInbound).to.have.property('streamMuxer', undefined) + expect(securedOutbound).to.have.property('streamMuxer', undefined) + }) + + it('should not select muxer if only initiator requires it', async () => { + const commonMuxer = '/common/muxer' + + 
initUpgrader.getStreamMuxers.returns(new Map([ + ['/other/muxer', stubInterface()], + [commonMuxer, stubInterface({ + protocol: commonMuxer + })] + ])) + respUpgrader.getStreamMuxers.returns(new Map([ + [commonMuxer, stubInterface({ + protocol: commonMuxer + })], + ['/another/muxer', stubInterface()] + ])) + + const [securedInbound, securedOutbound] = await testMuxerNegotiation({ + skipStreamMuxerNegotiation: true + }) + + expect(securedInbound).to.have.property('streamMuxer', undefined) + expect(securedOutbound).to.have.property('streamMuxer', undefined) + }) + + it('should not select muxer if only responder requires it', async () => { + const commonMuxer = '/common/muxer' + + initUpgrader.getStreamMuxers.returns(new Map([ + ['/other/muxer', stubInterface()], + [commonMuxer, stubInterface({ + protocol: commonMuxer + })] + ])) + respUpgrader.getStreamMuxers.returns(new Map([ + [commonMuxer, stubInterface({ + protocol: commonMuxer + })], + ['/another/muxer', stubInterface()] + ])) + + const [securedInbound, securedOutbound] = await testMuxerNegotiation({}, { + skipStreamMuxerNegotiation: true + }) + + expect(securedInbound).to.have.property('streamMuxer', undefined) + expect(securedOutbound).to.have.property('streamMuxer', undefined) + }) +}) diff --git a/packages/connection-encrypter-noise/test/noise.spec.ts b/packages/connection-encrypter-noise/test/noise.spec.ts new file mode 100644 index 0000000000..27bdc08154 --- /dev/null +++ b/packages/connection-encrypter-noise/test/noise.spec.ts @@ -0,0 +1,305 @@ +import { Buffer } from 'buffer' +import { defaultLogger } from '@libp2p/logger' +import { lpStream, byteStream, multiaddrConnectionPair } from '@libp2p/utils' +import { assert, expect } from 'aegir/chai' +import { randomBytes } from 'iso-random-stream' +import sinon from 'sinon' +import { stubInterface } from 'sinon-ts' +import { equals as uint8ArrayEquals } from 'uint8arrays/equals' +import { toString as uint8ArrayToString } from 'uint8arrays/to-string' +import { pureJsCrypto } from '../src/crypto/js.js' +import { Noise } from '../src/noise.js' +import { createPeerIdsFromFixtures } from './fixtures/peer.js' +import type { StreamMuxerFactory, PeerId, PrivateKey, Upgrader } from '@libp2p/interface' + +describe('Noise', () => { + let remotePeer: { peerId: PeerId, privateKey: PrivateKey } + let localPeer: { peerId: PeerId, privateKey: PrivateKey } + const sandbox = sinon.createSandbox() + + before(async () => { + [localPeer, remotePeer] = await createPeerIdsFromFixtures(2) + }) + + afterEach(function () { + sandbox.restore() + }) + + it('should communicate through encrypted streams without noise pipes', async () => { + try { + const noiseInit = new Noise({ + ...localPeer, + logger: defaultLogger(), + upgrader: stubInterface({ + getStreamMuxers: () => new Map() + }) + }, { staticNoiseKey: undefined, extensions: undefined }) + const noiseResp = new Noise({ + ...remotePeer, + logger: defaultLogger(), + upgrader: stubInterface({ + getStreamMuxers: () => new Map() + }) + }, { staticNoiseKey: undefined, extensions: undefined }) + + const [inboundConnection, outboundConnection] = multiaddrConnectionPair() + const [outbound, inbound] = await Promise.all([ + noiseInit.secureOutbound(outboundConnection, { + remotePeer: remotePeer.peerId + }), + noiseResp.secureInbound(inboundConnection, { + remotePeer: localPeer.peerId + }) + ]) + + expect(inbound).to.not.have.property('streamMuxer', 'inbound connection selected early muxer') + expect(outbound).to.not.have.property('streamMuxer', 'outbound connection 
selected early muxer') + + const wrappedInbound = lpStream(inbound.connection) + const wrappedOutbound = lpStream(outbound.connection) + + await wrappedOutbound.write(Buffer.from('test')) + const response = await wrappedInbound.read() + expect(uint8ArrayToString(response.slice())).equal('test') + } catch (e) { + const err = e as Error + assert(false, err.message) + } + }) + + it('should test large payloads', async function () { + this.timeout(10000) + try { + const noiseInit = new Noise({ + ...localPeer, + logger: defaultLogger(), + upgrader: stubInterface({ + getStreamMuxers: () => new Map() + }) + }, { staticNoiseKey: undefined }) + const noiseResp = new Noise({ + ...remotePeer, + logger: defaultLogger(), + upgrader: stubInterface({ + getStreamMuxers: () => new Map() + }) + }, { staticNoiseKey: undefined }) + + const [inboundConnection, outboundConnection] = multiaddrConnectionPair() + const [outbound, inbound] = await Promise.all([ + noiseInit.secureOutbound(outboundConnection, { + remotePeer: remotePeer.peerId + }), + noiseResp.secureInbound(inboundConnection, { + remotePeer: localPeer.peerId + }) + ]) + const wrappedInbound = byteStream(inbound.connection) + const wrappedOutbound = lpStream(outbound.connection) + + const largePlaintext = randomBytes(60000) + await wrappedOutbound.write(Buffer.from(largePlaintext)) + const response = await wrappedInbound.read({ + bytes: 60000 + }) + + expect(response.length).equals(largePlaintext.length) + } catch (e) { + const err = e as Error + assert(false, err.message) + } + }) + + it('should work without remote peer provided in incoming connection', async () => { + try { + const staticKeysInitiator = pureJsCrypto.generateX25519KeyPair() + const noiseInit = new Noise({ + ...localPeer, + logger: defaultLogger(), + upgrader: stubInterface({ + getStreamMuxers: () => new Map() + }) + }, { staticNoiseKey: staticKeysInitiator.privateKey }) + const staticKeysResponder = pureJsCrypto.generateX25519KeyPair() + const noiseResp = new Noise({ + ...remotePeer, + logger: defaultLogger(), + upgrader: stubInterface({ + getStreamMuxers: () => new Map() + }) + }, { staticNoiseKey: staticKeysResponder.privateKey }) + + const [inboundConnection, outboundConnection] = multiaddrConnectionPair() + const [outbound, inbound] = await Promise.all([ + noiseInit.secureOutbound(outboundConnection, { + remotePeer: remotePeer.peerId + }), + noiseResp.secureInbound(inboundConnection) + ]) + const wrappedInbound = lpStream(inbound.connection) + const wrappedOutbound = lpStream(outbound.connection) + + await wrappedOutbound.write(Buffer.from('test v2')) + const response = await wrappedInbound.read() + expect(uint8ArrayToString(response.slice())).equal('test v2') + + if (inbound.remotePeer.publicKey == null || localPeer.peerId.publicKey == null || + outbound.remotePeer.publicKey == null || remotePeer.peerId.publicKey == null) { + throw new Error('Public key missing from PeerId') + } + + expect(inbound.remotePeer.publicKey?.raw).to.equalBytes(localPeer.peerId.publicKey.raw) + expect(outbound.remotePeer.publicKey?.raw).to.equalBytes(remotePeer.peerId.publicKey.raw) + } catch (e) { + const err = e as Error + assert(false, err.message) + } + }) + + it('should accept and return Noise extension from remote peer', async () => { + try { + const certhashInit = Buffer.from('certhash data from init') + const staticKeysInitiator = pureJsCrypto.generateX25519KeyPair() + const noiseInit = new Noise({ + ...localPeer, + logger: defaultLogger(), + upgrader: stubInterface({ + getStreamMuxers: () =>
new Map() + }) + }, { staticNoiseKey: staticKeysInitiator.privateKey, extensions: { webtransportCerthashes: [certhashInit] } }) + const staticKeysResponder = pureJsCrypto.generateX25519KeyPair() + const certhashResp = Buffer.from('certhash data from response') + const noiseResp = new Noise({ + ...remotePeer, + logger: defaultLogger(), + upgrader: stubInterface({ + getStreamMuxers: () => new Map() + }) + }, { staticNoiseKey: staticKeysResponder.privateKey, extensions: { webtransportCerthashes: [certhashResp] } }) + + const [inboundConnection, outboundConnection] = multiaddrConnectionPair() + const [outbound, inbound] = await Promise.all([ + noiseInit.secureOutbound(outboundConnection, { + remotePeer: remotePeer.peerId + }), + noiseResp.secureInbound(inboundConnection) + ]) + + assert(uint8ArrayEquals(inbound.remoteExtensions?.webtransportCerthashes[0] ?? new Uint8Array(), certhashInit)) + assert(uint8ArrayEquals(outbound.remoteExtensions?.webtransportCerthashes[0] ?? new Uint8Array(), certhashResp)) + } catch (e) { + const err = e as Error + assert(false, err.message) + } + }) + + it('should accept and return early muxer from remote peer', async () => { + try { + const streamMuxerProtocol = '/my-early-muxer' + const streamMuxer = stubInterface({ + protocol: streamMuxerProtocol + }) + const staticKeysInitiator = pureJsCrypto.generateX25519KeyPair() + const noiseInit = new Noise({ + ...localPeer, + logger: defaultLogger(), + upgrader: stubInterface({ + getStreamMuxers: () => new Map([[streamMuxerProtocol, streamMuxer]]) + }) + }, { staticNoiseKey: staticKeysInitiator.privateKey }) + const staticKeysResponder = pureJsCrypto.generateX25519KeyPair() + const noiseResp = new Noise({ + ...remotePeer, + logger: defaultLogger(), + upgrader: stubInterface({ + getStreamMuxers: () => new Map([[streamMuxerProtocol, streamMuxer]]) + }) + }, { staticNoiseKey: staticKeysResponder.privateKey }) + + const [inboundConnection, outboundConnection] = multiaddrConnectionPair() + const [outbound, inbound] = await Promise.all([ + noiseInit.secureOutbound(outboundConnection, { + remotePeer: remotePeer.peerId + }), + noiseResp.secureInbound(inboundConnection) + ]) + + expect(inbound).to.have.nested.property('streamMuxer.protocol', streamMuxerProtocol, 'inbound connection did not select early muxer') + expect(outbound).to.have.nested.property('streamMuxer.protocol', streamMuxerProtocol, 'outbound connection did not select early muxer') + } catch (e) { + const err = e as Error + assert(false, err.message) + } + }) + + it('should accept a prologue', async () => { + try { + const noiseInit = new Noise({ + ...localPeer, + logger: defaultLogger(), + upgrader: stubInterface({ + getStreamMuxers: () => new Map() + }) + }, { staticNoiseKey: undefined, crypto: pureJsCrypto, prologueBytes: Buffer.from('Some prologue') }) + const noiseResp = new Noise({ + ...remotePeer, + logger: defaultLogger(), + upgrader: stubInterface({ + getStreamMuxers: () => new Map() + }) + }, { staticNoiseKey: undefined, crypto: pureJsCrypto, prologueBytes: Buffer.from('Some prologue') }) + + const [inboundConnection, outboundConnection] = multiaddrConnectionPair() + const [outbound, inbound] = await Promise.all([ + noiseInit.secureOutbound(outboundConnection, { + remotePeer: remotePeer.peerId + }), + noiseResp.secureInbound(inboundConnection, { + remotePeer: localPeer.peerId + }) + ]) + const wrappedInbound = lpStream(inbound.connection) + const wrappedOutbound = lpStream(outbound.connection) + + await wrappedOutbound.write(Buffer.from('test')) + const 
response = await wrappedInbound.read() + expect(uint8ArrayToString(response.slice())).equal('test') + } catch (e) { + const err = e as Error + assert(false, err.message) + } + }) + + it('should abort noise handshake', async () => { + const abortController = new AbortController() + abortController.abort() + + const noiseInit = new Noise({ + ...localPeer, + logger: defaultLogger(), + upgrader: stubInterface({ + getStreamMuxers: () => new Map() + }) + }, { staticNoiseKey: undefined, extensions: undefined }) + const noiseResp = new Noise({ + ...remotePeer, + logger: defaultLogger(), + upgrader: stubInterface({ + getStreamMuxers: () => new Map() + }) + }, { staticNoiseKey: undefined, extensions: undefined }) + + const [inboundConnection, outboundConnection] = multiaddrConnectionPair() + + await expect(Promise.all([ + noiseInit.secureOutbound(outboundConnection, { + remotePeer: remotePeer.peerId, + signal: abortController.signal + }), + noiseResp.secureInbound(inboundConnection, { + remotePeer: localPeer.peerId + }) + ])).to.eventually.be.rejected + .with.property('name', 'AbortError') + }) +}) diff --git a/packages/connection-encrypter-noise/test/performHandshake.spec.ts b/packages/connection-encrypter-noise/test/performHandshake.spec.ts new file mode 100644 index 0000000000..96be395f3a --- /dev/null +++ b/packages/connection-encrypter-noise/test/performHandshake.spec.ts @@ -0,0 +1,131 @@ +import { Buffer } from 'buffer' +import { defaultLogger } from '@libp2p/logger' +import { multiaddrConnectionPair, lpStream } from '@libp2p/utils' +import { assert, expect } from 'aegir/chai' +import { equals as uint8ArrayEquals } from 'uint8arrays/equals' +import { defaultCrypto } from '../src/crypto/index.js' +import { wrapCrypto } from '../src/crypto.js' +import { performHandshakeInitiator, performHandshakeResponder } from '../src/performHandshake.js' +import { createPeerIdsFromFixtures } from './fixtures/peer.js' +import type { PrivateKey, PeerId } from '@libp2p/interface' + +describe('performHandshake', () => { + let peerA: { peerId: PeerId, privateKey: PrivateKey } + let peerB: { peerId: PeerId, privateKey: PrivateKey } + let fakePeer: { peerId: PeerId, privateKey: PrivateKey } + + before(async () => { + [peerA, peerB, fakePeer] = await createPeerIdsFromFixtures(3) + if (!peerA.privateKey || !peerB.privateKey || !fakePeer.privateKey) { throw new Error('unreachable') } + }) + + it('should propose, exchange and finish handshake', async () => { + const duplex = multiaddrConnectionPair() + const connectionInitiator = lpStream(duplex[0]) + const connectionResponder = lpStream(duplex[1]) + + const prologue = Buffer.alloc(0) + const staticKeysInitiator = defaultCrypto.generateX25519KeyPair() + const staticKeysResponder = defaultCrypto.generateX25519KeyPair() + + const [initiator, responder] = await Promise.all([ + performHandshakeInitiator({ + log: defaultLogger().forComponent('test'), + connection: connectionInitiator, + crypto: wrapCrypto(defaultCrypto), + privateKey: peerA.privateKey, + prologue, + remoteIdentityKey: peerB.privateKey.publicKey, + s: staticKeysInitiator + }), + performHandshakeResponder({ + log: defaultLogger().forComponent('test'), + connection: connectionResponder, + crypto: wrapCrypto(defaultCrypto), + privateKey: peerB.privateKey, + prologue, + remoteIdentityKey: peerA.privateKey.publicKey, + s: staticKeysResponder + }) + ]) + + // Test encryption and decryption + const encrypted = initiator.encrypt(Buffer.from('encrypt this')) + const decrypted = responder.decrypt(encrypted) + 
assert(uint8ArrayEquals(decrypted.subarray(), Buffer.from('encrypt this'))) + }) + + it('Initiator should fail to exchange handshake if given wrong public key in payload', async () => { + try { + const duplex = multiaddrConnectionPair() + const connectionInitiator = lpStream(duplex[0]) + const connectionResponder = lpStream(duplex[1]) + + const prologue = Buffer.alloc(0) + const staticKeysInitiator = defaultCrypto.generateX25519KeyPair() + const staticKeysResponder = defaultCrypto.generateX25519KeyPair() + + await Promise.all([ + performHandshakeInitiator({ + log: defaultLogger().forComponent('test'), + connection: connectionInitiator, + crypto: wrapCrypto(defaultCrypto), + privateKey: peerA.privateKey, + prologue, + remoteIdentityKey: fakePeer.privateKey.publicKey, // <----- look here + s: staticKeysInitiator + }), + performHandshakeResponder({ + log: defaultLogger().forComponent('test'), + connection: connectionResponder, + crypto: wrapCrypto(defaultCrypto), + privateKey: peerB.privateKey, + prologue, + remoteIdentityKey: peerA.privateKey.publicKey, + s: staticKeysResponder + }) + ]) + + assert(false, 'Should throw exception') + } catch (e) { + expect((e as Error).message).equals(`Payload identity key ${peerB.privateKey.publicKey} does not match expected remote identity key ${fakePeer.privateKey.publicKey}`) + } + }) + + it('Responder should fail to exchange handshake if given wrong public key in payload', async () => { + try { + const duplex = multiaddrConnectionPair() + const connectionInitiator = lpStream(duplex[0]) + const connectionResponder = lpStream(duplex[1]) + + const prologue = Buffer.alloc(0) + const staticKeysInitiator = defaultCrypto.generateX25519KeyPair() + const staticKeysResponder = defaultCrypto.generateX25519KeyPair() + + await Promise.all([ + performHandshakeInitiator({ + log: defaultLogger().forComponent('test'), + connection: connectionInitiator, + crypto: wrapCrypto(defaultCrypto), + privateKey: peerA.privateKey, + prologue, + remoteIdentityKey: peerB.privateKey.publicKey, + s: staticKeysInitiator + }), + performHandshakeResponder({ + log: defaultLogger().forComponent('test'), + connection: connectionResponder, + crypto: wrapCrypto(defaultCrypto), + privateKey: peerB.privateKey, + prologue, + remoteIdentityKey: fakePeer.privateKey.publicKey, + s: staticKeysResponder + }) + ]) + + assert(false, 'Should throw exception') + } catch (e) { + expect((e as Error).message).equals(`Payload identity key ${peerA.privateKey.publicKey} does not match expected remote identity key ${fakePeer.privateKey.publicKey}`) + } + }) +}) diff --git a/packages/connection-encrypter-noise/test/protocol.spec.ts b/packages/connection-encrypter-noise/test/protocol.spec.ts new file mode 100644 index 0000000000..017b0cae12 --- /dev/null +++ b/packages/connection-encrypter-noise/test/protocol.spec.ts @@ -0,0 +1,112 @@ +import { Buffer } from 'buffer' +import { expect, assert } from 'aegir/chai' +import { Uint8ArrayList } from 'uint8arraylist' +import { equals as uint8ArrayEquals } from 'uint8arrays/equals' +import { toString as uint8ArrayToString } from 'uint8arrays/to-string' +import { pureJsCrypto } from '../src/crypto/js.js' +import { wrapCrypto } from '../src/crypto.js' +import { XXHandshakeState, ZEROLEN } from '../src/protocol.js' +import type { CipherState, SymmetricState } from '../src/protocol.js' + +describe('XXHandshakeState', () => { + const prologue = Buffer.alloc(0) + const protocolName = 'Noise_XX_25519_ChaChaPoly_SHA256' + + it('Test creating new XX session', async () => { + try { 
+ // eslint-disable-next-line no-new + new XXHandshakeState({ crypto: wrapCrypto(pureJsCrypto), protocolName, initiator: true, prologue }) + } catch (e) { + assert(false, (e as Error).message) + } + }) + + it('Test get HKDF', () => { + const ckBytes = Buffer.from('4e6f6973655f58585f32353531395f58436861436861506f6c795f53484132353600000000000000000000000000000000000000000000000000000000000000', 'hex') + const ikm = Buffer.from('a3eae50ea37a47e8a7aa0c7cd8e16528670536dcd538cebfd724fb68ce44f1910ad898860666227d4e8dd50d22a9a64d1c0a6f47ace092510161e9e442953da3', 'hex') + const ck = Buffer.alloc(32) + ckBytes.copy(ck) + + const [k1, k2, k3] = pureJsCrypto.getHKDF(ck, ikm) + expect(uint8ArrayToString(k1, 'hex')).to.equal('cc5659adff12714982f806e2477a8d5ddd071def4c29bb38777b7e37046f6914') + expect(uint8ArrayToString(k2, 'hex')).to.equal('a16ada915e551ab623f38be674bb4ef15d428ae9d80688899c9ef9b62ef208fa') + expect(uint8ArrayToString(k3, 'hex')).to.equal('ff67bf9727e31b06efc203907e6786667d2c7a74ac412b4d31a80ba3fd766f68') + }) + + interface ProtocolHandshakeResult { ss: SymmetricState, cs1: CipherState, cs2: CipherState } + async function doHandshake (): Promise<{ nsInit: ProtocolHandshakeResult, nsResp: ProtocolHandshakeResult }> { + const kpInit = pureJsCrypto.generateX25519KeyPair() + const kpResp = pureJsCrypto.generateX25519KeyPair() + + // initiator: new XX noise session + const nsInit = new XXHandshakeState({ crypto: wrapCrypto(pureJsCrypto), protocolName, prologue, initiator: true, s: kpInit }) + // responder: new XX noise session + const nsResp = new XXHandshakeState({ crypto: wrapCrypto(pureJsCrypto), protocolName, prologue, initiator: false, s: kpResp }) + + /* STAGE 0 */ + + // initiator sends message + // responder receives message + nsResp.readMessageA(new Uint8ArrayList(nsInit.writeMessageA(ZEROLEN))) + + /* STAGE 1 */ + + // responder sends message + // initiator receives message + nsInit.readMessageB(new Uint8ArrayList(nsResp.writeMessageB(ZEROLEN))) + + /* STAGE 2 */ + + // initiator sends message + // responder receives message + nsResp.readMessageC(new Uint8ArrayList(nsInit.writeMessageC(ZEROLEN))) + + const nsInitSplit = nsInit.ss.split() + const nsRespSplit = nsResp.ss.split() + + assert(uint8ArrayEquals(nsInitSplit[0].k!, nsRespSplit[0].k!)) + + assert(uint8ArrayEquals(nsInitSplit[1].k!, nsRespSplit[1].k!)) + + return { + nsInit: { ss: nsInit.ss, cs1: nsInitSplit[0], cs2: nsInitSplit[1] }, + nsResp: { ss: nsResp.ss, cs1: nsRespSplit[0], cs2: nsRespSplit[1] } + } + } + + it('Test symmetric encrypt and decrypt', async () => { + try { + const { nsInit, nsResp } = await doHandshake() + const ad = Buffer.from('authenticated') + const message = Buffer.from('HelloCrypto') + + const ciphertext = nsInit.cs1.encryptWithAd(ad, message) + assert(!uint8ArrayEquals(Buffer.from('HelloCrypto'), ciphertext.subarray()), 'Encrypted message should not be same as plaintext.') + const decrypted = nsResp.cs1.decryptWithAd(ad, ciphertext) + + assert(uint8ArrayEquals(Buffer.from('HelloCrypto'), decrypted.subarray()), 'Decrypted text not equal to original message.') + } catch (e) { + assert(false, (e as Error).message) + } + }) + + it('Test multiple messages encryption and decryption', async () => { + const { nsInit, nsResp } = await doHandshake() + const ad = Buffer.from('authenticated') + + for (let i = 0; i < 50; i++) { + const strMessage = 'ethereum' + String(i) + const message = Buffer.from(strMessage) + { + const encrypted = nsInit.cs1.encryptWithAd(ad, message) + const decrypted = 
nsResp.cs1.decryptWithAd(ad, encrypted) + assert.equal(strMessage, uint8ArrayToString(decrypted.subarray(), 'utf8'), 'Decrypted text not equal to original message.') + } + { + const encrypted = nsResp.cs2.encryptWithAd(ad, message) + const decrypted = nsInit.cs2.decryptWithAd(ad, encrypted) + assert.equal(strMessage, uint8ArrayToString(decrypted.subarray(), 'utf8'), 'Decrypted text not equal to original message.') + } + } + }) +}) diff --git a/packages/connection-encrypter-noise/test/utils.ts b/packages/connection-encrypter-noise/test/utils.ts new file mode 100644 index 0000000000..c342022a1d --- /dev/null +++ b/packages/connection-encrypter-noise/test/utils.ts @@ -0,0 +1,6 @@ +import { keys } from '@libp2p/crypto' +import type { PrivateKey } from '@libp2p/interface' + +export async function generateEd25519Keys (): Promise { + return keys.generateKeyPair('Ed25519', 32) +} diff --git a/packages/connection-encrypter-noise/tsconfig.json b/packages/connection-encrypter-noise/tsconfig.json new file mode 100644 index 0000000000..13a3599639 --- /dev/null +++ b/packages/connection-encrypter-noise/tsconfig.json @@ -0,0 +1,10 @@ +{ + "extends": "aegir/src/config/tsconfig.aegir.json", + "compilerOptions": { + "outDir": "dist" + }, + "include": [ + "src", + "test" + ] +} diff --git a/packages/connection-encrypter-noise/typedoc.json b/packages/connection-encrypter-noise/typedoc.json new file mode 100644 index 0000000000..db0b0747ef --- /dev/null +++ b/packages/connection-encrypter-noise/typedoc.json @@ -0,0 +1,6 @@ +{ + "readme": "none", + "entryPoints": [ + "./src/index.ts" + ] +} diff --git a/packages/connection-encrypter-plaintext/package.json b/packages/connection-encrypter-plaintext/package.json index 4949a815eb..62fd28f910 100644 --- a/packages/connection-encrypter-plaintext/package.json +++ b/packages/connection-encrypter-plaintext/package.json @@ -48,7 +48,7 @@ "@libp2p/crypto": "^5.1.7", "@libp2p/interface": "^2.10.5", "@libp2p/peer-id": "^5.1.8", - "it-protobuf-stream": "^2.0.2", + "@libp2p/utils": "^6.7.1", "protons-runtime": "^5.5.0", "uint8arraylist": "^2.4.8", "uint8arrays": "^5.1.0" @@ -57,7 +57,6 @@ "@libp2p/crypto": "^5.1.7", "@libp2p/logger": "^5.1.21", "aegir": "^47.0.14", - "it-pair": "^2.0.6", "protons": "^7.6.1", "sinon": "^20.0.0" }, diff --git a/packages/connection-encrypter-plaintext/src/index.ts b/packages/connection-encrypter-plaintext/src/index.ts index ba2b0796d9..9f6777577b 100644 --- a/packages/connection-encrypter-plaintext/src/index.ts +++ b/packages/connection-encrypter-plaintext/src/index.ts @@ -24,10 +24,10 @@ import { publicKeyFromRaw } from '@libp2p/crypto/keys' import { UnexpectedPeerError, InvalidCryptoExchangeError, serviceCapabilities, ProtocolError } from '@libp2p/interface' import { peerIdFromPublicKey } from '@libp2p/peer-id' -import { pbStream } from 'it-protobuf-stream' +import { pbStream } from '@libp2p/utils' import { equals as uint8ArrayEquals } from 'uint8arrays/equals' import { Exchange, KeyType } from './pb/proto.js' -import type { ComponentLogger, Logger, MultiaddrConnection, ConnectionEncrypter, SecuredConnection, PrivateKey, SecureConnectionOptions, SecurableStream } from '@libp2p/interface' +import type { ComponentLogger, Logger, MultiaddrConnection, ConnectionEncrypter, SecuredConnection, PrivateKey, SecureConnectionOptions, MessageStream } from '@libp2p/interface' const PROTOCOL = '/plaintext/2.0.0' @@ -52,20 +52,20 @@ class Plaintext implements ConnectionEncrypter { '@libp2p/connection-encryption' ] - async secureInbound(conn: Stream, 
options?: SecureConnectionOptions): Promise> { - return this._encrypt(conn, options) + async secureInbound(connection: Stream, options?: SecureConnectionOptions): Promise { + return this._encrypt(connection, options) } - async secureOutbound(conn: Stream, options?: SecureConnectionOptions): Promise> { - return this._encrypt(conn, options) + async secureOutbound(connection: Stream, options?: SecureConnectionOptions): Promise { + return this._encrypt(connection, options) } /** * Encrypt connection */ - async _encrypt(conn: Stream, options?: SecureConnectionOptions): Promise> { - const log = conn.log?.newScope('plaintext') ?? this.log - const pb = pbStream(conn).pb(Exchange) + async _encrypt(connection: Stream, options?: SecureConnectionOptions): Promise { + const log = connection.log?.newScope('plaintext') ?? this.log + const pb = pbStream(connection).pb(Exchange) log('write pubkey exchange to peer %p', options?.remotePeer) @@ -118,7 +118,7 @@ class Plaintext implements ConnectionEncrypter { log('plaintext key exchange completed successfully with peer %p', peerId) return { - conn: pb.unwrap().unwrap(), + connection: pb.unwrap().unwrap(), remotePeer: peerId } } diff --git a/packages/connection-encrypter-plaintext/test/index.spec.ts b/packages/connection-encrypter-plaintext/test/index.spec.ts index 7a0a4eacce..51d71eef9a 100644 --- a/packages/connection-encrypter-plaintext/test/index.spec.ts +++ b/packages/connection-encrypter-plaintext/test/index.spec.ts @@ -3,8 +3,8 @@ import { generateKeyPair } from '@libp2p/crypto/keys' import { defaultLogger } from '@libp2p/logger' import { peerIdFromPrivateKey } from '@libp2p/peer-id' +import { streamPair } from '@libp2p/utils' import { expect } from 'aegir/chai' -import { duplexPair } from 'it-pair/duplex' import sinon from 'sinon' import { plaintext } from '../src/index.js' import type { ConnectionEncrypter, PeerId } from '@libp2p/interface' @@ -36,7 +36,7 @@ describe('plaintext', () => { }) it('should verify the public key and id match', async () => { - const [inbound, outbound] = duplexPair() + const [inbound, outbound] = await streamPair() await Promise.all([ encrypter.secureInbound(inbound), @@ -57,7 +57,7 @@ describe('plaintext', () => { logger: defaultLogger() }) - const [inbound, outbound] = duplexPair() + const [inbound, outbound] = await streamPair() await expect(Promise.all([ encrypter.secureInbound(inbound), diff --git a/packages/connection-encrypter-tls/package.json b/packages/connection-encrypter-tls/package.json index a94eb10b30..b09add7286 100644 --- a/packages/connection-encrypter-tls/package.json +++ b/packages/connection-encrypter-tls/package.json @@ -44,13 +44,12 @@ "@libp2p/crypto": "^5.1.7", "@libp2p/interface": "^2.10.5", "@libp2p/peer-id": "^5.1.8", + "@libp2p/utils": "^6.7.1", "@peculiar/asn1-schema": "^2.3.15", "@peculiar/asn1-x509": "^2.3.15", "@peculiar/webcrypto": "^1.5.0", "@peculiar/x509": "^1.12.3", "asn1js": "^3.0.6", - "it-queueless-pushable": "^2.0.1", - "it-stream-types": "^2.0.2", "protons-runtime": "^5.5.0", "uint8arraylist": "^2.4.8", "uint8arrays": "^5.1.0" @@ -58,8 +57,8 @@ "devDependencies": { "@libp2p/logger": "^5.1.21", "aegir": "^47.0.14", - "it-pair": "^2.0.6", "protons": "^7.6.1", + "race-event": "^1.6.1", "sinon": "^20.0.0", "sinon-ts": "^2.0.0" }, diff --git a/packages/connection-encrypter-tls/src/tls.browser.ts b/packages/connection-encrypter-tls/src/tls.browser.ts index a7078170cf..98347dd65f 100644 --- a/packages/connection-encrypter-tls/src/tls.browser.ts +++ 
b/packages/connection-encrypter-tls/src/tls.browser.ts @@ -1,8 +1,6 @@ import { serviceCapabilities } from '@libp2p/interface' import { PROTOCOL } from './index.js' -import type { MultiaddrConnection, ConnectionEncrypter, SecuredConnection, SecureConnectionOptions } from '@libp2p/interface' -import type { Duplex } from 'it-stream-types' -import type { Uint8ArrayList } from 'uint8arraylist' +import type { MultiaddrConnection, ConnectionEncrypter, SecuredConnection, SecureConnectionOptions, MessageStream } from '@libp2p/interface' export class TLS implements ConnectionEncrypter { public protocol: string = PROTOCOL @@ -17,11 +15,11 @@ export class TLS implements ConnectionEncrypter { '@libp2p/connection-encryption' ] - async secureInbound > = MultiaddrConnection> (conn: Stream, options?: SecureConnectionOptions): Promise> { + async secureInbound (conn: Stream, options?: SecureConnectionOptions): Promise> { throw new Error('TLS encryption is not possible in browsers') } - async secureOutbound > = MultiaddrConnection> (conn: Stream, options?: SecureConnectionOptions): Promise> { + async secureOutbound (conn: Stream, options?: SecureConnectionOptions): Promise> { throw new Error('TLS encryption is not possible in browsers') } } diff --git a/packages/connection-encrypter-tls/src/tls.ts b/packages/connection-encrypter-tls/src/tls.ts index d88b59c6bf..688865ebdd 100644 --- a/packages/connection-encrypter-tls/src/tls.ts +++ b/packages/connection-encrypter-tls/src/tls.ts @@ -21,10 +21,10 @@ import { TLSSocket, connect } from 'node:tls' import { InvalidCryptoExchangeError, serviceCapabilities } from '@libp2p/interface' import { HandshakeTimeoutError } from './errors.js' -import { generateCertificate, verifyPeerCertificate, itToStream, streamToIt } from './utils.js' +import { generateCertificate, verifyPeerCertificate, toNodeDuplex, toMessageStream } from './utils.js' import { PROTOCOL } from './index.js' import type { TLSComponents } from './index.js' -import type { MultiaddrConnection, ConnectionEncrypter, SecuredConnection, Logger, SecureConnectionOptions, CounterGroup, StreamMuxerFactory, SecurableStream } from '@libp2p/interface' +import type { MultiaddrConnection, ConnectionEncrypter, SecuredConnection, Logger, SecureConnectionOptions, CounterGroup, StreamMuxerFactory, MessageStream } from '@libp2p/interface' import type { TLSSocketOptions } from 'node:tls' export class TLS implements ConnectionEncrypter { @@ -75,19 +75,19 @@ export class TLS implements ConnectionEncrypter { '@libp2p/connection-encryption' ] - async secureInbound (conn: Stream, options?: SecureConnectionOptions): Promise> { - return this._encrypt(conn, true, options) + async secureInbound (connection: Stream, options?: SecureConnectionOptions): Promise { + return this._encrypt(connection, true, options) } - async secureOutbound (conn: Stream, options?: SecureConnectionOptions): Promise> { - return this._encrypt(conn, false, options) + async secureOutbound (connection: Stream, options?: SecureConnectionOptions): Promise { + return this._encrypt(connection, false, options) } /** * Encrypt connection */ - async _encrypt (conn: Stream, isServer: boolean, options?: SecureConnectionOptions): Promise> { - const log = conn.log?.newScope('tls') ?? this.log + async _encrypt (connection: Stream, isServer: boolean, options?: SecureConnectionOptions): Promise { + const log = connection.log?.newScope('tls') ?? 
this.log let streamMuxer: StreamMuxerFactory | undefined let streamMuxers: string[] = [] @@ -134,14 +134,14 @@ export class TLS implements ConnectionEncrypter { let socket: TLSSocket if (isServer) { - socket = new TLSSocket(itToStream(conn), { + socket = new TLSSocket(toNodeDuplex(connection), { ...opts, // require clients to send certificates requestCert: true }) } else { socket = connect({ - socket: itToStream(conn), + socket: toNodeDuplex(connection), ...opts }) } @@ -176,19 +176,14 @@ export class TLS implements ConnectionEncrypter { const err = new InvalidCryptoExchangeError(`Selected muxer ${socket.alpnProtocol} did not exist`) log.error(`Selected muxer ${socket.alpnProtocol} did not exist - %e`, err) - if (isAbortable(conn)) { - conn.abort(err) - reject(err) - } + connection.abort(err) + reject(err) } } resolve({ remotePeer, - conn: { - ...conn, - ...streamToIt(socket) - }, + connection: toMessageStream(connection, socket), streamMuxer }) }) @@ -210,10 +205,7 @@ export class TLS implements ConnectionEncrypter { } socket.destroy(err) - - if (isAbortable(conn)) { - conn.abort(err) - } + connection.abort(err) reject(err) }) @@ -240,11 +232,3 @@ export class TLS implements ConnectionEncrypter { }) } } - -interface Abortable { - abort (err: Error): void -} - -function isAbortable (obj: T & Partial): obj is T & Abortable { - return typeof obj?.abort === 'function' -} diff --git a/packages/connection-encrypter-tls/src/utils.ts b/packages/connection-encrypter-tls/src/utils.ts index 68596bbe87..882d33f393 100644 --- a/packages/connection-encrypter-tls/src/utils.ts +++ b/packages/connection-encrypter-tls/src/utils.ts @@ -1,22 +1,22 @@ -import { Duplex as DuplexStream } from 'node:stream' +import net from 'node:net' +import { Duplex } from 'node:stream' import { publicKeyFromProtobuf } from '@libp2p/crypto/keys' -import { InvalidCryptoExchangeError, UnexpectedPeerError } from '@libp2p/interface' +import { InvalidCryptoExchangeError, StreamClosedError, UnexpectedPeerError, StreamMessageEvent } from '@libp2p/interface' import { peerIdFromCID } from '@libp2p/peer-id' +import { AbstractMessageStream, socketWriter } from '@libp2p/utils' import { AsnConvert } from '@peculiar/asn1-schema' import * as asn1X509 from '@peculiar/asn1-x509' import { Crypto } from '@peculiar/webcrypto' import * as x509 from '@peculiar/x509' import * as asn1js from 'asn1js' -import { queuelessPushable } from 'it-queueless-pushable' +import { Uint8ArrayList } from 'uint8arraylist' import { concat as uint8ArrayConcat } from 'uint8arrays/concat' import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' import { toString as uint8ArrayToString } from 'uint8arrays/to-string' import { InvalidCertificateError } from './errors.js' import { KeyType, PublicKey } from './pb/index.js' -import type { PeerId, PublicKey as Libp2pPublicKey, Logger, PrivateKey, AbortOptions } from '@libp2p/interface' -import type { Pushable } from 'it-queueless-pushable' -import type { Duplex, Source } from 'it-stream-types' -import type { Uint8ArrayList } from 'uint8arraylist' +import type { PeerId, PublicKey as Libp2pPublicKey, Logger, PrivateKey, AbortOptions, MessageStream, StreamCloseEvent } from '@libp2p/interface' +import type { SendResult, SocketWriter } from '@libp2p/utils' const crypto = new Crypto() x509.cryptoProvider.set(crypto) @@ -191,123 +191,153 @@ function formatAsPem (str: string): string { return finalString } -export function itToStream (conn: Duplex>): DuplexStream { - const output = queuelessPushable() - const 
iterator = conn.source[Symbol.asyncIterator]() as AsyncGenerator +export function toNodeDuplex (stream: MessageStream): Duplex { - const stream = new DuplexStream({ - autoDestroy: false, - allowHalfOpen: true, + function sendAndCallback (chunk: Uint8Array | Uint8ArrayList, callback: (err?: Error | null) => void): void { + const sendMore = stream.send(chunk) + + if (sendMore) { + callback() + return + } + + socket.pause() + + const cleanUp = (): void => { + stream.removeEventListener('drain', onDrain) + stream.removeEventListener('close', onClose) + stream.removeEventListener('message', onMessage) + } + + const onDrain = (): void => { + cleanUp() + callback() + socket.resume() + } + const onClose = (evt: StreamCloseEvent): void => { + cleanUp() + + if (evt.error != null) { + callback(evt.error) + } else { + callback(new StreamClosedError('Stream closed before write')) + } + } + + stream.addEventListener('drain', onDrain, { + once: true + }) + stream.addEventListener('close', onClose, { + once: true + }) + } + + // pause incoming messages until pulled from duplex + stream.pause() + + const socket = new Duplex({ write (chunk, encoding, callback) { - output.push(chunk) - .then(() => { - callback() - }, err => { - callback(err) - }) + sendAndCallback(chunk, callback) + }, + writev (chunks, callback) { + sendAndCallback(new Uint8ArrayList(...chunks.map(({ chunk }) => chunk)), callback) }, read () { - iterator.next() - .then(result => { - if (result.done === true) { - this.push(null) - } else { - this.push(result.value) - } - }, (err) => { - this.destroy(err) - }) + stream.resume() + }, + final (cb) { + stream.closeWrite() + .then(() => cb(), (err) => cb(err)) } }) - // @ts-expect-error return type of sink is unknown - conn.sink(output) - .catch((err: any) => { - stream.destroy(err) - }) + + const onMessage = (evt: StreamMessageEvent): void => { + const buf = evt.data + let sendMore = true + + if (buf instanceof Uint8Array) { + sendMore = socket.push(buf) + } else { + for (const chunk of buf) { + sendMore = socket.push(chunk) + } + } - return stream + if (!sendMore) { + stream.pause() + } + } + stream.addEventListener('message', onMessage) + + return socket } -class DuplexIterable implements Duplex> { - source: Pushable - private readonly stream: DuplexStream +class EncryptedMessageStream extends AbstractMessageStream { + private socket: net.Socket + private writer: SocketWriter - constructor (stream: DuplexStream) { - this.stream = stream - this.source = queuelessPushable() + constructor (stream: MessageStream, socket: net.Socket) { + super({ + log: stream.log, + inactivityTimeout: stream.inactivityTimeout, + maxPauseBufferLength: stream.maxPauseBufferLength, + direction: stream.direction + }) - stream.addListener('data', (buf) => { - stream.pause() - this.source.push(buf.subarray()) - .then(() => { - stream.resume() - }, (err) => { - stream.emit('error', err) - }) + this.socket = socket + this.writer = socketWriter(socket) + + this.socket.on('data', (buf) => { + this.onData(buf) + }) + this.socket.on('error', err => { + stream.abort(err) }) - // both ends closed - stream.addListener('close', () => { - this.source.end() + this.socket.on('close', () => { + stream.closeWrite() .catch(err => { - stream.emit('error', err) + stream.abort(err) }) }) - stream.addListener('error', (err) => { - this.source.end(err) - .catch(() => {}) + this.socket.on('drain', () => { +
this.safeDispatchEvent('drain') }) - // just writable end closed - stream.addListener('finish', () => { - this.source.end() - .catch(() => {}) - }) - - this.sink = this.sink.bind(this) } - async sink (source: Source): Promise { - try { - for await (const buf of source) { - const sendMore = this.stream.write(buf.subarray()) + sendPause (): void { + this.socket.pause() + } - if (!sendMore) { - await waitForBackpressure(this.stream) - } - } + sendResume (): void { + this.socket.resume() + } - // close writable end - this.stream.end() - } catch (err: any) { - this.stream.destroy(err) - throw err - } + async sendCloseWrite (options?: AbortOptions): Promise { + this.socket.end() + options?.signal?.throwIfAborted() } -} -export function streamToIt (stream: DuplexStream): Duplex> { - return new DuplexIterable(stream) -} + async sendCloseRead (options?: AbortOptions): Promise { + options?.signal?.throwIfAborted() + } -async function waitForBackpressure (stream: DuplexStream): Promise { - await new Promise((resolve, reject) => { - const continueListener = (): void => { - cleanUp() - resolve() - } - const stopListener = (err?: Error): void => { - cleanUp() - reject(err ?? new Error('Stream ended')) - } + sendReset (): void { + this.socket.resetAndDestroy() + } - const cleanUp = (): void => { - stream.removeListener('drain', continueListener) - stream.removeListener('end', stopListener) - stream.removeListener('error', stopListener) + sendData (data: Uint8ArrayList): SendResult { + return { + sentBytes: data.byteLength, + canSendMore: this.writer.write(data) } + } +} - stream.addListener('drain', continueListener) - stream.addListener('end', stopListener) - stream.addListener('error', stopListener) - }) +export function toMessageStream (stream: MessageStream, socket: net.Socket): MessageStream { + return new EncryptedMessageStream(stream, socket) } diff --git a/packages/connection-encrypter-tls/test/index.spec.ts b/packages/connection-encrypter-tls/test/index.spec.ts index dec2532f41..ac74caa5be 100644 --- a/packages/connection-encrypter-tls/test/index.spec.ts +++ b/packages/connection-encrypter-tls/test/index.spec.ts @@ -3,12 +3,12 @@ import { generateKeyPair } from '@libp2p/crypto/keys' import { defaultLogger } from '@libp2p/logger' import { peerIdFromMultihash, peerIdFromPrivateKey } from '@libp2p/peer-id' +import { streamPair } from '@libp2p/utils' import { expect } from 'aegir/chai' -import { duplexPair } from 'it-pair/duplex' import sinon from 'sinon' import { stubInterface } from 'sinon-ts' import { tls } from '../src/index.js' -import type { StreamMuxerFactory, ConnectionEncrypter, PeerId, Upgrader, MultiaddrConnection } from '@libp2p/interface' +import type { StreamMuxerFactory, ConnectionEncrypter, PeerId, Upgrader } from '@libp2p/interface' describe('tls', () => { let localPeer: PeerId @@ -43,19 +43,13 @@ describe('tls', () => { }) it('should verify the public key and id match', async () => { - const [inbound, outbound] = duplexPair() + const [inbound, outbound] = await streamPair() await Promise.all([ - encrypter.secureInbound(stubInterface({ - ...inbound, - log: defaultLogger().forComponent('inbound') - }), { + encrypter.secureInbound(inbound, { remotePeer }), - encrypter.secureOutbound(stubInterface({ - ...outbound, - log: defaultLogger().forComponent('outbound') - }), { + encrypter.secureOutbound(outbound, { remotePeer: wrongPeer }) ]).then(() => expect.fail('should have failed'), (err) => { @@ -79,19 +73,13 @@ describe('tls', () => { }) }) - const [inbound, outbound] = duplexPair() + 
const [inbound, outbound] = await streamPair() await expect(Promise.all([ - encrypter.secureInbound(stubInterface({ - ...inbound, - log: defaultLogger().forComponent('inbound') - }), { + encrypter.secureInbound(inbound, { remotePeer }), - encrypter.secureOutbound(stubInterface({ - ...outbound, - log: defaultLogger().forComponent('outbound') - }), { + encrypter.secureOutbound(outbound, { remotePeer: localPeer }) ])) @@ -99,19 +87,13 @@ describe('tls', () => { }) it('should select an early muxer', async () => { - const [inbound, outbound] = duplexPair() + const [inbound, outbound] = await streamPair() const result = await Promise.all([ - encrypter.secureInbound(stubInterface({ - ...inbound, - log: defaultLogger().forComponent('inbound') - }), { + encrypter.secureInbound(inbound, { remotePeer: localPeer }), - encrypter.secureOutbound(stubInterface({ - ...outbound, - log: defaultLogger().forComponent('outbound') - }), { + encrypter.secureOutbound(outbound, { remotePeer: localPeer }) ]) @@ -121,20 +103,14 @@ describe('tls', () => { }) it('should not select an early muxer when it is skipped', async () => { - const [inbound, outbound] = duplexPair() + const [inbound, outbound] = await streamPair() const result = await Promise.all([ - encrypter.secureInbound(stubInterface({ - ...inbound, - log: defaultLogger().forComponent('inbound') - }), { + encrypter.secureInbound(inbound, { remotePeer: localPeer, skipStreamMuxerNegotiation: true }), - encrypter.secureOutbound(stubInterface({ - ...outbound, - log: defaultLogger().forComponent('outbound') - }), { + encrypter.secureOutbound(outbound, { remotePeer: localPeer, skipStreamMuxerNegotiation: true }) diff --git a/packages/connection-encrypter-tls/test/utils.spec.ts b/packages/connection-encrypter-tls/test/utils.spec.ts index 14768c584b..30c4e135e5 100644 --- a/packages/connection-encrypter-tls/test/utils.spec.ts +++ b/packages/connection-encrypter-tls/test/utils.spec.ts @@ -1,9 +1,15 @@ +import { EventEmitter } from 'node:events' +import net from 'node:net' import { logger } from '@libp2p/logger' +import { streamPair } from '@libp2p/utils' import { Crypto } from '@peculiar/webcrypto' import * as x509 from '@peculiar/x509' import { expect } from 'aegir/chai' -import { verifyPeerCertificate } from '../src/utils.js' +import { raceEvent } from 'race-event' +import { stubInterface } from 'sinon-ts' +import { toMessageStream, toNodeDuplex, verifyPeerCertificate } from '../src/utils.js' import * as testVectors from './fixtures/test-vectors.js' +import { Uint8ArrayList } from 'uint8arraylist' const crypto = new Crypto() x509.cryptoProvider.set(crypto) @@ -74,4 +80,72 @@ describe('utils', () => { await expect(verifyPeerCertificate(new Uint8Array(cert.rawData), undefined, logger('libp2p'))).to.eventually.be.rejected .with.property('name', 'InvalidCryptoExchangeError') }) + + it('should pipe stream messages to socket', async () => { + const [outboundStream, inboundStream] = await streamPair() + const [outboundSocket, inboundSocket] = [toNodeDuplex(outboundStream), toNodeDuplex(inboundStream)] + + const toSend = new Array(1_000).fill(0).map(() => { + return Uint8Array.from(new Array(1_000).fill(0)) + }) + + let received = 0 + + inboundSocket.addListener('data', (buf) => { + received += buf.byteLength + }) + + let sent = 0 + + for (const buf of toSend) { + const sendMore = outboundSocket.write(buf) + sent += buf.byteLength + + if (sendMore === false) { + await raceEvent(outboundSocket, 'drain') + } + } + + outboundSocket.end() + inboundSocket.end() + + await 
Promise.all([ + raceEvent(outboundStream, 'close'), + raceEvent(inboundStream, 'close') + ]) + + expect(received).to.deep.equal(sent) + }) + + it('should pipe socket messages to stream', async () => { + const [outboundStream, inboundStream] = await streamPair() + const emitter = new EventEmitter() + + // close writable end of inbound stream + await inboundStream.closeWrite() + + // @ts-expect-error return types of emitter methods are incompatible + const socket = stubInterface(emitter) + const stream = toMessageStream(outboundStream, socket) + + const sent = new Array(1_000).fill(0).map(() => { + return Uint8Array.from(new Array(1_000).fill(0)) + }) + + const received: Array = [] + + stream.addEventListener('message', (evt) => { + received.push(evt.data) + }) + + for (const buf of sent) { + emitter.emit('data', buf) + } + + emitter.emit('close') + + await raceEvent(outboundStream, 'close') + + expect(received).to.deep.equal(sent) + }) }) diff --git a/packages/integration-tests/.aegir.js b/packages/integration-tests/.aegir.js index 0626837785..11e66a53af 100644 --- a/packages/integration-tests/.aegir.js +++ b/packages/integration-tests/.aegir.js @@ -19,7 +19,7 @@ export default { const { circuitRelayServer, circuitRelayTransport } = await import('@libp2p/circuit-relay-v2') const { identify } = await import('@libp2p/identify') const { echo } = await import('@libp2p/echo') - const { mockMuxer } = await import('@libp2p/interface-compliance-tests/mocks') + const { mockMuxer } = await import('@libp2p/utils') const { ping } = await import('@libp2p/ping') const { prefixLogger } = await import('@libp2p/logger') const { webRTCDirect } = await import('@libp2p/webrtc') @@ -113,7 +113,6 @@ export default { } }) - const goLibp2pRelay = await createGoLibp2pRelay() const wsAddresses = libp2p.getMultiaddrs().filter(ma => WebSockets.exactMatch(ma)) const webRTCDirectPorts = new Set() @@ -153,9 +152,9 @@ export default { } }, after: async (_, before) => { - await before.libp2p.stop() - await before.goLibp2pRelay.proc.kill() - await before.libp2pLimitedRelay.stop() + await before.libp2p?.stop() + await before.goLibp2pRelay?.proc.kill() + await before.libp2pLimitedRelay?.stop() } } } diff --git a/packages/integration-tests/package.json b/packages/integration-tests/package.json index d582ef0a38..91b113f638 100644 --- a/packages/integration-tests/package.json +++ b/packages/integration-tests/package.json @@ -67,8 +67,6 @@ "execa": "^9.5.3", "go-libp2p": "^1.6.0", "it-all": "^3.0.8", - "it-map": "^3.1.3", - "it-pipe": "^3.0.1", "libp2p": "^2.8.8", "main-event": "^1.0.1", "multiformats": "^13.3.6", diff --git a/packages/integration-tests/test/circuit-relay.node.ts b/packages/integration-tests/test/circuit-relay.node.ts index c2df08683d..10841107cb 100644 --- a/packages/integration-tests/test/circuit-relay.node.ts +++ b/packages/integration-tests/test/circuit-relay.node.ts @@ -7,12 +7,12 @@ import { identify } from '@libp2p/identify' import { mplex } from '@libp2p/mplex' import { plaintext } from '@libp2p/plaintext' import { tcp } from '@libp2p/tcp' +import { echo } from '@libp2p/utils' import { Circuit } from '@multiformats/mafmt' import { multiaddr } from '@multiformats/multiaddr' import { expect } from 'aegir/chai' import delay from 'delay' import all from 'it-all' -import { pipe } from 'it-pipe' import { createLibp2p } from 'libp2p' import defer from 'p-defer' import pRetry from 'p-retry' @@ -85,10 +85,8 @@ const ECHO_PROTOCOL = '/test/echo/1.0.0' const echoService = (components: EchoServiceComponents): unknown 
=> { return { async start () { - await components.registrar.handle(ECHO_PROTOCOL, ({ stream }) => { - void pipe( - stream, stream - ) + await components.registrar.handle(ECHO_PROTOCOL, (stream) => { + echo(stream) }, { runOnLimitedConnection: true }) @@ -580,8 +578,8 @@ describe('circuit-relay', () => { const protocol = '/my-protocol/1.0.0' // remote registers handler, disallow running over limited connections - await remote.handle(protocol, ({ stream }) => { - void pipe(stream, stream) + await remote.handle(protocol, (stream) => { + echo(stream) }, { runOnLimitedConnection: false }) @@ -607,8 +605,8 @@ describe('circuit-relay', () => { const protocol = '/my-protocol/1.0.0' // remote registers handler, allow running over limited streams - await remote.handle(protocol, ({ stream }) => { - void pipe(stream, stream) + await remote.handle(protocol, (stream) => { + echo(stream) }, { runOnLimitedConnection: true }) @@ -675,29 +673,24 @@ describe('circuit-relay', () => { // set up an echo server on the remote const protocol = '/test/protocol/1.0.0' - await remote.handle(protocol, ({ stream }) => { - void Promise.resolve().then(async () => { - try { - for await (const buf of stream.source) { - transferred.append(buf) - } - } catch {} + await remote.handle(protocol, (stream) => { + stream.addEventListener('message', (evt) => { + transferred.append(evt.data) }) }) // dial the remote from the local through the relay const ma = getRelayAddress(remote) + const stream = await local.dialProtocol(ma, protocol) - try { - const stream = await local.dialProtocol(ma, protocol) - - await stream.sink(async function * () { - while (true) { - await delay(100) - yield new Uint8Array(2048) - } - }()) - } catch {} + Promise.resolve().then(async () => { + while (true) { + await delay(100) + stream.send(new Uint8Array(2048)) + } + }).catch(() => { + // writing to a closed stream will throw so swallow the error + }) // we cannot be exact about this figure because mss, encryption and other // protocols all send data over connections when they are opened @@ -749,13 +742,9 @@ describe('circuit-relay', () => { // set up an echo server on the remote const protocol = '/test/protocol/1.0.0' - await remote.handle(protocol, ({ stream }) => { - void Promise.resolve().then(async () => { - try { - for await (const buf of stream.source) { - transferred.append(buf) - } - } catch {} + await remote.handle(protocol, (stream) => { + stream.addEventListener('message', (evt) => { + transferred.append(evt.data) }) }, { runOnLimitedConnection: true @@ -764,19 +753,19 @@ describe('circuit-relay', () => { // dial the remote from the local through the relay const ma = getRelayAddress(remote) - try { - const stream = await local.dialProtocol(ma, protocol, { - runOnLimitedConnection: true - }) + const stream = await local.dialProtocol(ma, protocol, { + runOnLimitedConnection: true + }) - await stream.sink(async function * () { - while (true) { - await delay(100) - yield new Uint8Array(10) - await delay(5000) - } - }()) - } catch {} + Promise.resolve().then(async () => { + while (true) { + await delay(100) + stream.send(new Uint8Array(10)) + await delay(5000) + } + }).catch(() => { + // writing to a closed stream will throw so swallow the error + }) expect(transferred.byteLength).to.equal(10) }) @@ -910,11 +899,10 @@ describe('circuit-relay', () => { // write more than the default data limit const data = new Uint8Array(Number(DEFAULT_DATA_LIMIT * 2n)) - const result = await pipe( - [data], - stream, - async (source) => new 
Uint8ArrayList(...(await all(source))) - ) + stream.send(data) + await stream.closeWrite() + + const result = new Uint8ArrayList(...(await all(stream))) expect(result.subarray()).to.equalBytes(data) }) @@ -935,20 +923,15 @@ describe('circuit-relay', () => { const start = Date.now() let finish = 0 - await pipe( - async function * () { - while (true) { - yield new Uint8Array() - await delay(10) + while (true) { + stream.send(new Uint8Array()) + await delay(10) - if (finished) { - finish = Date.now() - break - } - } - }, - stream - ) + if (finished) { + finish = Date.now() + break + } + } // default time limit is set to 100ms so the stream should have been open // for longer than that diff --git a/packages/interface-compliance-tests/test/mocks/muxer.spec.ts b/packages/integration-tests/test/compliance/muxer/mock-muxer.spec.ts similarity index 58% rename from packages/interface-compliance-tests/test/mocks/muxer.spec.ts rename to packages/integration-tests/test/compliance/muxer/mock-muxer.spec.ts index 80418c2c72..9db13b079e 100644 --- a/packages/interface-compliance-tests/test/mocks/muxer.spec.ts +++ b/packages/integration-tests/test/compliance/muxer/mock-muxer.spec.ts @@ -1,5 +1,5 @@ -import { mockMuxer } from '../../src/mocks/muxer.js' -import tests from '../../src/stream-muxer/index.js' +import tests from '@libp2p/interface-compliance-tests/stream-muxer' +import { mockMuxer } from '@libp2p/utils' describe('mock stream muxer compliance tests', () => { tests({ diff --git a/packages/integration-tests/test/connections.spec.ts b/packages/integration-tests/test/connections.spec.ts index 241bdc509b..7ca544d15b 100644 --- a/packages/integration-tests/test/connections.spec.ts +++ b/packages/integration-tests/test/connections.spec.ts @@ -375,7 +375,7 @@ describe('connections', () => { const protocol = '/test/1.0.0' const streamOpen = pDefer() - await listener.handle(protocol, ({ stream }) => { + await listener.handle(protocol, (stream) => { streamOpen.resolve(stream) }) @@ -392,7 +392,7 @@ describe('connections', () => { const protocol = '/test/1.0.0' const streamOpen = pDefer() - await listener.handle(protocol, ({ stream }) => { + await listener.handle(protocol, (stream) => { streamOpen.resolve(stream) }) diff --git a/packages/integration-tests/test/fixtures/base-options.browser.ts b/packages/integration-tests/test/fixtures/base-options.browser.ts index d3a2cff4e5..29656bc827 100644 --- a/packages/integration-tests/test/fixtures/base-options.browser.ts +++ b/packages/integration-tests/test/fixtures/base-options.browser.ts @@ -3,7 +3,7 @@ import { circuitRelayTransport } from '@libp2p/circuit-relay-v2' import { identify } from '@libp2p/identify' import { mplex } from '@libp2p/mplex' import { plaintext } from '@libp2p/plaintext' -import { mergeOptions } from '@libp2p/utils/merge-options' +import { mergeOptions } from '@libp2p/utils' import { webRTC } from '@libp2p/webrtc' import { webSockets } from '@libp2p/websockets' import { isWebWorker } from 'wherearewe' diff --git a/packages/integration-tests/test/fixtures/base-options.ts b/packages/integration-tests/test/fixtures/base-options.ts index d14642b955..f95c643b45 100644 --- a/packages/integration-tests/test/fixtures/base-options.ts +++ b/packages/integration-tests/test/fixtures/base-options.ts @@ -4,7 +4,7 @@ import { identify } from '@libp2p/identify' import { mplex } from '@libp2p/mplex' import { plaintext } from '@libp2p/plaintext' import { tcp } from '@libp2p/tcp' -import { mergeOptions } from '@libp2p/utils/merge-options' +import { mergeOptions 
} from '@libp2p/utils' import { webRTC } from '@libp2p/webrtc' import { webSockets } from '@libp2p/websockets' import type { ServiceMap } from '@libp2p/interface' diff --git a/packages/integration-tests/test/fixtures/slow-muxer.ts b/packages/integration-tests/test/fixtures/slow-muxer.ts deleted file mode 100644 index c98c59de47..0000000000 --- a/packages/integration-tests/test/fixtures/slow-muxer.ts +++ /dev/null @@ -1,28 +0,0 @@ -/* eslint-env mocha */ - -import { yamux } from '@chainsafe/libp2p-yamux' -import delay from 'delay' -import map from 'it-map' -import type { StreamMuxerFactory } from '@libp2p/interface' - -/** - * Creates a muxer with a delay between each sent packet - */ -export function slowMuxer (packetDelay: number): ((components: any) => StreamMuxerFactory) { - return (components) => { - const muxerFactory = yamux()(components) - const originalCreateStreamMuxer = muxerFactory.createStreamMuxer.bind(muxerFactory) - - muxerFactory.createStreamMuxer = (init) => { - const muxer = originalCreateStreamMuxer(init) - muxer.source = map(muxer.source, async (buf) => { - await delay(packetDelay) - return buf - }) - - return muxer - } - - return muxerFactory - } -} diff --git a/packages/integration-tests/test/identify.node.ts b/packages/integration-tests/test/identify.node.ts index a17f8efb91..3313f179c7 100644 --- a/packages/integration-tests/test/identify.node.ts +++ b/packages/integration-tests/test/identify.node.ts @@ -156,7 +156,7 @@ describe('identify', () => { expect(clientPeer.addresses[0].multiaddr.toString()).to.equal(announceAddrs[0].toString()) expect(clientPeer.addresses[1].multiaddr.toString()).to.equal(announceAddrs[1].toString()) - await stream.close() + await stream.closeWrite() await connection.close() await receiver.stop() await sender.stop() diff --git a/packages/integration-tests/test/ping.spec.ts b/packages/integration-tests/test/ping.spec.ts index fd8a94d556..e2a8c7d3ce 100644 --- a/packages/integration-tests/test/ping.spec.ts +++ b/packages/integration-tests/test/ping.spec.ts @@ -3,9 +3,7 @@ import { ping, PING_PROTOCOL } from '@libp2p/ping' import { multiaddr } from '@multiformats/multiaddr' import { expect } from 'aegir/chai' -import { pipe } from 'it-pipe' import { createLibp2p } from 'libp2p' -import pDefer from 'p-defer' import { createBaseOptions } from './fixtures/base-options.js' import type { Libp2p } from '@libp2p/interface' import type { PingService } from '@libp2p/ping' @@ -60,22 +58,11 @@ describe('ping', () => { }) it('only waits for the first response to arrive', async () => { - const defer = pDefer() - await nodes[1].unhandle(PING_PROTOCOL) - await nodes[1].handle(PING_PROTOCOL, ({ stream }) => { - void pipe( - stream, - async function * (stream) { - for await (const data of stream) { - yield data - - // something longer than the test timeout - await defer.promise - } - }, - stream - ) + await nodes[1].handle(PING_PROTOCOL, (stream) => { + stream.addEventListener('message', (evt) => { + stream.send(evt.data) + }) }, { runOnLimitedConnection: true }) @@ -83,8 +70,6 @@ describe('ping', () => { const latency = await nodes[0].services.ping.ping(nodes[1].getMultiaddrs()) expect(latency).to.be.a('Number') - - defer.resolve() }) it('allows two incoming streams from the same peer', async () => { diff --git a/packages/interface-compliance-tests/package.json b/packages/interface-compliance-tests/package.json index d3794b95ba..47cbcf3c63 100644 --- a/packages/interface-compliance-tests/package.json +++ b/packages/interface-compliance-tests/package.json @@ 
-52,26 +52,10 @@ "types": "./dist/src/connection-encryption/index.d.ts", "import": "./dist/src/connection-encryption/index.js" }, - "./is-valid-tick": { - "types": "./dist/src/is-valid-tick.d.ts", - "import": "./dist/src/is-valid-tick.js" - }, - "./matchers": { - "types": "./dist/src/matchers.d.ts", - "import": "./dist/src/matchers.js" - }, - "./mocks": { - "types": "./dist/src/mocks/index.d.ts", - "import": "./dist/src/mocks/index.js" - }, "./peer-discovery": { "types": "./dist/src/peer-discovery/index.d.ts", "import": "./dist/src/peer-discovery/index.js" }, - "./pubsub": { - "types": "./dist/src/pubsub/index.d.ts", - "import": "./dist/src/pubsub/index.js" - }, "./stream-muxer": { "types": "./dist/src/stream-muxer/index.d.ts", "import": "./dist/src/stream-muxer/index.js" @@ -101,38 +85,26 @@ "@libp2p/echo": "^2.1.27", "@libp2p/interface": "^2.10.5", "@libp2p/interface-internal": "^2.3.18", - "@libp2p/logger": "^5.1.21", "@libp2p/memory": "^1.1.13", - "@libp2p/multistream-select": "^6.0.28", - "@libp2p/peer-collections": "^6.0.34", "@libp2p/peer-id": "^5.1.8", "@libp2p/plaintext": "^2.0.28", "@libp2p/utils": "^6.7.1", "@multiformats/multiaddr": "^12.4.4", "@multiformats/multiaddr-matcher": "^2.0.0", - "abortable-iterator": "^5.1.0", "aegir": "^47.0.14", - "any-signal": "^4.1.1", "delay": "^6.0.0", "it-all": "^3.0.8", - "it-byte-stream": "^2.0.2", "it-drain": "^3.0.9", "it-map": "^3.1.3", - "it-ndjson": "^1.1.3", - "it-pair": "^2.0.6", - "it-pipe": "^3.0.1", - "it-protobuf-stream": "^2.0.2", "it-pushable": "^3.2.3", - "it-stream-types": "^2.0.2", "it-to-buffer": "^4.0.9", "libp2p": "^2.9.0", - "main-event": "^1.0.1", "p-defer": "^4.0.1", "p-event": "^6.0.1", - "p-limit": "^6.2.0", "p-retry": "^6.2.1", "p-wait-for": "^5.0.2", "protons-runtime": "^5.5.0", + "race-event": "^1.6.1", "race-signal": "^1.1.3", "sinon": "^20.0.0", "uint8arraylist": "^2.4.8", diff --git a/packages/interface-compliance-tests/src/connection-encryption/index.ts b/packages/interface-compliance-tests/src/connection-encryption/index.ts index 4fc5901b74..711b1644ab 100644 --- a/packages/interface-compliance-tests/src/connection-encryption/index.ts +++ b/packages/interface-compliance-tests/src/connection-encryption/index.ts @@ -1,13 +1,13 @@ import { generateKeyPair } from '@libp2p/crypto/keys' import { peerIdFromPrivateKey } from '@libp2p/peer-id' +import { multiaddrConnectionPair, echo } from '@libp2p/utils' import { expect } from 'aegir/chai' -import all from 'it-all' -import { pipe } from 'it-pipe' import toBuffer from 'it-to-buffer' +import { pEvent } from 'p-event' import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' -import { createMaConnPair } from './utils/index.js' import type { TestSetup } from '../index.js' import type { ConnectionEncrypter, PeerId, PrivateKey } from '@libp2p/interface' +import type { Uint8ArrayList } from 'uint8arraylist' export interface ConnectionEncrypterSetupArgs { privateKey: PrivateKey @@ -47,39 +47,47 @@ export default (common: TestSetup { - const [localConn, remoteConn] = createMaConnPair() + const [localConn, remoteConn] = multiaddrConnectionPair() const [ inboundResult, outboundResult ] = await Promise.all([ - cryptoRemote.secureInbound(localConn), - crypto.secureOutbound(remoteConn, { + cryptoRemote.secureInbound(remoteConn), + crypto.secureOutbound(localConn, { remotePeer }) ]) // Echo server - void pipe(inboundResult.conn, inboundResult.conn) + echo(inboundResult.connection).catch(() => {}) - const input = new Array(10_000).fill(0).map((val, index) => { - 
return uint8ArrayFromString(`data to encrypt, chunk ${index}`) + // Send some data and collect the result + const output: Array = [] + + outboundResult.connection.addEventListener('message', (evt) => { + output.push(evt.data) }) - // Send some data and collect the result - const result = await pipe( - async function * () { - yield * input - }, - outboundResult.conn, - async (source) => all(source) - ) - - expect(toBuffer(result.map(b => b.subarray()))).to.equalBytes(toBuffer(input)) + const input: Uint8Array[] = [] + + for (let i = 0; i < 10_000; i++) { + const buf = uint8ArrayFromString(`data to encrypt, chunk ${i}`) + + if (!outboundResult.connection.send(buf)) { + await pEvent(outboundResult.connection, 'drain') + } + + input.push(buf) + } + + await outboundResult.connection.closeWrite() + + expect(toBuffer(output.map(b => b.subarray()))).to.equalBytes(toBuffer(input)) }) it('should return the remote peer id', async () => { - const [localConn, remoteConn] = createMaConnPair() + const [remoteConn, localConn] = multiaddrConnectionPair() const [ inboundResult, @@ -98,7 +106,7 @@ export default (common: TestSetup { - const [localConn, remoteConn] = createMaConnPair() + const [remoteConn, localConn] = multiaddrConnectionPair() await Promise.all([ cryptoRemote.secureInbound(localConn, { diff --git a/packages/interface-compliance-tests/src/connection-encryption/utils/index.ts b/packages/interface-compliance-tests/src/connection-encryption/utils/index.ts deleted file mode 100644 index f5258a1b6b..0000000000 --- a/packages/interface-compliance-tests/src/connection-encryption/utils/index.ts +++ /dev/null @@ -1,27 +0,0 @@ -import { logger } from '@libp2p/logger' -import { multiaddr } from '@multiformats/multiaddr' -import { duplexPair } from 'it-pair/duplex' -import type { MultiaddrConnection } from '@libp2p/interface' -import type { Duplex, Source } from 'it-stream-types' -import type { Uint8ArrayList } from 'uint8arraylist' - -export function createMaConnPair (): [MultiaddrConnection, MultiaddrConnection] { - const [local, remote] = duplexPair() - - function duplexToMaConn (duplex: Duplex, Source, Promise>): MultiaddrConnection { - const output: MultiaddrConnection = { - ...duplex, - close: async () => {}, - abort: () => {}, - remoteAddr: multiaddr('/ip4/127.0.0.1/tcp/4001'), - timeline: { - open: Date.now() - }, - log: logger('duplex-maconn') - } - - return output - } - - return [duplexToMaConn(local), duplexToMaConn(remote)] -} diff --git a/packages/interface-compliance-tests/src/matchers.ts b/packages/interface-compliance-tests/src/matchers.ts deleted file mode 100644 index a36a603a2a..0000000000 --- a/packages/interface-compliance-tests/src/matchers.ts +++ /dev/null @@ -1,18 +0,0 @@ -import Sinon from 'sinon' -import type { PeerId } from '@libp2p/interface' -import type { Multiaddr } from '@multiformats/multiaddr' -import type { SinonMatcher } from 'sinon' - -/** - * @deprecated PeerIds can be passed to sinon matchers directly - */ -export function matchPeerId (peerId: PeerId): SinonMatcher { - return Sinon.match(p => p.toString() === peerId.toString()) -} - -/** - * @deprecated Multiaddrs can be passed to sinon matchers directly - */ -export function matchMultiaddr (ma: Multiaddr): SinonMatcher { - return Sinon.match(m => m.toString() === ma.toString()) -} diff --git a/packages/interface-compliance-tests/src/mocks/connection-manager.ts b/packages/interface-compliance-tests/src/mocks/connection-manager.ts deleted file mode 100644 index 6cfaa93595..0000000000 --- 
a/packages/interface-compliance-tests/src/mocks/connection-manager.ts +++ /dev/null @@ -1,216 +0,0 @@ -import { UnsupportedOperationError, isPeerId } from '@libp2p/interface' -import { PeerMap } from '@libp2p/peer-collections' -import { peerIdFromString } from '@libp2p/peer-id' -import { isMultiaddr } from '@multiformats/multiaddr' -import { connectionPair } from './connection.js' -import type { PrivateKey, PeerId, ComponentLogger, Libp2pEvents, PendingDial, Connection, PubSub, Startable } from '@libp2p/interface' -import type { ConnectionManager, Registrar } from '@libp2p/interface-internal' -import type { Multiaddr } from '@multiformats/multiaddr' -import type { AbortOptions } from 'it-pushable' -import type { TypedEventTarget } from 'main-event' - -export interface MockNetworkComponents { - peerId: PeerId - privateKey: PrivateKey - registrar: Registrar - connectionManager: ConnectionManager - events: TypedEventTarget - pubsub?: PubSub - logger: ComponentLogger -} - -export class MockNetwork { - private components: MockNetworkComponents[] = [] - - addNode (components: MockNetworkComponents): void { - this.components.push(components) - } - - getNode (peerId: PeerId | Multiaddr []): MockNetworkComponents { - if (Array.isArray(peerId) && peerId.length > 0) { - peerId = peerIdFromString(peerId[0].getPeerId() ?? '') - } else if (isPeerId(peerId)) { - for (const components of this.components) { - if (peerId.equals(components.peerId)) { - return components - } - } - } - - throw new Error('Peer not found') - } - - reset (): void { - this.components = [] - } -} - -export const mockNetwork = new MockNetwork() - -export interface MockConnectionManagerComponents { - peerId: PeerId - registrar: Registrar - events: TypedEventTarget -} - -class MockConnectionManager implements ConnectionManager, Startable { - private connections: Connection[] = [] - private readonly components: MockConnectionManagerComponents - private started = false - - constructor (components: MockConnectionManagerComponents) { - this.components = components - } - - isStarted (): boolean { - return this.started - } - - async start (): Promise { - this.started = true - } - - async stop (): Promise { - for (const connection of this.connections) { - await this.closeConnections(connection.remotePeer) - } - - this.started = false - } - - getConnections (peerId?: PeerId): Connection[] { - if (peerId != null) { - return this.connections - .filter(c => c.remotePeer.toString() === peerId.toString()) - } - - return this.connections - } - - getConnectionsMap (): PeerMap { - const map = new PeerMap() - - for (const conn of this.connections) { - const conns: Connection[] = map.get(conn.remotePeer) ?? [] - conns.push(conn) - - map.set(conn.remotePeer, conns) - } - - return map - } - - getMaxConnections (): number { - return 10_000 - } - - setMaxConnections (): void { - - } - - async openConnection (peerId: PeerId | Multiaddr | Multiaddr[], options?: AbortOptions): Promise { - if (isMultiaddr(peerId)) { - throw new UnsupportedOperationError('Dialing multiaddrs not supported') - } - - options?.signal?.throwIfAborted() - - let existingConnections: Connection[] = [] - - if (Array.isArray(peerId) && peerId.length > 0) { - existingConnections = this.getConnections(peerIdFromString(peerId[0].getPeerId() ?? 
'')) - } else if (isPeerId(peerId)) { - existingConnections = this.getConnections(peerId) - } - - if (existingConnections.length > 0) { - return existingConnections[0] - } - - const componentsB = mockNetwork.getNode(peerId) - - const [aToB, bToA] = connectionPair(this.components, componentsB) - - // track connections - this.connections.push(aToB) - ;(componentsB.connectionManager as MockConnectionManager).connections.push(bToA) - - this.components.events.safeDispatchEvent('connection:open', { - detail: aToB - }) - - for (const protocol of this.components.registrar.getProtocols()) { - for (const topology of this.components.registrar.getTopologies(protocol)) { - topology.onConnect?.(componentsB.peerId, aToB) - } - } - - this.components.events.safeDispatchEvent('peer:connect', { detail: componentsB.peerId }) - - componentsB.events.safeDispatchEvent('connection:open', { - detail: bToA - }) - - for (const protocol of componentsB.registrar.getProtocols()) { - for (const topology of componentsB.registrar.getTopologies(protocol)) { - topology.onConnect?.(this.components.peerId, bToA) - } - } - - componentsB.events.safeDispatchEvent('peer:connect', { detail: this.components.peerId }) - - return aToB - } - - async closeConnections (peerId: PeerId): Promise { - const connections = this.getConnections(peerId) - - if (connections.length === 0) { - return - } - - const componentsB = mockNetwork.getNode(peerId) - - for (const protocol of this.components.registrar.getProtocols()) { - this.components.registrar.getTopologies(protocol).forEach(topology => { - topology.onDisconnect?.(componentsB.peerId) - }) - } - - for (const conn of connections) { - await conn.close() - } - - this.connections = this.connections.filter(c => !c.remotePeer.equals(peerId)) - - if (this.connections.filter(c => !c.remotePeer.equals(peerId)).length === 0) { - componentsB.events.safeDispatchEvent('peer:disconnect', { detail: peerId }) - } - - await componentsB.connectionManager?.closeConnections(this.components.peerId) - - if (componentsB.connectionManager?.getConnectionsMap().get(this.components.peerId) == null) { - componentsB.events.safeDispatchEvent('peer:disconnect', { detail: this.components.peerId }) - } - } - - async acceptIncomingConnection (): Promise { - return true - } - - afterUpgradeInbound (): void { - - } - - getDialQueue (): PendingDial[] { - return [] - } - - async isDialable (): Promise { - return true - } -} - -export function mockConnectionManager (components: MockConnectionManagerComponents): ConnectionManager { - return new MockConnectionManager(components) -} diff --git a/packages/interface-compliance-tests/src/mocks/connection.ts b/packages/interface-compliance-tests/src/mocks/connection.ts deleted file mode 100644 index 130123c025..0000000000 --- a/packages/interface-compliance-tests/src/mocks/connection.ts +++ /dev/null @@ -1,309 +0,0 @@ -import { ConnectionClosedError } from '@libp2p/interface' -import { defaultLogger, logger } from '@libp2p/logger' -import * as mss from '@libp2p/multistream-select' -import { peerIdFromString } from '@libp2p/peer-id' -import { closeSource } from '@libp2p/utils/close-source' -import { duplexPair } from 'it-pair/duplex' -import { pipe } from 'it-pipe' -import { Uint8ArrayList } from 'uint8arraylist' -import { mockMultiaddrConnection } from './multiaddr-connection.js' -import { mockMuxer } from './muxer.js' -import { mockRegistrar } from './registrar.js' -import type { AbortOptions, ComponentLogger, Logger, MultiaddrConnection, Connection, Stream, Direction, 
ConnectionTimeline, ConnectionStatus, PeerId, StreamMuxer, StreamMuxerFactory, NewStreamOptions, ConnectionLimits } from '@libp2p/interface' -import type { Registrar } from '@libp2p/interface-internal' -import type { Multiaddr } from '@multiformats/multiaddr' -import type { Duplex, Source } from 'it-stream-types' - -export interface MockConnectionOptions { - direction?: Direction - registrar?: Registrar - muxerFactory?: StreamMuxerFactory - logger?: ComponentLogger -} - -interface MockConnectionInit { - remoteAddr: Multiaddr - remotePeer: PeerId - direction: Direction - maConn: MultiaddrConnection - muxer: StreamMuxer - logger: ComponentLogger -} - -class MockConnection implements Connection { - public id: string - public remoteAddr: Multiaddr - public remotePeer: PeerId - public direction: Direction - public timeline: ConnectionTimeline - public multiplexer?: string - public encryption?: string - public status: ConnectionStatus - public streams: Stream[] - public tags: string[] - public limits?: ConnectionLimits - public log: Logger - - private readonly muxer: StreamMuxer - private readonly maConn: MultiaddrConnection - private readonly logger: ComponentLogger - - constructor (init: MockConnectionInit) { - const { remoteAddr, remotePeer, direction, maConn, muxer, logger } = init - - this.id = `mock-connection-${Math.random()}` - this.remoteAddr = remoteAddr - this.remotePeer = remotePeer - this.direction = direction - this.status = 'open' - this.direction = direction - this.timeline = maConn.timeline - this.multiplexer = 'test-multiplexer' - this.encryption = 'yes-yes-very-secure' - this.streams = [] - this.tags = [] - this.muxer = muxer - this.maConn = maConn - this.logger = logger - this.log = logger.forComponent(this.id) - } - - async newStream (protocols: string | string[], options?: NewStreamOptions): Promise { - if (!Array.isArray(protocols)) { - protocols = [protocols] - } - - if (protocols.length === 0) { - throw new Error('protocols must have a length') - } - - if (this.status !== 'open') { - throw new ConnectionClosedError('connection must be open to create streams') - } - - options?.signal?.throwIfAborted() - - const id = `${Math.random()}` - const stream = await this.muxer.newStream(id) - const result = await mss.select(stream, protocols, { - ...options, - log: this.logger.forComponent('libp2p:mock-connection:stream:mss:select') - }) - - stream.protocol = result.protocol - stream.direction = 'outbound' - stream.sink = result.stream.sink - stream.source = result.stream.source - - this.streams.push(stream) - - return stream - } - - async close (options?: AbortOptions): Promise { - this.status = 'closing' - await Promise.all( - this.streams.map(async s => s.close(options)) - ) - await this.maConn.close() - this.status = 'closed' - this.timeline.close = Date.now() - } - - abort (err: Error): void { - this.status = 'closing' - this.streams.forEach(s => { - s.abort(err) - }) - this.maConn.abort(err) - this.status = 'closed' - this.timeline.close = Date.now() - } -} - -export function mockConnection (maConn: MultiaddrConnection, opts: MockConnectionOptions = {}): Connection { - const remoteAddr = maConn.remoteAddr - const remotePeerIdStr = remoteAddr.getPeerId() ?? '12D3KooWCrhmFM1BCPGBkNzbPfDk4cjYmtAYSpZwUBC69Qg2kZyq' - const logger = opts.logger ?? defaultLogger() - - if (remotePeerIdStr == null) { - throw new Error('Remote multiaddr must contain a peer id') - } - - const remotePeer = peerIdFromString(remotePeerIdStr) - const direction = opts.direction ?? 
'inbound' - const registrar = opts.registrar ?? mockRegistrar() - const muxerFactory = opts.muxerFactory ?? mockMuxer() - const log = logger.forComponent('libp2p:mock-muxer') - - const muxer = muxerFactory.createStreamMuxer({ - log, - direction, - onIncomingStream: (muxedStream) => { - try { - mss.handle(muxedStream, registrar.getProtocols(), { - log - }) - .then(async ({ stream, protocol }) => { - log('%s: incoming stream opened on %s', direction, protocol) - muxedStream.protocol = protocol - muxedStream.sink = stream.sink - muxedStream.source = stream.source - - connection.streams.push(muxedStream) - const { handler } = registrar.getHandler(protocol) - - await handler({ connection, stream: muxedStream }) - }).catch(err => { - log.error(err) - muxedStream.abort(err) - }) - } catch (err: any) { - log.error(err) - } - }, - onStreamEnd: (muxedStream) => { - connection.streams = connection.streams.filter(stream => stream.id !== muxedStream.id) - } - }) - - void pipe( - maConn, muxer, maConn - ) - - const connection = new MockConnection({ - remoteAddr, - remotePeer, - direction, - maConn, - muxer, - logger - }) - - return connection -} - -export interface StreamInit { - direction?: Direction - protocol?: string - id?: string -} - -export function mockStream (stream: Duplex, Source, Promise>, init: StreamInit = {}): Stream { - const id = `stream-${Date.now()}` - const log = logger(`libp2p:mock-stream:${id}`) - - // ensure stream output is `Uint8ArrayList` as it would be from an actual - // Stream where everything is length-varint encoded - const originalSource = stream.source - stream.source = (async function * (): AsyncGenerator { - for await (const buf of originalSource) { - if (buf instanceof Uint8Array) { - yield new Uint8ArrayList(buf) - } else { - yield buf - } - } - })() - - const abortSinkController = new AbortController() - const originalSink = stream.sink.bind(stream) - stream.sink = async (source) => { - abortSinkController.signal.addEventListener('abort', () => { - closeSource(source, log) - }) - - await originalSink(source) - } - - const mockStream: Stream = { - ...stream, - close: async (options) => { - await mockStream.closeRead(options) - await mockStream.closeWrite(options) - }, - closeRead: async () => { - closeSource(originalSource, log) - mockStream.timeline.closeRead = Date.now() - - if (mockStream.timeline.closeWrite != null) { - mockStream.timeline.close = Date.now() - } - }, - closeWrite: async () => { - abortSinkController.abort() - mockStream.timeline.closeWrite = Date.now() - - if (mockStream.timeline.closeRead != null) { - mockStream.timeline.close = Date.now() - } - }, - abort: () => { - closeSource(originalSource, log) - mockStream.timeline.closeWrite = Date.now() - mockStream.timeline.closeRead = Date.now() - mockStream.timeline.close = Date.now() - }, - direction: 'outbound', - protocol: '/foo/1.0.0', - timeline: { - open: Date.now() - }, - metadata: {}, - id: `stream-${Date.now()}`, - status: 'open', - readStatus: 'ready', - writeStatus: 'ready', - log: logger('mock-stream'), - ...init - } - - return mockStream -} - -export interface StreamPairInit { - duplex: Duplex, Source, Promise> - init?: StreamInit -} - -export function streamPair (a: StreamPairInit, b: StreamPairInit, init: StreamInit = {}): [Stream, Stream] { - return [ - mockStream(a.duplex, { - direction: 'outbound', - ...init, - ...(a.init ?? {}) - }), - mockStream(b.duplex, { - direction: 'inbound', - ...init, - ...(b.init ?? 
{}) - }) - ] -} - -export interface Peer { - peerId: PeerId - registrar: Registrar -} - -export function multiaddrConnectionPair (a: { peerId: PeerId, registrar: Registrar }, b: { peerId: PeerId, registrar: Registrar }): [ MultiaddrConnection, MultiaddrConnection ] { - const [peerBtoPeerA, peerAtoPeerB] = duplexPair() - - return [ - mockMultiaddrConnection(peerAtoPeerB, b.peerId), - mockMultiaddrConnection(peerBtoPeerA, a.peerId) - ] -} - -export function connectionPair (a: { peerId: PeerId, registrar: Registrar }, b: { peerId: PeerId, registrar: Registrar }): [ Connection, Connection ] { - const [peerBtoPeerA, peerAtoPeerB] = multiaddrConnectionPair(a, b) - - return [ - mockConnection(peerBtoPeerA, { - registrar: a.registrar - }), - mockConnection(peerAtoPeerB, { - registrar: b.registrar - }) - ] -} diff --git a/packages/interface-compliance-tests/src/mocks/duplex.ts b/packages/interface-compliance-tests/src/mocks/duplex.ts deleted file mode 100644 index 81394606ef..0000000000 --- a/packages/interface-compliance-tests/src/mocks/duplex.ts +++ /dev/null @@ -1,11 +0,0 @@ -import type { Duplex, Source } from 'it-stream-types' -import type { Uint8ArrayList } from 'uint8arraylist' - -export function mockDuplex (): Duplex, Source, Promise> { - return { - source: (async function * () { - yield * [] - }()), - sink: async () => {} - } -} diff --git a/packages/interface-compliance-tests/src/mocks/index.ts b/packages/interface-compliance-tests/src/mocks/index.ts deleted file mode 100644 index bdf8c1a5a3..0000000000 --- a/packages/interface-compliance-tests/src/mocks/index.ts +++ /dev/null @@ -1,11 +0,0 @@ -export { mockConnectionManager, mockNetwork } from './connection-manager.js' -export { mockConnection, mockStream, streamPair, connectionPair } from './connection.js' -export { mockMultiaddrConnection, mockMultiaddrConnPair } from './multiaddr-connection.js' -export { mockMuxer } from './muxer.js' -export { mockRegistrar } from './registrar.js' -export { mockUpgrader } from './upgrader.js' -export { mockDuplex } from './duplex.js' -export type { MockUpgraderInit } from './upgrader.js' -export type { MockNetworkComponents, MockConnectionManagerComponents, MockNetwork } from './connection-manager.js' -export type { MockConnectionOptions, StreamInit, StreamPairInit } from './connection.js' -export type { MockMultiaddrConnPairOptions } from './multiaddr-connection.js' diff --git a/packages/interface-compliance-tests/src/mocks/multiaddr-connection.ts b/packages/interface-compliance-tests/src/mocks/multiaddr-connection.ts deleted file mode 100644 index d43237c8e8..0000000000 --- a/packages/interface-compliance-tests/src/mocks/multiaddr-connection.ts +++ /dev/null @@ -1,80 +0,0 @@ -import { logger } from '@libp2p/logger' -import { multiaddr } from '@multiformats/multiaddr' -import { abortableSource } from 'abortable-iterator' -import { duplexPair } from 'it-pair/duplex' -import type { MultiaddrConnection, PeerId } from '@libp2p/interface' -import type { Multiaddr } from '@multiformats/multiaddr' -import type { Duplex } from 'it-stream-types' -import type { Uint8ArrayList } from 'uint8arraylist' - -export function mockMultiaddrConnection (source: Duplex> & Partial, peerId: PeerId): MultiaddrConnection { - const maConn: MultiaddrConnection = { - async close () { - - }, - abort: () => {}, - timeline: { - open: Date.now() - }, - remoteAddr: multiaddr(`/ip4/127.0.0.1/tcp/4001/p2p/${peerId.toString()}`), - log: logger('mock-maconn'), - ...source - } - - return maConn -} - -export interface 
MockMultiaddrConnPairOptions { - addrs: Multiaddr[] - remotePeer: PeerId -} - -/** - * Returns both sides of a mocked MultiaddrConnection - */ -export function mockMultiaddrConnPair (opts: MockMultiaddrConnPairOptions): { inbound: MultiaddrConnection, outbound: MultiaddrConnection } { - const { addrs, remotePeer } = opts - const controller = new AbortController() - const [localAddr, remoteAddr] = addrs - const [inboundStream, outboundStream] = duplexPair() - - const outbound: MultiaddrConnection = { - ...outboundStream, - remoteAddr: remoteAddr.toString().includes(`/p2p/${remotePeer.toString()}`) ? remoteAddr : remoteAddr.encapsulate(`/p2p/${remotePeer.toString()}`), - timeline: { - open: Date.now() - }, - close: async () => { - outbound.timeline.close = Date.now() - controller.abort() - }, - abort: (err: Error) => { - outbound.timeline.close = Date.now() - controller.abort(err) - }, - log: logger('mock-maconn-outbound') - } - - const inbound: MultiaddrConnection = { - ...inboundStream, - remoteAddr: localAddr, - timeline: { - open: Date.now() - }, - close: async () => { - inbound.timeline.close = Date.now() - controller.abort() - }, - abort: (err: Error) => { - outbound.timeline.close = Date.now() - controller.abort(err) - }, - log: logger('mock-maconn-inbound') - } - - // Make the sources abortable so we can close them easily - inbound.source = abortableSource(inbound.source, controller.signal) - outbound.source = abortableSource(outbound.source, controller.signal) - - return { inbound, outbound } -} diff --git a/packages/interface-compliance-tests/src/mocks/muxer.ts b/packages/interface-compliance-tests/src/mocks/muxer.ts deleted file mode 100644 index 8a7b87f154..0000000000 --- a/packages/interface-compliance-tests/src/mocks/muxer.ts +++ /dev/null @@ -1,331 +0,0 @@ -import { defaultLogger, logger } from '@libp2p/logger' -import { AbstractStream } from '@libp2p/utils/abstract-stream' -import { abortableSource } from 'abortable-iterator' -import map from 'it-map' -import * as ndjson from 'it-ndjson' -import { pipe } from 'it-pipe' -import { pushable } from 'it-pushable' -import { Uint8ArrayList } from 'uint8arraylist' -import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' -import { toString as uint8ArrayToString } from 'uint8arrays/to-string' -import type { AbortOptions, Direction, Stream, StreamMuxer, StreamMuxerFactory, StreamMuxerInit } from '@libp2p/interface' -import type { Logger } from '@libp2p/logger' -import type { AbstractStreamInit } from '@libp2p/utils/abstract-stream' -import type { Pushable } from 'it-pushable' -import type { Source } from 'it-stream-types' - -let muxers = 0 -let streams = 0 - -interface DataMessage { - id: string - type: 'data' - direction: Direction - chunk: string -} - -interface ResetMessage { - id: string - type: 'reset' - direction: Direction -} - -interface CloseWriteMessage { - id: string - type: 'closeWrite' - direction: Direction -} - -interface CloseReadMessage { - id: string - type: 'closeRead' - direction: Direction -} - -interface CreateMessage { - id: string - type: 'create' - direction: 'outbound' -} - -type StreamMessage = DataMessage | ResetMessage | CloseWriteMessage | CloseReadMessage | CreateMessage - -export interface MockMuxedStreamInit extends AbstractStreamInit { - push: Pushable -} - -class MuxedStream extends AbstractStream { - private readonly push: Pushable - - constructor (init: MockMuxedStreamInit) { - super(init) - - this.push = init.push - } - - sendNewStream (): void { - // If initiator, open a new 
stream - const createMsg: CreateMessage = { - id: this.id, - type: 'create', - direction: 'outbound' - } - this.push.push(createMsg) - } - - sendData (data: Uint8ArrayList): void { - const dataMsg: DataMessage = { - id: this.id, - type: 'data', - chunk: uint8ArrayToString(data.subarray(), 'base64pad'), - direction: this.direction - } - this.push.push(dataMsg) - } - - sendReset (): void { - const resetMsg: ResetMessage = { - id: this.id, - type: 'reset', - direction: this.direction - } - this.push.push(resetMsg) - } - - sendCloseWrite (): void { - const closeMsg: CloseWriteMessage = { - id: this.id, - type: 'closeWrite', - direction: this.direction - } - this.push.push(closeMsg) - } - - sendCloseRead (): void { - const closeMsg: CloseReadMessage = { - id: this.id, - type: 'closeRead', - direction: this.direction - } - this.push.push(closeMsg) - } -} - -class MockMuxer implements StreamMuxer { - public source: AsyncGenerator - public input: Pushable - public streamInput: Pushable - public name: string - public protocol: string = '/mock-muxer/1.0.0' - - private readonly closeController: AbortController - private readonly registryInitiatorStreams: Map - private readonly registryRecipientStreams: Map - private readonly options: StreamMuxerInit - - private readonly log: Logger - - constructor (init?: StreamMuxerInit) { - this.name = `muxer:${muxers++}` - this.log = logger(`libp2p:mock-muxer:${this.name}`) - this.registryInitiatorStreams = new Map() - this.registryRecipientStreams = new Map() - this.log('create muxer') - this.options = init ?? { direction: 'inbound', log: defaultLogger().forComponent('mock-muxer') } - this.closeController = new AbortController() - // receives data from the muxer at the other end of the stream - this.source = this.input = pushable({ - onEnd: () => { - for (const stream of this.registryInitiatorStreams.values()) { - stream.destroy() - } - - for (const stream of this.registryRecipientStreams.values()) { - stream.destroy() - } - } - }) - - // receives messages from all of the muxed streams - this.streamInput = pushable({ - objectMode: true - }) - } - - // receive incoming messages - async sink (source: Source): Promise { - try { - await pipe( - abortableSource(source, this.closeController.signal), - (source) => map(source, buf => uint8ArrayToString(buf.subarray())), - ndjson.parse, - async (source) => { - for await (const message of source) { - this.log.trace('-> %s %s %s', message.type, message.direction, message.id) - this.handleMessage(message) - } - } - ) - - this.log('muxer ended') - this.input.end() - } catch (err: any) { - this.log.error('muxer errored - %e', err) - this.input.end(err) - } - } - - handleMessage (message: StreamMessage): void { - let muxedStream: MuxedStream | undefined - - const registry = message.direction === 'outbound' ? 
this.registryRecipientStreams : this.registryInitiatorStreams - - if (message.type === 'create') { - if (registry.has(message.id)) { - throw new Error(`Already had stream for ${message.id}`) - } - - muxedStream = this.createStream(message.id, 'inbound') - registry.set(muxedStream.id, muxedStream) - - if (this.options.onIncomingStream != null) { - this.options.onIncomingStream(muxedStream) - } - } - - muxedStream = registry.get(message.id) - - if (muxedStream == null) { - this.log.error(`No stream found for ${message.id}`) - - return - } - - if (message.type === 'data') { - muxedStream.sourcePush(new Uint8ArrayList(uint8ArrayFromString(message.chunk, 'base64pad'))) - } else if (message.type === 'reset') { - this.log('-> reset stream %s %s', muxedStream.direction, muxedStream.id) - muxedStream.reset() - } else if (message.type === 'closeWrite') { - this.log('-> closing writeable end of stream %s %s', muxedStream.direction, muxedStream.id) - muxedStream.remoteCloseWrite() - } else if (message.type === 'closeRead') { - this.log('-> closing readable end of stream %s %s', muxedStream.direction, muxedStream.id) - muxedStream.remoteCloseRead() - } - } - - get streams (): Stream[] { - return Array.from(this.registryRecipientStreams.values()) - .concat(Array.from(this.registryInitiatorStreams.values())) - } - - newStream (name?: string): Stream { - if (this.closeController.signal.aborted) { - throw new Error('Muxer already closed') - } - this.log('newStream %s', name) - const storedStream = this.createStream(name, 'outbound') - this.registryInitiatorStreams.set(storedStream.id, storedStream) - - return storedStream - } - - createStream (name?: string, direction: Direction = 'outbound'): MuxedStream { - const id = name ?? `${streams++}` - - this.log('createStream %s %s', direction, id) - - const muxedStream: MuxedStream = new MuxedStream({ - id, - direction, - push: this.streamInput, - onEnd: () => { - this.log('stream ended') - - if (direction === 'outbound') { - this.registryInitiatorStreams.delete(muxedStream.id) - } else { - this.registryRecipientStreams.delete(muxedStream.id) - } - - if (this.options.onStreamEnd != null) { - this.options.onStreamEnd(muxedStream) - } - }, - log: logger(`libp2p:mock-muxer:stream:${direction}:${id}`) - }) - - return muxedStream - } - - async close (options?: AbortOptions): Promise { - if (this.closeController.signal.aborted) { - return - } - - const signal = options?.signal ?? 
AbortSignal.timeout(10) - - try { - // try to gracefully close all streams - await Promise.all( - this.streams.map(async s => s.close({ - signal - })) - ) - - this.input.end() - - // try to gracefully close the muxer - await this.input.onEmpty({ - signal - }) - - this.closeController.abort() - } catch (err: any) { - this.abort(err) - } - } - - abort (err: Error): void { - if (this.closeController.signal.aborted) { - return - } - - this.log('aborting muxed streams') - - this.streams.forEach(s => { - s.abort(err) - }) - - this.closeController.abort(err) - this.input.end(err) - } -} - -class MockMuxerFactory implements StreamMuxerFactory { - public protocol: string = '/mock-muxer/1.0.0' - - createStreamMuxer (init?: StreamMuxerInit): StreamMuxer { - const mockMuxer = new MockMuxer(init) - - void Promise.resolve().then(async () => { - void pipe( - mockMuxer.streamInput, - ndjson.stringify, - (source) => map(source, str => new Uint8ArrayList(uint8ArrayFromString(str))), - async (source) => { - for await (const buf of source) { - mockMuxer.input.push(buf.subarray()) - } - } - ) - }) - - return mockMuxer - } -} - -export function mockMuxer (): StreamMuxerFactory { - return new MockMuxerFactory() -} diff --git a/packages/interface-compliance-tests/src/mocks/registrar.ts b/packages/interface-compliance-tests/src/mocks/registrar.ts deleted file mode 100644 index 1b2aa21bd8..0000000000 --- a/packages/interface-compliance-tests/src/mocks/registrar.ts +++ /dev/null @@ -1,86 +0,0 @@ -import { mergeOptions } from '@libp2p/utils/merge-options' -import type { Connection, PeerId, Topology, IncomingStreamData, StreamHandler, StreamHandlerOptions, StreamHandlerRecord } from '@libp2p/interface' -import type { Registrar } from '@libp2p/interface-internal' - -export class MockRegistrar implements Registrar { - private readonly topologies = new Map>() - private readonly handlers = new Map() - - getProtocols (): string[] { - return Array.from(this.handlers.keys()).sort() - } - - async handle (protocol: string, handler: StreamHandler, opts?: StreamHandlerOptions): Promise { - const options = mergeOptions.bind({ ignoreUndefined: true })({ - maxInboundStreams: 1, - maxOutboundStreams: 1 - }, opts) - - if (this.handlers.has(protocol)) { - throw new Error(`Handler already registered for protocol ${protocol}`) - } - - this.handlers.set(protocol, { - handler, - options - }) - } - - async unhandle (protocol: string): Promise { - this.handlers.delete(protocol) - } - - getHandler (protocol: string): StreamHandlerRecord { - const handler = this.handlers.get(protocol) - - if (handler == null) { - throw new Error(`No handler registered for protocol ${protocol}`) - } - - return handler - } - - async register (protocol: string, topology: Topology): Promise { - const id = `topology-id-${Math.random()}` - let topologies = this.topologies.get(protocol) - - if (topologies == null) { - topologies = [] - } - - topologies.push({ - id, - topology - }) - - this.topologies.set(protocol, topologies) - - return id - } - - unregister (id: string | string[]): void { - if (!Array.isArray(id)) { - id = [id] - } - - id.forEach(id => this.topologies.delete(id)) - } - - getTopologies (protocol: string): Topology[] { - return (this.topologies.get(protocol) ?? 
[]).map(t => t.topology) - } -} - -export function mockRegistrar (): Registrar { - return new MockRegistrar() -} - -export async function mockIncomingStreamEvent (protocol: string, conn: Connection, remotePeer: PeerId): Promise { - return { - ...await conn.newStream([protocol]), - // @ts-expect-error incomplete implementation - connection: { - remotePeer - } - } -} diff --git a/packages/interface-compliance-tests/src/mocks/upgrader.ts b/packages/interface-compliance-tests/src/mocks/upgrader.ts deleted file mode 100644 index 8eca7148d0..0000000000 --- a/packages/interface-compliance-tests/src/mocks/upgrader.ts +++ /dev/null @@ -1,65 +0,0 @@ -import { anySignal } from 'any-signal' -import { setMaxListeners } from 'main-event' -import { mockConnection } from './connection.js' -import type { Libp2pEvents, Connection, MultiaddrConnection, Upgrader, UpgraderOptions, ClearableSignal, ConnectionEncrypter, StreamMuxerFactory } from '@libp2p/interface' -import type { Registrar } from '@libp2p/interface-internal' -import type { TypedEventTarget } from 'main-event' - -export interface MockUpgraderInit { - registrar?: Registrar - events?: TypedEventTarget -} - -class MockUpgrader implements Upgrader { - private readonly registrar?: Registrar - private readonly events?: TypedEventTarget - - constructor (init: MockUpgraderInit) { - this.registrar = init.registrar - this.events = init.events - } - - async upgradeOutbound (multiaddrConnection: MultiaddrConnection, opts: UpgraderOptions): Promise { - const connection = mockConnection(multiaddrConnection, { - direction: 'outbound', - registrar: this.registrar, - ...opts - }) - - this.events?.safeDispatchEvent('connection:open', { detail: connection }) - - return connection - } - - async upgradeInbound (multiaddrConnection: MultiaddrConnection, opts: UpgraderOptions): Promise { - const connection = mockConnection(multiaddrConnection, { - direction: 'inbound', - registrar: this.registrar, - ...opts - }) - - this.events?.safeDispatchEvent('connection:open', { detail: connection }) - } - - createInboundAbortSignal (signal?: AbortSignal): ClearableSignal { - const output = anySignal([ - AbortSignal.timeout(10_000), - signal - ]) - setMaxListeners(Infinity, output) - - return output - } - - getConnectionEncrypters (): Map> { - return new Map() - } - - getStreamMuxers (): Map { - return new Map() - } -} - -export function mockUpgrader (init: MockUpgraderInit = {}): Upgrader { - return new MockUpgrader(init) -} diff --git a/packages/interface-compliance-tests/src/pubsub/api.ts b/packages/interface-compliance-tests/src/pubsub/api.ts deleted file mode 100644 index 811d40cf23..0000000000 --- a/packages/interface-compliance-tests/src/pubsub/api.ts +++ /dev/null @@ -1,116 +0,0 @@ -import { isPubSub, isStartable, start, stop } from '@libp2p/interface' -import { expect } from 'aegir/chai' -import delay from 'delay' -import pDefer from 'p-defer' -import pWaitFor from 'p-wait-for' -import sinon from 'sinon' -import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' -import { mockNetwork } from '../mocks/index.js' -import { createComponents } from './utils.js' -import type { PubSubArgs, PubSubComponents } from './index.js' -import type { TestSetup } from '../index.js' -import type { PubSub } from '@libp2p/interface' - -const topic = 'foo' -const data = uint8ArrayFromString('bar') - -export default (common: TestSetup): void => { - describe('pubsub api', () => { - let pubsub: PubSub - let components: PubSubComponents - - // Create pubsub router - 
beforeEach(async () => { - mockNetwork.reset() - components = await createComponents() - - pubsub = components.pubsub = await common.setup({ - components, - init: { - emitSelf: true - } - }) - }) - - afterEach(async () => { - sinon.restore() - await stop(...Object.values(components)) - await common.teardown() - mockNetwork.reset() - }) - - it('is a PubSub implementation', () => { - expect(isPubSub(pubsub)).to.be.true() - }) - - it('can start correctly', async () => { - if (!isStartable(pubsub)) { - return - } - - sinon.spy(components.registrar, 'register') - - await start(...Object.values(components)) - - expect(components.registrar.register).to.have.property('callCount', 1) - }) - - it('can stop correctly', async () => { - if (!isStartable(pubsub)) { - return - } - - sinon.spy(components.registrar, 'unregister') - - await start(...Object.values(components)) - await stop(...Object.values(components)) - - expect(components.registrar.unregister).to.have.property('callCount', 1) - }) - - it('can subscribe and unsubscribe correctly', async () => { - const handler = (): void => { - throw new Error('a message should not be received') - } - - await start(...Object.values(components)) - pubsub.subscribe(topic) - pubsub.addEventListener('message', handler) - - await pWaitFor(() => { - const topics = pubsub.getTopics() - return topics.length === 1 && topics[0] === topic - }) - - pubsub.removeEventListener('message', handler) - pubsub.unsubscribe(topic) - - await pWaitFor(() => pubsub.getTopics().length === 0) - - // Publish to guarantee the handler is not called - await pubsub.publish(topic, data) - - // handlers are called async - await delay(100) - - await stop(...Object.values(components)) - }) - - it('can subscribe and publish correctly', async () => { - const defer = pDefer() - - await start(...Object.values(components)) - - pubsub.subscribe(topic) - pubsub.addEventListener('message', (evt) => { - expect(evt).to.have.nested.property('detail.topic', topic) - expect(evt).to.have.deep.nested.property('detail.data', data) - defer.resolve() - }) - await pubsub.publish(topic, data) - await defer.promise - - await stop(...Object.values(components)) - }) - }) -} diff --git a/packages/interface-compliance-tests/src/pubsub/connection-handlers.ts b/packages/interface-compliance-tests/src/pubsub/connection-handlers.ts deleted file mode 100644 index eefdd552f8..0000000000 --- a/packages/interface-compliance-tests/src/pubsub/connection-handlers.ts +++ /dev/null @@ -1,413 +0,0 @@ -import { start, stop } from '@libp2p/interface' -import { expect } from 'aegir/chai' -import pDefer from 'p-defer' -import { pEvent } from 'p-event' -import pWaitFor from 'p-wait-for' -import sinon from 'sinon' -import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' -import { toString as uint8ArrayToString } from 'uint8arrays/to-string' -import { mockNetwork } from '../mocks/index.js' -import { createComponents } from './utils.js' -import type { PubSubArgs } from './index.js' -import type { TestSetup } from '../index.js' -import type { MockNetworkComponents } from '../mocks/index.js' -import type { Message, PubSub } from '@libp2p/interface' - -export default (common: TestSetup): void => { - describe('pubsub connection handlers', () => { - let psA: PubSub - let psB: PubSub - let componentsA: MockNetworkComponents - let componentsB: MockNetworkComponents - - describe('nodes send state on connection', () => { - // Create pubsub nodes and connect them - beforeEach(async () => { - mockNetwork.reset() - - componentsA 
= await createComponents() - componentsB = await createComponents() - - psA = componentsA.pubsub = await common.setup({ - components: componentsA, - init: {} - }) - - psB = componentsB.pubsub = await common.setup({ - components: componentsB, - init: {} - }) - - // Start pubsub - await start(...Object.values(componentsA), ...Object.values(componentsB)) - - expect(psA.getPeers()).to.be.empty() - expect(psB.getPeers()).to.be.empty() - - // Make subscriptions prior to nodes connected - psA.subscribe('Za') - psB.subscribe('Zb') - - expect(psA.getPeers()).to.be.empty() - expect(psA.getTopics()).to.deep.equal(['Za']) - expect(psB.getPeers()).to.be.empty() - expect(psB.getTopics()).to.deep.equal(['Zb']) - }) - - afterEach(async () => { - sinon.restore() - await stop(...Object.values(componentsA), ...Object.values(componentsB)) - await common.teardown() - mockNetwork.reset() - }) - - it('existing subscriptions are sent upon peer connection', async function () { - const subscriptionsChanged = Promise.all([ - pEvent(psA, 'subscription-change'), - pEvent(psB, 'subscription-change') - ]) - - await componentsA.connectionManager.openConnection(componentsB.peerId) - - await subscriptionsChanged - - expect(psA.getPeers()).to.have.lengthOf(1) - expect(psB.getPeers()).to.have.lengthOf(1) - - expect(psA.getTopics()).to.deep.equal(['Za']) - expect(psB.getTopics()).to.deep.equal(['Zb']) - - expect(psA.getSubscribers('Zb').map(p => p.toString())).to.deep.equal([componentsB.peerId.toString()]) - expect(psB.getSubscribers('Za').map(p => p.toString())).to.deep.equal([componentsA.peerId.toString()]) - }) - }) - - describe('pubsub started before connect', () => { - let psA: PubSub - let psB: PubSub - let componentsA: MockNetworkComponents - let componentsB: MockNetworkComponents - - // Create pubsub nodes and start them - beforeEach(async () => { - mockNetwork.reset() - componentsA = await createComponents() - componentsB = await createComponents() - - psA = componentsA.pubsub = await common.setup({ - components: componentsA, - init: {} - }) - psB = componentsB.pubsub = await common.setup({ - components: componentsB, - init: {} - }) - - await start(...Object.values(componentsA), ...Object.values(componentsB)) - }) - - afterEach(async () => { - sinon.restore() - await stop(...Object.values(componentsA), ...Object.values(componentsB)) - await common.teardown() - mockNetwork.reset() - }) - - it('should get notified of connected peers on dial', async () => { - await componentsA.connectionManager.openConnection(componentsB.peerId) - - return Promise.all([ - pWaitFor(() => psA.getPeers().length === 1), - pWaitFor(() => psB.getPeers().length === 1) - ]) - }) - - it('should receive pubsub messages', async () => { - const defer = pDefer() - const topic = 'test-topic' - const data = uint8ArrayFromString('hey!') - - await componentsA.connectionManager.openConnection(componentsB.peerId) - - let subscribedTopics = psA.getTopics() - expect(subscribedTopics).to.not.include(topic) - - psA.subscribe(topic) - psA.addEventListener('message', (evt) => { - if (evt.detail.topic === topic) { - const msg = evt.detail - expect(msg.data).to.equalBytes(data) - defer.resolve() - } - }) - psA.subscribe(topic) - - subscribedTopics = psA.getTopics() - expect(subscribedTopics).to.include(topic) - - // wait for psB to know about psA subscription - await pWaitFor(() => { - const subscribedPeers = psB.getSubscribers(topic) - return subscribedPeers.map(p => p.toString()).includes(componentsA.peerId.toString()) // eslint-disable-line 
max-nested-callbacks - }) - await psB.publish(topic, data) - - await defer.promise - }) - }) - - describe('pubsub started after connect', () => { - let psA: PubSub - let psB: PubSub - let componentsA: MockNetworkComponents - let componentsB: MockNetworkComponents - - // Create pubsub nodes - beforeEach(async () => { - mockNetwork.reset() - componentsA = await createComponents() - componentsB = await createComponents() - - psA = componentsA.pubsub = await common.setup({ - components: componentsA, - init: {} - }) - psB = componentsB.pubsub = await common.setup({ - components: componentsB, - init: {} - }) - }) - - afterEach(async () => { - sinon.restore() - await stop(...Object.values(componentsA), ...Object.values(componentsB)) - await common.teardown() - mockNetwork.reset() - }) - - it('should get notified of connected peers after starting', async () => { - await start(...Object.values(componentsA), ...Object.values(componentsB)) - - await componentsA.connectionManager.openConnection(componentsB.peerId) - - return Promise.all([ - pWaitFor(() => psA.getPeers().length === 1), - pWaitFor(() => psB.getPeers().length === 1) - ]) - }) - - it('should receive pubsub messages', async () => { - const defer = pDefer() - const topic = 'test-topic' - const data = uint8ArrayFromString('hey!') - - await start(...Object.values(componentsA), ...Object.values(componentsB)) - - await componentsA.connectionManager.openConnection(componentsB.peerId) - - await Promise.all([ - pWaitFor(() => psA.getPeers().length === 1), - pWaitFor(() => psB.getPeers().length === 1) - ]) - - let subscribedTopics = psA.getTopics() - expect(subscribedTopics).to.not.include(topic) - - psA.subscribe(topic) - psA.addEventListener('message', (evt) => { - if (evt.detail.topic === topic) { - const msg = evt.detail - expect(msg.data).to.equalBytes(data) - defer.resolve() - } - }) - psA.subscribe(topic) - - subscribedTopics = psA.getTopics() - expect(subscribedTopics).to.include(topic) - - // wait for psB to know about psA subscription - await pWaitFor(() => { - const subscribedPeers = psB.getSubscribers(topic) - return subscribedPeers.map(p => p.toString()).includes(componentsA.peerId.toString()) // eslint-disable-line max-nested-callbacks - }) - await psB.publish(topic, data) - - await defer.promise - }) - }) - - describe('pubsub with intermittent connections', () => { - let psA: PubSub - let psB: PubSub - let componentsA: MockNetworkComponents - let componentsB: MockNetworkComponents - - // Create pubsub nodes and start them - beforeEach(async () => { - mockNetwork.reset() - componentsA = await createComponents() - componentsB = await createComponents() - - psA = componentsA.pubsub = await common.setup({ - components: componentsA, - init: {} - }) - psB = componentsB.pubsub = await common.setup({ - components: componentsB, - init: {} - }) - - await start(...Object.values(componentsA), ...Object.values(componentsB)) - }) - - afterEach(async () => { - sinon.restore() - await stop(...Object.values(componentsA), ...Object.values(componentsB)) - await common.teardown() - mockNetwork.reset() - }) - - it.skip('should receive pubsub messages after a node restart', async function () { - const topic = 'test-topic' - const data = uint8ArrayFromString('hey!') - - let counter = 0 - const defer1 = pDefer() - const defer2 = pDefer() - - await componentsA.connectionManager.openConnection(componentsB.peerId) - - let subscribedTopics = psA.getTopics() - expect(subscribedTopics).to.not.include(topic) - - psA.subscribe(topic) - 
psA.addEventListener('message', (evt) => { - if (evt.detail.topic === topic) { - const msg = evt.detail - expect(msg.data).to.equalBytes(data) - counter++ - counter === 1 ? defer1.resolve() : defer2.resolve() - } - }) - psA.subscribe(topic) - - subscribedTopics = psA.getTopics() - expect(subscribedTopics).to.include(topic) - - // wait for psB to know about psA subscription - await pWaitFor(() => { - const subscribedPeers = psB.getSubscribers(topic) - return subscribedPeers.map(p => p.toString()).includes(componentsA.peerId.toString()) // eslint-disable-line max-nested-callbacks - }) - await psB.publish(topic, data) - - await defer1.promise - - await stop(psB) - await pWaitFor(() => { - // @ts-expect-error protected fields - const aHasConnectionToB = psA._libp2p.connectionManager.get(psB.peerId) - // @ts-expect-error protected fields - const bHasConnectionToA = psB._libp2p.connectionManager.get(psA.peerId) - - return aHasConnectionToB != null && bHasConnectionToA != null - }) - await start(psB) - - await componentsA.connectionManager.openConnection(componentsB.peerId) - - // wait for remoteLibp2p to know about libp2p subscription - await pWaitFor(() => { - const subscribedPeers = psB.getSubscribers(topic) - return subscribedPeers.toString().includes(componentsA.peerId.toString()) - }) - - await psB.publish(topic, data) - - await defer2.promise - }) - - it.skip('should handle quick reconnects with a delayed disconnect', async () => { - // Subscribe on both - let aReceivedFirstMessageFromB = false - let aReceivedSecondMessageFromB = false - let bReceivedFirstMessageFromA = false - let bReceivedSecondMessageFromA = false - const topic = 'reconnect-channel' - - const handlerSpyA = (evt: CustomEvent): void => { - if (evt.detail.topic !== topic) { - return - } - - const message = evt.detail - const data = uint8ArrayToString(message.data) - - if (data === 'message-from-b-1') { - aReceivedFirstMessageFromB = true - } - - if (data === 'message-from-b-2') { - aReceivedSecondMessageFromB = true - } - } - const handlerSpyB = (evt: CustomEvent): void => { - if (evt.detail.topic !== topic) { - return - } - - const message = evt.detail - const data = uint8ArrayToString(message.data) - - if (data === 'message-from-a-1') { - bReceivedFirstMessageFromA = true - } - - if (data === 'message-from-a-2') { - bReceivedSecondMessageFromA = true - } - } - - psA.addEventListener('message', handlerSpyA) - psB.addEventListener('message', handlerSpyB) - psA.subscribe(topic) - psB.subscribe(topic) - - // Create two connections to the remote peer - // @ts-expect-error protected fields - const originalConnection = await psA._libp2p.dialer.connectToPeer(psB.peerId) - - // second connection - await componentsA.connectionManager.openConnection(componentsB.peerId) - - // Wait for subscriptions to occur - await pWaitFor(() => { - return psA.getSubscribers(topic).map(p => p.toString()).includes(componentsB.peerId.toString()) && - psB.getSubscribers(topic).map(p => p.toString()).includes(componentsA.peerId.toString()) - }) - - // Verify messages go both ways - await psA.publish(topic, uint8ArrayFromString('message-from-a-1')) - await psB.publish(topic, uint8ArrayFromString('message-from-b-1')) - await pWaitFor(() => { - return aReceivedFirstMessageFromB && bReceivedFirstMessageFromA - }) - - // Disconnect the first connection (this acts as a delayed reconnect) - // @ts-expect-error protected fields - const psAConnUpdateSpy = sinon.spy(psA._libp2p.connectionManager.connections, 'set') - - await originalConnection.close() - await 
pWaitFor(() => psAConnUpdateSpy.callCount === 1) - - // Verify messages go both ways after the disconnect - await psA.publish(topic, uint8ArrayFromString('message-from-a-2')) - await psB.publish(topic, uint8ArrayFromString('message-from-b-2')) - await pWaitFor(() => { - return aReceivedSecondMessageFromB && bReceivedSecondMessageFromA - }) - }) - }) - }) -} diff --git a/packages/interface-compliance-tests/src/pubsub/emit-self.ts b/packages/interface-compliance-tests/src/pubsub/emit-self.ts deleted file mode 100644 index 03d6b4e973..0000000000 --- a/packages/interface-compliance-tests/src/pubsub/emit-self.ts +++ /dev/null @@ -1,99 +0,0 @@ -import { start, stop } from '@libp2p/interface' -import { expect } from 'aegir/chai' -import sinon from 'sinon' -import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' -import { mockNetwork } from '../mocks/index.js' -import { createComponents } from './utils.js' -import type { PubSubArgs, PubSubComponents } from './index.js' -import type { TestSetup } from '../index.js' -import type { PubSub } from '@libp2p/interface' - -const topic = 'foo' -const data = uint8ArrayFromString('bar') -const shouldNotHappen = (): void => expect.fail() - -export default (common: TestSetup): void => { - describe('emit self', () => { - describe('enabled', () => { - let pubsub: PubSub - let components: PubSubComponents - - before(async () => { - mockNetwork.reset() - components = await createComponents() - - pubsub = components.pubsub = await common.setup({ - components, - init: { - emitSelf: true - } - }) - - await start(...Object.values(components)) - pubsub.subscribe(topic) - }) - - after(async () => { - sinon.restore() - await stop(...Object.values(components)) - await common.teardown() - mockNetwork.reset() - }) - - it('should emit to self on publish', async () => { - const promise = new Promise((resolve) => { - pubsub.addEventListener('message', (evt) => { - if (evt.detail.topic === topic) { - resolve() - } - }, { - once: true - }) - }) - - const result = await pubsub.publish(topic, data) - - await promise - - expect(result).to.have.property('recipients').with.lengthOf(1) - }) - }) - - describe('disabled', () => { - let pubsub: PubSub - let components: PubSubComponents - - before(async () => { - mockNetwork.reset() - components = await createComponents() - pubsub = components.pubsub = await common.setup({ - components, - init: { - emitSelf: false - } - }) - - await start(...Object.values(components)) - pubsub.subscribe(topic) - }) - - after(async () => { - sinon.restore() - await stop(...Object.values(components)) - await common.teardown() - mockNetwork.reset() - }) - - it('should not emit to self on publish', async () => { - pubsub.addEventListener('message', shouldNotHappen, { - once: true - }) - - await pubsub.publish(topic, data) - - // Wait 1 second to guarantee that self is not noticed - await new Promise((resolve) => setTimeout(resolve, 1000)) - }) - }) - }) -} diff --git a/packages/interface-compliance-tests/src/pubsub/index.ts b/packages/interface-compliance-tests/src/pubsub/index.ts deleted file mode 100644 index bedc3f3c72..0000000000 --- a/packages/interface-compliance-tests/src/pubsub/index.ts +++ /dev/null @@ -1,34 +0,0 @@ -import apiTest from './api.js' -import connectionHandlersTest from './connection-handlers.js' -import emitSelfTest from './emit-self.js' -import messagesTest from './messages.js' -import multipleNodesTest from './multiple-nodes.js' -import twoNodesTest from './two-nodes.js' -import type { TestSetup } from 
'../index.js' -import type { ComponentLogger, PeerId, PrivateKey, PubSub, PubSubInit } from '@libp2p/interface' -import type { ConnectionManager, Registrar } from '@libp2p/interface-internal' - -export interface PubSubComponents { - peerId: PeerId - privateKey: PrivateKey - registrar: Registrar - connectionManager: ConnectionManager - pubsub?: PubSub - logger: ComponentLogger -} - -export interface PubSubArgs { - components: PubSubComponents - init: PubSubInit -} - -export default (common: TestSetup): void => { - describe('interface-pubsub compliance tests', () => { - apiTest(common) - emitSelfTest(common) - messagesTest(common) - connectionHandlersTest(common) - twoNodesTest(common) - multipleNodesTest(common) - }) -} diff --git a/packages/interface-compliance-tests/src/pubsub/messages.ts b/packages/interface-compliance-tests/src/pubsub/messages.ts deleted file mode 100644 index 1d694672d4..0000000000 --- a/packages/interface-compliance-tests/src/pubsub/messages.ts +++ /dev/null @@ -1,59 +0,0 @@ -import { start, stop } from '@libp2p/interface' -import { expect } from 'aegir/chai' -import { pEvent } from 'p-event' -import sinon from 'sinon' -import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' -import { mockNetwork } from '../mocks/index.js' -import { createComponents } from './utils.js' -import type { PubSubArgs, PubSubComponents } from './index.js' -import type { TestSetup } from '../index.js' -import type { Message, PubSub } from '@libp2p/interface' - -const topic = 'foo' -const data = uint8ArrayFromString('bar') - -export default (common: TestSetup): void => { - describe('messages', () => { - let pubsub: PubSub - let components: PubSubComponents - - // Create pubsub router - beforeEach(async () => { - mockNetwork.reset() - components = await createComponents() - - pubsub = components.pubsub = await common.setup({ - components, - init: { - emitSelf: true - } - }) - await start(...Object.values(components)) - }) - - afterEach(async () => { - sinon.restore() - await stop(...Object.values(components)) - await common.teardown() - mockNetwork.reset() - }) - - it('should emit normalized signed messages on publish', async () => { - const eventPromise = pEvent<'message', CustomEvent>(pubsub, 'message') - - pubsub.globalSignaturePolicy = 'StrictSign' - pubsub.subscribe(topic) - await pubsub.publish(topic, data) - - const event = await eventPromise - const message = event.detail - - if (message.type === 'signed') { - expect(message.from.toString()).to.equal(components.peerId.toString()) - expect(message.sequenceNumber).to.not.eql(undefined) - expect(message.key).to.not.eql(undefined) - expect(message.signature).to.not.eql(undefined) - } - }) - }) -} diff --git a/packages/interface-compliance-tests/src/pubsub/multiple-nodes.ts b/packages/interface-compliance-tests/src/pubsub/multiple-nodes.ts deleted file mode 100644 index 8985f6e993..0000000000 --- a/packages/interface-compliance-tests/src/pubsub/multiple-nodes.ts +++ /dev/null @@ -1,440 +0,0 @@ -/* eslint max-nested-callbacks: ["error", 6] */ -import { start, stop } from '@libp2p/interface' -import { expect } from 'aegir/chai' -import delay from 'delay' -import pDefer from 'p-defer' -import pWaitFor from 'p-wait-for' -import sinon from 'sinon' -import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' -import { toString as uint8ArrayToString } from 'uint8arrays/to-string' -import { mockNetwork } from '../mocks/index.js' -import { createComponents, waitForSubscriptionUpdate } from './utils.js' -import 
type { PubSubArgs, PubSubComponents } from './index.js' -import type { TestSetup } from '../index.js' -import type { Message, PubSub } from '@libp2p/interface' - -export default (common: TestSetup): void => { - describe('pubsub with multiple nodes', function () { - describe('every peer subscribes to the topic', () => { - describe('line', () => { - // line - // ◉────◉────◉ - // a b c - let psA: PubSub - let psB: PubSub - let psC: PubSub - let componentsA: PubSubComponents - let componentsB: PubSubComponents - let componentsC: PubSubComponents - - // Create and start pubsub nodes - beforeEach(async () => { - mockNetwork.reset() - - componentsA = await createComponents() - componentsB = await createComponents() - componentsC = await createComponents() - - psA = componentsA.pubsub = await common.setup({ - components: componentsA, - init: { - emitSelf: true - } - }) - psB = componentsB.pubsub = await common.setup({ - components: componentsB, - init: { - emitSelf: true - } - }) - psC = componentsC.pubsub = await common.setup({ - components: componentsC, - init: { - emitSelf: true - } - }) - - // Start pubsub modes - await start(...Object.values(componentsA), ...Object.values(componentsB), ...Object.values(componentsC)) - - // Connect nodes - await componentsA.connectionManager.openConnection(componentsB.peerId) - await componentsB.connectionManager.openConnection(componentsC.peerId) - - // Wait for peers to be ready in pubsub - await pWaitFor(() => - psA.getPeers().length === 1 && - psC.getPeers().length === 1 && - psA.getPeers().length === 1 - ) - }) - - afterEach(async () => { - sinon.restore() - await stop(...Object.values(componentsA), ...Object.values(componentsB), ...Object.values(componentsC)) - await common.teardown() - mockNetwork.reset() - }) - - it('subscribe to the topic on node a', async () => { - const topic = 'Z' - - psA.subscribe(topic) - expect(psA.getTopics()).to.deep.equal([topic]) - - await waitForSubscriptionUpdate(psB, componentsA.peerId) - - expect(psB.getPeers().length).to.equal(2) - expect(psB.getSubscribers(topic).map(p => p.toString())).to.deep.equal([componentsA.peerId.toString()]) - - expect(psC.getPeers().length).to.equal(1) - expect(psC.getSubscribers(topic)).to.be.empty() - }) - - it('subscribe to the topic on node b', async () => { - const topic = 'Z' - psB.subscribe(topic) - expect(psB.getTopics()).to.deep.equal([topic]) - - await Promise.all([ - waitForSubscriptionUpdate(psA, componentsB.peerId), - waitForSubscriptionUpdate(psC, componentsB.peerId) - ]) - - expect(psA.getPeers().length).to.equal(1) - expect(psA.getSubscribers(topic).map(p => p.toString())).to.deep.equal([componentsB.peerId.toString()]) - - expect(psC.getPeers().length).to.equal(1) - expect(psC.getSubscribers(topic).map(p => p.toString())).to.deep.equal([componentsB.peerId.toString()]) - }) - - it('subscribe to the topic on node c', async () => { - const topic = 'Z' - const defer = pDefer() - - psC.subscribe(topic) - expect(psC.getTopics()).to.deep.equal([topic]) - - psB.addEventListener('subscription-change', () => { - expect(psA.getPeers().length).to.equal(1) - expect(psB.getPeers().length).to.equal(2) - expect(psB.getSubscribers(topic).map(p => p.toString())).to.deep.equal([componentsC.peerId.toString()]) - - defer.resolve() - }, { - once: true - }) - - return defer.promise - }) - - it('publish on node a', async () => { - const topic = 'Z' - const defer = pDefer() - - psA.subscribe(topic) - psB.subscribe(topic) - psC.subscribe(topic) - - await Promise.all([ - waitForSubscriptionUpdate(psA, 
componentsB.peerId), - waitForSubscriptionUpdate(psB, componentsA.peerId), - waitForSubscriptionUpdate(psC, componentsB.peerId) - ]) - - // GossipSub needs time to build the mesh overlay - await delay(1000) - - let counter = 0 - - psA.addEventListener('message', incMsg) - psB.addEventListener('message', incMsg) - psC.addEventListener('message', incMsg) - - const result = await psA.publish(topic, uint8ArrayFromString('hey')) - - expect(result).to.have.property('recipients').with.property('length').greaterThanOrEqual(1) - - function incMsg (evt: CustomEvent): void { - const msg = evt.detail - - if (msg.topic !== topic) { - return - } - - expect(uint8ArrayToString(msg.data)).to.equal('hey') - check() - } - - function check (): void { - if (++counter === 3) { - psA.removeEventListener('message', incMsg) - psB.removeEventListener('message', incMsg) - psC.removeEventListener('message', incMsg) - defer.resolve() - } - } - - return defer.promise - }) - - // since the topology is the same, just the publish - // gets sent by other peer, we reused the same peers - describe('1 level tree', () => { - // 1 level tree - // ┌◉┐ - // │b│ - // ◉─┘ └─◉ - // a c - - it('publish on node b', async () => { - const topic = 'Z' - const defer = pDefer() - let counter = 0 - - psA.subscribe(topic) - psB.subscribe(topic) - psC.subscribe(topic) - - await Promise.all([ - waitForSubscriptionUpdate(psA, componentsB.peerId), - waitForSubscriptionUpdate(psB, componentsA.peerId), - waitForSubscriptionUpdate(psC, componentsB.peerId) - ]) - - // GossipSub needs time to build the mesh overlay - await delay(1000) - - psA.addEventListener('message', incMsg) - psB.addEventListener('message', incMsg) - psC.addEventListener('message', incMsg) - - await psB.publish(topic, uint8ArrayFromString('hey')) - - function incMsg (evt: CustomEvent): void { - const msg = evt.detail - - if (msg.topic !== topic) { - return - } - - expect(uint8ArrayToString(msg.data)).to.equal('hey') - check() - } - - function check (): void { - if (++counter === 3) { - psA.removeEventListener('message', incMsg) - psB.removeEventListener('message', incMsg) - psC.removeEventListener('message', incMsg) - defer.resolve() - } - } - - return defer.promise - }) - }) - }) - - describe('2 level tree', () => { - // 2 levels tree - // ┌◉┐ - // │c│ - // ┌◉─┘ └─◉┐ - // │b d│ - // ◉─┘ └─◉ - // a - let psA: PubSub - let psB: PubSub - let psC: PubSub - let psD: PubSub - let psE: PubSub - let componentsA: PubSubComponents - let componentsB: PubSubComponents - let componentsC: PubSubComponents - let componentsD: PubSubComponents - let componentsE: PubSubComponents - - // Create and start pubsub nodes - beforeEach(async () => { - mockNetwork.reset() - - componentsA = await createComponents() - componentsB = await createComponents() - componentsC = await createComponents() - componentsD = await createComponents() - componentsE = await createComponents() - - psA = componentsA.pubsub = await common.setup({ - components: componentsA, - init: { - emitSelf: true - } - }) - psB = componentsB.pubsub = await common.setup({ - components: componentsB, - init: { - emitSelf: true - } - }) - psC = componentsC.pubsub = await common.setup({ - components: componentsC, - init: { - emitSelf: true - } - }) - psD = componentsD.pubsub = await common.setup({ - components: componentsD, - init: { - emitSelf: true - } - }) - psE = componentsE.pubsub = await common.setup({ - components: componentsE, - init: { - emitSelf: true - } - }) - - // Start pubsub nodes - await start( - ...Object.values(componentsA), 
- ...Object.values(componentsB), - ...Object.values(componentsC), - ...Object.values(componentsD), - ...Object.values(componentsE) - ) - - // connect nodes - await componentsA.connectionManager.openConnection(componentsB.peerId) - await componentsB.connectionManager.openConnection(componentsC.peerId) - await componentsC.connectionManager.openConnection(componentsD.peerId) - await componentsD.connectionManager.openConnection(componentsE.peerId) - - // Wait for peers to be ready in pubsub - await pWaitFor(() => - psA.getPeers().length === 1 && - psB.getPeers().length === 2 && - psC.getPeers().length === 2 && - psD.getPeers().length === 2 && - psE.getPeers().length === 1 - ) - }) - - afterEach(async () => { - await stop( - ...Object.values(componentsA), - ...Object.values(componentsB), - ...Object.values(componentsC), - ...Object.values(componentsD), - ...Object.values(componentsE) - ) - await common.teardown() - mockNetwork.reset() - }) - - it('subscribes', () => { - psA.subscribe('Z') - expect(psA.getTopics()).to.deep.equal(['Z']) - psB.subscribe('Z') - expect(psB.getTopics()).to.deep.equal(['Z']) - psC.subscribe('Z') - expect(psC.getTopics()).to.deep.equal(['Z']) - psD.subscribe('Z') - expect(psD.getTopics()).to.deep.equal(['Z']) - psE.subscribe('Z') - expect(psE.getTopics()).to.deep.equal(['Z']) - }) - - it('publishes from c', async function () { - const defer = pDefer() - let counter = 0 - const topic = 'Z' - - psA.subscribe(topic) - psA.addEventListener('message', incMsg) - psB.subscribe(topic) - psB.addEventListener('message', incMsg) - psC.subscribe(topic) - psC.addEventListener('message', incMsg) - psD.subscribe(topic) - psD.addEventListener('message', incMsg) - psE.subscribe(topic) - psE.addEventListener('message', incMsg) - - await Promise.all([ - waitForSubscriptionUpdate(psA, componentsB.peerId), - waitForSubscriptionUpdate(psB, componentsA.peerId), - waitForSubscriptionUpdate(psC, componentsB.peerId), - waitForSubscriptionUpdate(psD, componentsC.peerId), - waitForSubscriptionUpdate(psE, componentsD.peerId) - ]) - - // GossipSub needs time to build the mesh overlay - await delay(1000) - - await psC.publish('Z', uint8ArrayFromString('hey from c')) - - function incMsg (evt: CustomEvent): void { - const msg = evt.detail - - if (msg.topic !== topic) { - return - } - - expect(uint8ArrayToString(msg.data)).to.equal('hey from c') - check() - } - - function check (): void { - if (++counter === 5) { - psA.unsubscribe('Z') - psB.unsubscribe('Z') - psC.unsubscribe('Z') - psD.unsubscribe('Z') - psE.unsubscribe('Z') - defer.resolve() - } - } - - return defer.promise - }) - }) - }) - - describe('only some nodes subscribe the networks', () => { - describe('line', () => { - // line - // ◉────◎────◉ - // a b c - - before(() => { }) - after(() => { }) - }) - - describe('1 level tree', () => { - // 1 level tree - // ┌◉┐ - // │b│ - // ◎─┘ └─◉ - // a c - - before(() => { }) - after(() => { }) - }) - - describe('2 level tree', () => { - // 2 levels tree - // ┌◉┐ - // │c│ - // ┌◎─┘ └─◉┐ - // │b d│ - // ◉─┘ └─◎ - // a e - - before(() => { }) - after(() => { }) - }) - }) - }) -} diff --git a/packages/interface-compliance-tests/src/pubsub/two-nodes.ts b/packages/interface-compliance-tests/src/pubsub/two-nodes.ts deleted file mode 100644 index 6e9f857181..0000000000 --- a/packages/interface-compliance-tests/src/pubsub/two-nodes.ts +++ /dev/null @@ -1,272 +0,0 @@ -/* eslint max-nested-callbacks: ["error", 6] */ -import { TopicValidatorResult, start, stop } from '@libp2p/interface' -import { expect } from 
'aegir/chai' -import pDefer from 'p-defer' -import pWaitFor from 'p-wait-for' -import sinon from 'sinon' -import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' -import { toString as uint8ArrayToString } from 'uint8arrays/to-string' -import { mockNetwork } from '../mocks/index.js' -import { createComponents, waitForSubscriptionUpdate } from './utils.js' -import type { PubSubArgs, PubSubComponents } from './index.js' -import type { TestSetup } from '../index.js' -import type { Message, PubSub } from '@libp2p/interface' - -const topic = 'foo' - -function shouldNotHappen (): void { - expect.fail() -} - -export default (common: TestSetup): void => { - describe('pubsub with two nodes', () => { - let psA: PubSub - let psB: PubSub - let componentsA: PubSubComponents - let componentsB: PubSubComponents - - // Create pubsub nodes and connect them - beforeEach(async () => { - mockNetwork.reset() - - componentsA = await createComponents() - componentsB = await createComponents() - - psA = componentsA.pubsub = await common.setup({ - components: componentsA, - init: { - emitSelf: true - } - }) - psB = componentsB.pubsub = await common.setup({ - components: componentsB, - init: { - emitSelf: false - } - }) - - // Start pubsub and connect nodes - await start(...Object.values(componentsA), ...Object.values(componentsB)) - - expect(psA.getPeers()).to.be.empty() - expect(psB.getPeers()).to.be.empty() - - await componentsA.connectionManager.openConnection(componentsB.peerId) - - // Wait for peers to be ready in pubsub - await pWaitFor(() => psA.getPeers().length === 1 && psB.getPeers().length === 1) - }) - - afterEach(async () => { - sinon.restore() - await stop(...Object.values(componentsA), ...Object.values(componentsB)) - await common.teardown() - mockNetwork.reset() - }) - - it('Subscribe to a topic in nodeA', async () => { - const defer = pDefer() - - psB.addEventListener('subscription-change', (evt) => { - const { peerId: changedPeerId, subscriptions: changedSubs } = evt.detail - expect(psA.getTopics()).to.deep.equal([topic]) - expect(psB.getPeers()).to.have.lengthOf(1) - expect(psB.getSubscribers(topic).map(p => p.toString())).to.deep.equal([componentsA.peerId.toString()]) - expect(changedPeerId.toString()).to.equal(psB.getPeers()[0].toString()) - expect(changedSubs).to.have.lengthOf(1) - expect(changedSubs[0].topic).to.equal(topic) - expect(changedSubs[0].subscribe).to.equal(true) - defer.resolve() - }, { - once: true - }) - psA.subscribe(topic) - - return defer.promise - }) - - it('Publish to a topic in nodeA', async () => { - const defer = pDefer() - - psA.addEventListener('message', (evt) => { - if (evt.detail.topic === topic) { - const msg = evt.detail - expect(uint8ArrayToString(msg.data)).to.equal('hey') - psB.removeEventListener('message', shouldNotHappen) - defer.resolve() - } - }, { - once: true - }) - - psA.subscribe(topic) - psB.subscribe(topic) - - await Promise.all([ - waitForSubscriptionUpdate(psA, componentsB.peerId), - waitForSubscriptionUpdate(psB, componentsA.peerId) - ]) - - await psA.publish(topic, uint8ArrayFromString('hey')) - - return defer.promise - }) - - it('Publish to a topic in nodeB', async () => { - const defer = pDefer() - - psA.addEventListener('message', (evt) => { - if (evt.detail.topic !== topic) { - return - } - - const msg = evt.detail - psA.addEventListener('message', (evt) => { - if (evt.detail.topic === topic) { - shouldNotHappen() - } - }, { - once: true - }) - expect(uint8ArrayToString(msg.data)).to.equal('banana') - - setTimeout(() => { 
- psA.removeEventListener('message') - psB.removeEventListener('message') - - defer.resolve() - }, 100) - }, { - once: true - }) - - psB.addEventListener('message', shouldNotHappen) - - psA.subscribe(topic) - psB.subscribe(topic) - - await Promise.all([ - waitForSubscriptionUpdate(psA, componentsB.peerId), - waitForSubscriptionUpdate(psB, componentsA.peerId) - ]) - - await psB.publish(topic, uint8ArrayFromString('banana')) - - return defer.promise - }) - - it('validate topic message', async () => { - const defer = pDefer() - - psA.subscribe(topic) - - psB.topicValidators.set(topic, (peer, message) => { - if (!peer.equals(componentsA.peerId)) { - defer.reject(new Error('Invalid peer id in topic validator fn')) - return TopicValidatorResult.Reject - } - - if (uint8ArrayToString(message.data) !== 'hey') { - defer.reject(new Error('Invalid message in topic validator fn')) - return TopicValidatorResult.Reject - } - - defer.resolve() - return TopicValidatorResult.Accept - }) - psB.subscribe(topic) - - await Promise.all([ - waitForSubscriptionUpdate(psA, componentsB.peerId), - waitForSubscriptionUpdate(psB, componentsA.peerId) - ]) - - await psA.publish(topic, uint8ArrayFromString('hey')) - - return defer.promise - }) - - it('Publish 10 msg to a topic in nodeB', async () => { - const defer = pDefer() - let counter = 0 - - psB.addEventListener('message', shouldNotHappen) - psA.addEventListener('message', receivedMsg) - - function receivedMsg (evt: CustomEvent): void { - const msg = evt.detail - if (msg.type === 'unsigned') { - expect(uint8ArrayToString(msg.data)).to.equal('banana') - expect(msg.topic).to.be.equal(topic) - } else { - expect(uint8ArrayToString(msg.data)).to.equal('banana') - expect(msg.from.toString()).to.equal(componentsB.peerId.toString()) - expect(msg.sequenceNumber).to.be.a('BigInt') - expect(msg.topic).to.be.equal(topic) - } - - if (++counter === 10) { - psA.removeEventListener('message', receivedMsg) - psB.removeEventListener('message', shouldNotHappen) - - defer.resolve() - } - } - - psA.subscribe(topic) - psB.subscribe(topic) - - await Promise.all([ - waitForSubscriptionUpdate(psA, componentsB.peerId), - waitForSubscriptionUpdate(psB, componentsA.peerId) - ]) - - await Promise.all( - Array.from({ length: 10 }, async (_, i) => { - await psB.publish(topic, uint8ArrayFromString('banana')) - }) - ) - - return defer.promise - }) - - it('Unsubscribe from topic in nodeA', async () => { - const defer = pDefer() - let callCount = 0 - - psB.addEventListener('subscription-change', (evt) => { - callCount++ - - if (callCount === 1) { - // notice subscribe - const { peerId: changedPeerId, subscriptions: changedSubs } = evt.detail - expect(psB.getPeers()).to.have.lengthOf(1) - expect(psB.getTopics()).to.be.empty() - expect(changedPeerId.toString()).to.equal(psB.getPeers()[0].toString()) - expect(changedSubs).to.have.lengthOf(1) - expect(changedSubs[0].topic).to.equal(topic) - expect(changedSubs[0].subscribe).to.equal(true) - } else { - // notice unsubscribe - const { peerId: changedPeerId, subscriptions: changedSubs } = evt.detail - expect(psB.getPeers()).to.have.lengthOf(1) - expect(psB.getTopics()).to.be.empty() - expect(changedPeerId.toString()).to.equal(psB.getPeers()[0].toString()) - expect(changedSubs).to.have.lengthOf(1) - expect(changedSubs[0].topic).to.equal(topic) - expect(changedSubs[0].subscribe).to.equal(false) - - defer.resolve() - } - }) - - psA.subscribe(topic) - expect(psA.getTopics()).to.not.be.empty() - - psA.unsubscribe(topic) - expect(psA.getTopics()).to.be.empty() - - 
return defer.promise - }) - }) -} diff --git a/packages/interface-compliance-tests/src/pubsub/utils.ts b/packages/interface-compliance-tests/src/pubsub/utils.ts deleted file mode 100644 index 05d8635ae7..0000000000 --- a/packages/interface-compliance-tests/src/pubsub/utils.ts +++ /dev/null @@ -1,34 +0,0 @@ -import { generateKeyPair } from '@libp2p/crypto/keys' -import { defaultLogger } from '@libp2p/logger' -import { peerIdFromPrivateKey } from '@libp2p/peer-id' -import { TypedEventEmitter } from 'main-event' -import { pEvent } from 'p-event' -import pWaitFor from 'p-wait-for' -import { mockConnectionManager, mockRegistrar, mockNetwork } from '../mocks/index.js' -import type { MockNetworkComponents } from '../mocks/index.js' -import type { PeerId, PubSub, SubscriptionChangeData } from '@libp2p/interface' - -export async function waitForSubscriptionUpdate (a: PubSub, b: PeerId): Promise { - await pWaitFor(async () => { - const event = await pEvent<'subscription-change', CustomEvent>(a, 'subscription-change') - - return event.detail.peerId.equals(b) - }) -} - -export async function createComponents (): Promise { - const privateKey = await generateKeyPair('Ed25519') - - const components: any = { - peerId: peerIdFromPrivateKey(privateKey), - privateKey, - registrar: mockRegistrar(), - events: new TypedEventEmitter(), - logger: defaultLogger() - } - components.connectionManager = mockConnectionManager(components) - - mockNetwork.addNode(components) - - return components -} diff --git a/packages/interface-compliance-tests/src/stream-muxer/base-test.ts b/packages/interface-compliance-tests/src/stream-muxer/base-test.ts index fb7ef9ea07..860deaa001 100644 --- a/packages/interface-compliance-tests/src/stream-muxer/base-test.ts +++ b/packages/interface-compliance-tests/src/stream-muxer/base-test.ts @@ -1,458 +1,120 @@ +import { multiaddrConnectionPair, byteStream } from '@libp2p/utils' import { expect } from 'aegir/chai' -import all from 'it-all' -import { byteStream } from 'it-byte-stream' -import map from 'it-map' -import { duplexPair } from 'it-pair/duplex' -import { pipe } from 'it-pipe' -import defer from 'p-defer' -import { Uint8ArrayList } from 'uint8arraylist' +import { raceEvent } from 'race-event' import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' -import { toString as uint8ArrayToString } from 'uint8arrays/to-string' -import { isValidTick } from '../is-valid-tick.js' import type { TestSetup } from '../index.js' -import type { Stream, StreamMuxerFactory } from '@libp2p/interface' -import type { Source } from 'it-stream-types' -import type { DeferredPromise } from 'p-defer' +import type { Stream, StreamMuxer, StreamMuxerFactory } from '@libp2p/interface' export default (common: TestSetup): void => { describe('base', () => { - it('should open a stream from the dialer', async () => { - const p = duplexPair() - const onStreamPromise: DeferredPromise = defer() - const onStreamEndPromise: DeferredPromise = defer() + let dialer: StreamMuxer + let listener: StreamMuxer + + beforeEach(async () => { + const [outboundConnection, inboundConnection] = multiaddrConnectionPair() const dialerFactory = await common.setup() - const dialer = dialerFactory.createStreamMuxer({ - direction: 'outbound' - }) + dialer = dialerFactory.createStreamMuxer(outboundConnection) const listenerFactory = await common.setup() - const listener = listenerFactory.createStreamMuxer({ - direction: 'inbound', - onIncomingStream: (stream) => { - onStreamPromise.resolve(stream) - }, - onStreamEnd: (stream) 
=> { - onStreamEndPromise.resolve(stream) - } - }) - - void pipe(p[0], dialer, p[0]) - void pipe(p[1], listener, p[1]) - - const dialerStream = await dialer.newStream() - expect(dialer.streams).to.include(dialerStream) - expect(isValidTick(dialerStream.timeline.open)).to.equal(true) - - const dialerBytes = byteStream(dialerStream) - void dialerBytes.write(uint8ArrayFromString('hello')) + listener = listenerFactory.createStreamMuxer(inboundConnection) + }) - const listenerStream = await onStreamPromise.promise - expect(isValidTick(listenerStream.timeline.open)).to.equal(true) - // Make sure the stream is being tracked - expect(listener.streams).to.include(listenerStream) + afterEach(async () => { + await dialer?.close() + await listener?.close() + }) - await dialerStream.close() - await listenerStream.close() + it('should have a protocol', async () => { + expect(dialer.protocol).to.be.a('string') + }) - // Make sure stream is closed properly - const endedStream = await onStreamEndPromise.promise - expect(listener.streams).to.not.include(endedStream) + it('should be open', async () => { + expect(dialer.status).to.equal('open') + }) - if (endedStream.timeline.close == null) { - throw new Error('timeline had no close time') - } + it('should be closing during closing', async () => { + const closePromise = dialer.close() + expect(dialer.status).to.equal('closing') - // Make sure the stream is removed from tracking - expect(isValidTick(endedStream.timeline.close)).to.equal(true) + await closePromise + }) + it('should be closed after closing', async () => { await dialer.close() - await listener.close() - // ensure we have no streams left - expect(dialer.streams).to.have.length(0) - expect(listener.streams).to.have.length(0) + expect(dialer.status).to.equal('closed') }) - it('should open a stream from the listener', async () => { - const p = duplexPair() - const onStreamPromise: DeferredPromise = defer() - const onStreamEndPromise: DeferredPromise = defer() - const dialerFactory = await common.setup() - const dialer = dialerFactory.createStreamMuxer({ - direction: 'outbound', - onIncomingStream: (stream: Stream) => { - onStreamPromise.resolve(stream) - }, - onStreamEnd: (stream) => { - onStreamEndPromise.resolve(stream) - } - }) - - const listenerFactory = await common.setup() - const listener = listenerFactory.createStreamMuxer({ - direction: 'inbound' - }) - - void pipe(p[0], dialer, p[0]) - void pipe(p[1], listener, p[1]) + it('should open a stream', async () => { + const [ + listenerStream, + dialerStream + ] = await Promise.all([ + raceEvent>(listener, 'stream').then(evt => evt.detail), + dialer.createStream() + ]) - const listenerStream = await listener.newStream() + const dialerBytes = byteStream(dialerStream) const listenerBytes = byteStream(listenerStream) - void listenerBytes.write(uint8ArrayFromString('hello')) - - const dialerStream = await onStreamPromise.promise - expect(isValidTick(dialerStream.timeline.open)).to.equal(true) - expect(listener.streams).to.include(listenerStream) - expect(isValidTick(listenerStream.timeline.open)).to.equal(true) + const input = uint8ArrayFromString('hello') - await dialerStream.close() - await listenerStream.close() - - // Make sure stream is closed properly - const endedStream = await onStreamEndPromise.promise - expect(dialer.streams).to.not.include(endedStream) - - if (endedStream.timeline.close == null) { - throw new Error('timeline had no close time') - } - - // Make sure the stream is removed from tracking - 
expect(isValidTick(endedStream.timeline.close)).to.equal(true) +      const [, output] = await Promise.all([ +        dialerBytes.write(input), +        listenerBytes.read() +      ]) -      await dialer.close() -      await listener.close() +      expect(output?.subarray()).to.equalBytes(input.subarray())      })      it('should open a stream on both sides', async () => { -      const p = duplexPair() -      const onDialerStreamPromise: DeferredPromise = defer() -      const onListenerStreamPromise: DeferredPromise = defer() -      const dialerFactory = await common.setup() -      const dialer = dialerFactory.createStreamMuxer({ -        direction: 'outbound', -        onIncomingStream: (stream) => { -          onDialerStreamPromise.resolve(stream) -        } -      }) - -      const listenerFactory = await common.setup() -      const listener = listenerFactory.createStreamMuxer({ -        direction: 'inbound', -        onIncomingStream: (stream) => { -          onListenerStreamPromise.resolve(stream) -        } -      }) - -      void pipe(p[0], dialer, p[0]) -      void pipe(p[1], listener, p[1]) +      const [ +        listenerInboundStream, +        dialerOutboundStream, -      const dialerInitiatorStream = await dialer.newStream() -      const listenerInitiatorStream = await listener.newStream() +        dialerInboundStream, +        listenerOutboundStream +      ] = await Promise.all([ +        raceEvent>(listener, 'stream').then(evt => evt.detail), +        dialer.createStream(), -      await Promise.all([ -        dialerInitiatorStream.close(), -        listenerInitiatorStream.close(), -        onDialerStreamPromise.promise.then(async stream => { await stream.close() }), -        onListenerStreamPromise.promise.then(async stream => { await stream.close() }) +        raceEvent>(dialer, 'stream').then(evt => evt.detail), +        listener.createStream() ])  -      await Promise.all([ -        dialer.close(), -        listener.close() -      ]) -    }) +      const dialerOutboundBytes = byteStream(dialerOutboundStream) +      const listenerInboundBytes = byteStream(listenerInboundStream)  -    it('should open a stream on one side, write, open a stream on the other side', async () => { -      const toString = (source: Source): AsyncGenerator => map(source, (u) => uint8ArrayToString(u.subarray())) -      const p = duplexPair() -      const onDialerStreamPromise: DeferredPromise = defer() -      const onListenerStreamPromise: DeferredPromise = defer() -      const dialerFactory = await common.setup() -      const dialer = dialerFactory.createStreamMuxer({ -        direction: 'outbound', -        onIncomingStream: (stream) => { -          onDialerStreamPromise.resolve(stream) -        } -      }) -      const listenerFactory = await common.setup() -      const listener = listenerFactory.createStreamMuxer({ -        direction: 'inbound', -        onIncomingStream: (stream) => { -          onListenerStreamPromise.resolve(stream) -        } -      }) - -      void pipe(p[0], dialer, p[0]) -      void pipe(p[1], listener, p[1]) +      const listenerOutboundBytes = byteStream(listenerOutboundStream) +      const dialerInboundBytes = byteStream(dialerInboundStream)  -      const dialerConn = await dialer.newStream() -      const listenerConn = await listener.newStream() +      const inputA = uint8ArrayFromString('hello') +      const inputB = uint8ArrayFromString('world')  -      void pipe([new Uint8ArrayList(uint8ArrayFromString('hey'))], dialerConn) -      void pipe([new Uint8ArrayList(uint8ArrayFromString('hello'))], listenerConn) +      const [, outputA, , outputB] = await Promise.all([ +        dialerOutboundBytes.write(inputA), +        listenerInboundBytes.read(), -      const [ -        dialerStream, -        listenerStream -      ] = await Promise.all([ -        onDialerStreamPromise.promise, -        onListenerStreamPromise.promise +        listenerOutboundBytes.write(inputB), +        dialerInboundBytes.read() ])  +      expect(outputA?.subarray()).to.equalBytes(inputA.subarray()) + 
expect(outputB?.subarray()).to.equalBytes(inputB.subarray()) +    }) + +    it('should store a stream in the streams list', async () => {       const [ -        listenerChunks, -        dialerChunks +        listenerStream, +        dialerStream       ] = await Promise.all([ -        pipe(listenerStream, toString, async (source) => all(source)), -        pipe(dialerStream, toString, async (source) => all(source)) +        raceEvent>(listener, 'stream').then(evt => evt.detail), +        dialer.createStream()       ])  -      expect(listenerChunks).to.be.eql(['hey']) -      expect(dialerChunks).to.be.eql(['hello']) -    }) - -    it('should echo a small value via a pipe', async () => { -      const p = duplexPair() -      const onDialerStreamPromise: DeferredPromise = defer() -      const onDataReceivedPromise: DeferredPromise = defer() -      const dialerFactory = await common.setup() -      const dialer = dialerFactory.createStreamMuxer({ -        direction: 'outbound', -        onIncomingStream: (stream) => { -          onDialerStreamPromise.resolve(stream) -        } -      }) -      const listenerFactory = await common.setup() -      const listener = listenerFactory.createStreamMuxer({ -        direction: 'inbound', -        onIncomingStream: (stream) => { -          void Promise.resolve().then(async () => { -            const output = new Uint8ArrayList() - -            for await (const buf of stream.source) { -              output.append(buf) -            } - -            onDataReceivedPromise.resolve(output.subarray()) -          }) -        } -      }) - -      void pipe(p[0], dialer, p[0]) -      void pipe(p[1], listener, p[1]) - -      const stream = await dialer.newStream() -      const input = Uint8Array.from([0, 1, 2, 3, 4]) - -      await pipe( -        [input], -        stream -      ) -      await stream.close() - -      expect(await onDataReceivedPromise.promise).to.equalBytes(input) -    }) - -    it('should echo a large value via a pipe', async () => { -      const p = duplexPair() -      const onDialerStreamPromise: DeferredPromise = defer() -      const onDataReceivedPromise: DeferredPromise = defer() -      const dialerFactory = await common.setup() -      const dialer = dialerFactory.createStreamMuxer({ -        direction: 'outbound', -        onIncomingStream: (stream) => { -          onDialerStreamPromise.resolve(stream) -        } -      }) -      const listenerFactory = await common.setup() -      const listener = listenerFactory.createStreamMuxer({ -        direction: 'inbound', -        onIncomingStream: (stream) => { -          void Promise.resolve().then(async () => { -            const output = new Uint8ArrayList() - -            for await (const buf of stream.source) { -              output.append(buf) -            } - -            onDataReceivedPromise.resolve(output.subarray()) -          }) -        } -      }) - -      void pipe(p[0], dialer, p[0]) -      void pipe(p[1], listener, p[1]) - -      const stream = await dialer.newStream() -      const input = Uint8Array.from(new Array(1024 * 1024 * 10).fill(0)) - -      await pipe( -        [input], -        stream -      ) -      await stream.close() - -      expect(await onDataReceivedPromise.promise).to.equalBytes(input) -    }) - -    it('should echo a small value via sink', async () => { -      const p = duplexPair() -      const onDialerStreamPromise: DeferredPromise = defer() -      const onDataReceivedPromise: DeferredPromise = defer() -      const dialerFactory = await common.setup() -      const dialer = dialerFactory.createStreamMuxer({ -        direction: 'outbound', -        onIncomingStream: (stream) => { -          onDialerStreamPromise.resolve(stream) -        } -      }) -      const listenerFactory = await common.setup() -      const listener = listenerFactory.createStreamMuxer({ -        direction: 'inbound', -        onIncomingStream: (stream) => { -          void Promise.resolve().then(async () => { -            const output = new Uint8ArrayList() - -            for await (const buf of stream.source) { -              output.append(buf) -            } - -            onDataReceivedPromise.resolve(output.subarray()) -          }) -        } -      }) - -      void pipe(p[0], dialer, p[0]) -      void pipe(p[1], 
listener, p[1]) - - const stream = await dialer.newStream() - const input = Uint8Array.from([0, 1, 2, 3, 4]) - - await stream.sink([input]) - await stream.close() - - expect(await onDataReceivedPromise.promise).to.equalBytes(input) - }) - - it('should echo a large value via sink', async () => { - const p = duplexPair() - const onDialerStreamPromise: DeferredPromise = defer() - const onDataReceivedPromise: DeferredPromise = defer() - const dialerFactory = await common.setup() - const dialer = dialerFactory.createStreamMuxer({ - direction: 'outbound', - onIncomingStream: (stream) => { - onDialerStreamPromise.resolve(stream) - } - }) - const listenerFactory = await common.setup() - const listener = listenerFactory.createStreamMuxer({ - direction: 'inbound', - onIncomingStream: (stream) => { - void Promise.resolve().then(async () => { - const output = new Uint8ArrayList() - - for await (const buf of stream.source) { - output.append(buf) - } - - onDataReceivedPromise.resolve(output.subarray()) - }) - } - }) - - void pipe(p[0], dialer, p[0]) - void pipe(p[1], listener, p[1]) - - const stream = await dialer.newStream() - const input = Uint8Array.from(new Array(1024 * 1024 * 10).fill(0)) - - await stream.sink([input]) - await stream.close() - - expect(await onDataReceivedPromise.promise).to.equalBytes(input) - }) - - it('should echo a small value via a pushable', async () => { - const p = duplexPair() - const onDialerStreamPromise: DeferredPromise = defer() - const onDataReceivedPromise: DeferredPromise = defer() - const dialerFactory = await common.setup() - const dialer = dialerFactory.createStreamMuxer({ - direction: 'outbound', - onIncomingStream: (stream) => { - onDialerStreamPromise.resolve(stream) - } - }) - const listenerFactory = await common.setup() - const listener = listenerFactory.createStreamMuxer({ - direction: 'inbound', - onIncomingStream: (stream) => { - void Promise.resolve().then(async () => { - const output = new Uint8ArrayList() - - for await (const buf of stream.source) { - output.append(buf) - } - - onDataReceivedPromise.resolve(output.subarray()) - }) - } - }) - - void pipe(p[0], dialer, p[0]) - void pipe(p[1], listener, p[1]) - - const stream = await dialer.newStream() - const input = Uint8Array.from([0, 1, 2, 3, 4]) - - const pushable = byteStream(stream) - await pushable.write(input) - await pushable.unwrap().close() - - expect(await onDataReceivedPromise.promise).to.equalBytes(input) - }) - - it('should echo a large value via a pushable', async () => { - const p = duplexPair() - const onDialerStreamPromise: DeferredPromise = defer() - const onDataReceivedPromise: DeferredPromise = defer() - const dialerFactory = await common.setup() - const dialer = dialerFactory.createStreamMuxer({ - direction: 'outbound', - onIncomingStream: (stream) => { - onDialerStreamPromise.resolve(stream) - } - }) - const listenerFactory = await common.setup() - const listener = listenerFactory.createStreamMuxer({ - direction: 'inbound', - onIncomingStream: (stream) => { - void Promise.resolve().then(async () => { - const output = new Uint8ArrayList() - - for await (const buf of stream.source) { - output.append(buf) - } - - onDataReceivedPromise.resolve(output.subarray()) - }) - } - }) - - void pipe(p[0], dialer, p[0]) - void pipe(p[1], listener, p[1]) - - const stream = await dialer.newStream() - const input = Uint8Array.from(new Array(1024 * 1024 * 10).fill(0)) - - const pushable = byteStream(stream) - await pushable.write(input) - await pushable.unwrap().close() - - expect(await 
onDataReceivedPromise.promise).to.equalBytes(input) + expect(dialer.streams).to.include(dialerStream, 'dialer did not store outbound stream') + expect(listener.streams).to.include(listenerStream, 'listener did not store inbound stream') }) }) } diff --git a/packages/interface-compliance-tests/src/stream-muxer/close-test.ts b/packages/interface-compliance-tests/src/stream-muxer/close-test.ts index d3603f4bbb..7849842d60 100644 --- a/packages/interface-compliance-tests/src/stream-muxer/close-test.ts +++ b/packages/interface-compliance-tests/src/stream-muxer/close-test.ts @@ -1,104 +1,76 @@ /* eslint max-nested-callbacks: ["error", 8] */ -import { abortableSource } from 'abortable-iterator' +import { multiaddrConnectionPair, echo, pbStream } from '@libp2p/utils' import { expect } from 'aegir/chai' import delay from 'delay' import all from 'it-all' -import drain from 'it-drain' -import { duplexPair } from 'it-pair/duplex' -import { pipe } from 'it-pipe' -import { pbStream } from 'it-protobuf-stream' -import toBuffer from 'it-to-buffer' -import pDefer from 'p-defer' +import map from 'it-map' +import { raceEvent } from 'race-event' import { Uint8ArrayList } from 'uint8arraylist' import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' import { Message } from './fixtures/pb/message.js' import type { TestSetup } from '../index.js' -import type { StreamMuxerFactory } from '@libp2p/interface' +import type { MultiaddrConnection, Stream, StreamMuxer, StreamMuxerFactory } from '@libp2p/interface' function randomBuffer (): Uint8Array { return uint8ArrayFromString(Math.random().toString()) } -function infiniteRandom (): AsyncGenerator { - let done: Error | boolean = false - - const generator: AsyncGenerator = { - [Symbol.asyncIterator]: () => { - return generator - }, - async next () { - await delay(10) - - if (done instanceof Error) { - throw done - } +async function * infiniteRandom (): AsyncGenerator { + while (true) { + await delay(10) + yield new Uint8ArrayList(randomBuffer()) + } +} - if (done) { - return { - done: true, - value: undefined - } - } +export default (common: TestSetup): void => { + describe('close', () => { + let outboundConnection: MultiaddrConnection + let inboundConnection: MultiaddrConnection + let dialer: StreamMuxer + let listener: StreamMuxer - return { - done: false, - value: new Uint8ArrayList(randomBuffer()) - } - }, - async return (): Promise> { - done = true + beforeEach(async () => { + [outboundConnection, inboundConnection] = multiaddrConnectionPair() - return { - done: true, - value: undefined - } - }, - async throw (err: Error): Promise> { - done = err + const dialerFactory = await common.setup() + dialer = dialerFactory.createStreamMuxer(outboundConnection) - return { - done: true, - value: undefined - } - } - } + const listenerFactory = await common.setup() + listener = listenerFactory.createStreamMuxer(inboundConnection) + }) - return generator -} + afterEach(async () => { + await dialer?.close() + await listener?.close() + }) -export default (common: TestSetup): void => { - describe('close', () => { - it('closing underlying socket closes streams', async () => { + it('closing underlying MultiaddrConnection closes streams', async () => { let openedStreams = 0 const expectedStreams = 5 - const dialerFactory = await common.setup() - const dialer = dialerFactory.createStreamMuxer({ - direction: 'outbound' - }) - // Listener is echo server :) - const listenerFactory = await common.setup() - const listener = listenerFactory.createStreamMuxer({ - 
direction: 'inbound', - onIncomingStream: (stream) => { - openedStreams++ - void pipe(stream, stream) - } - }) + listener.addEventListener('stream', (evt) => { + openedStreams++ - const p = duplexPair() - void pipe(p[0], dialer, p[0]) - void pipe(p[1], listener, p[1]) + echo(evt.detail) + }) - const streams = await Promise.all(Array(expectedStreams).fill(0).map(async () => dialer.newStream())) + const streams = await Promise.all( + Array(expectedStreams).fill(0).map(async () => dialer.createStream()) + ) void Promise.all( streams.map(async stream => { - await pipe( - infiniteRandom(), - stream, - drain - ) + for await (const buf of infiniteRandom()) { + if (stream.status !== 'open') { + return + } + + const sendMore = stream.send(buf) + + if (!sendMore) { + await raceEvent(stream, 'drain') + } + } }) ) @@ -106,7 +78,9 @@ export default (common: TestSetup): void => { // Pause, and then close the dialer await delay(50) - await pipe(async function * () {}, dialer, drain) + await inboundConnection.closeWrite() + await outboundConnection.closeWrite() + await delay(50) expect(openedStreams).to.have.equal(expectedStreams) expect(dialer.streams).to.have.lengthOf(0) @@ -115,36 +89,33 @@ export default (common: TestSetup): void => { it('calling close closes streams', async () => { let openedStreams = 0 const expectedStreams = 5 - const dialerFactory = await common.setup() - const dialer = dialerFactory.createStreamMuxer({ - direction: 'outbound' - }) - // Listener is echo server :) - const listenerFactory = await common.setup() - const listener = listenerFactory.createStreamMuxer({ - direction: 'inbound', - onIncomingStream: (stream) => { - openedStreams++ - void pipe(stream, stream).catch(() => {}) - } - }) + listener.addEventListener('stream', (evt) => { + openedStreams++ - const p = duplexPair() - void pipe(p[0], dialer, p[0]) - void pipe(p[1], listener, p[1]) + echo(evt.detail) + }) - const streams = await Promise.all(Array(expectedStreams).fill(0).map(async () => dialer.newStream())) + const streams = await Promise.all(Array(expectedStreams).fill(0).map(async () => dialer.createStream())) void Promise.all( streams.map(async stream => { - await pipe( - infiniteRandom(), - stream, - drain - ) + for await (const buf of infiniteRandom()) { + if (stream.status !== 'open') { + return + } + + const sendMore = stream.send(buf) + + if (!sendMore) { + await raceEvent(stream, 'drain') + } + } }) ) + .catch(() => { + // calling .send on a closed stream will throw so swallow any errors + }) expect(dialer.streams, 'dialer - number of opened streams should match number of calls to newStream').to.have.lengthOf(expectedStreams) @@ -153,346 +124,319 @@ export default (common: TestSetup): void => { await dialer.close() + await delay(50) + expect(openedStreams, 'listener - number of opened streams should match number of calls to newStream').to.have.equal(expectedStreams) expect(dialer.streams, 'all tracked streams should be deleted after the muxer has called close').to.have.lengthOf(0) }) - it('calling close with an error aborts streams', async () => { + it('calling abort aborts streams', async () => { let openedStreams = 0 const expectedStreams = 5 - const dialerFactory = await common.setup() - const dialer = dialerFactory.createStreamMuxer({ - direction: 'outbound' - }) - // Listener is echo server :) - const listenerFactory = await common.setup() - const listener = listenerFactory.createStreamMuxer({ - direction: 'inbound', - onIncomingStream: (stream) => { - openedStreams++ - void pipe(stream, stream).catch(() 
=> {}) - } - }) + listener.addEventListener('stream', (evt) => { + openedStreams++ - const p = duplexPair() - void pipe(p[0], dialer, p[0]) - void pipe(p[1], listener, p[1]) + echo(evt.detail) + }) - const streams = await Promise.all(Array(expectedStreams).fill(0).map(async () => dialer.newStream())) + const streams = await Promise.all( + Array(expectedStreams).fill(0).map(async () => dialer.createStream()) + ) const streamPipes = streams.map(async stream => { - await pipe( - infiniteRandom(), - stream, - drain - ) - }) - - expect(dialer.streams, 'dialer - number of opened streams should match number of calls to newStream').to.have.lengthOf(expectedStreams) + for await (const buf of infiniteRandom()) { + const sendMore = stream.send(buf) - // Pause, and then close the dialer - await delay(50) + if (!sendMore) { + await raceEvent(stream, 'drain', undefined, { + errorEvent: 'close' + }) + } + } + }) - // close _with an error_ - dialer.abort(new Error('Oh no!')) + expect(dialer.streams).to.have.lengthOf(expectedStreams, 'dialer - number of opened streams should match number of calls to createStream') const timeoutError = new Error('timeout') - for (const pipe of streamPipes) { - try { - await Promise.race([ - pipe, - new Promise((_resolve, reject) => setTimeout(() => { reject(timeoutError) }, 20)) - ]) - expect.fail('stream pipe with infinite source should never return') - } catch (e) { - if (e === timeoutError) { - expect.fail('expected stream pipe to throw an error after muxer closed with error') + + await Promise.all([ + // Pause, and then close the dialer + delay(50).then(() => { + // close _with an error_ + dialer.abort(new Error('Oh no!')) + }), + ...streamPipes.map(async pipe => { + try { + await Promise.race([ + pipe, + new Promise((resolve, reject) => { + setTimeout(() => { + reject(timeoutError) + }, 70) + }) + ]) + expect.fail('stream pipe with infinite source should never return') + } catch (e) { + if (e === timeoutError) { + expect.fail('expected stream pipe to throw an error after muxer closed with error') + } } - } - } + }) + ]) - expect(openedStreams, 'listener - number of opened streams should match number of calls to newStream').to.have.equal(expectedStreams) - expect(dialer.streams, 'all tracked streams should be deleted after the muxer has called close').to.have.lengthOf(0) + expect(openedStreams).to.equal(expectedStreams, 'listener - number of opened streams should match number of calls to createStream') + expect(dialer.streams).to.have.lengthOf(0, 'all tracked streams should be deleted after the muxer has called close') }) it('calling newStream after close throws an error', async () => { - const dialerFactory = await common.setup() - const dialer = dialerFactory.createStreamMuxer({ - direction: 'outbound' - }) - await dialer.close() - - try { - await dialer.newStream() - expect.fail('newStream should throw if called after close') - } catch (e) { - expect(dialer.streams, 'closed muxer should have no streams').to.have.lengthOf(0) - } + await expect(dialer.createStream()).to.eventually.rejected.with.property('name', 'MuxerClosedError') + expect(dialer.streams).to.have.lengthOf(0, 'closed muxer should have no streams') }) it('closing one of the muxed streams doesn\'t close others', async () => { - const p = duplexPair() - const dialerFactory = await common.setup() - const dialer = dialerFactory.createStreamMuxer({ - direction: 'outbound' - }) + const streamCount = 5 + const allStreamsOpen = Promise.withResolvers() - // Listener is echo server :) - const listenerFactory = 
await common.setup() - const listener = listenerFactory.createStreamMuxer({ - direction: 'inbound', - onIncomingStream: (stream) => { - void pipe(stream, stream).catch(() => {}) + listener.addEventListener('stream', (evt) => { + echo(evt.detail).catch(() => {}) + + if (listener.streams.length === streamCount) { + allStreamsOpen.resolve() } }) - void pipe(p[0], dialer, p[0]) - void pipe(p[1], listener, p[1]) + const streams = await Promise.all( + Array.from(Array(streamCount), async () => dialer.createStream()) + ) + await allStreamsOpen.promise - const stream = await dialer.newStream() - const streams = await Promise.all(Array.from(Array(5), async () => dialer.newStream())) - let closed = false - const controllers: AbortController[] = [] + expect(dialer.streams).to.have.lengthOf(streamCount) + expect(listener.streams).to.have.lengthOf(streamCount) - const streamResults = streams.map(async stream => { - const controller = new AbortController() - controllers.push(controller) + expect(dialer.streams.map(s => s.status)).to.deep.equal(new Array(streamCount).fill('open')) + expect(listener.streams.map(s => s.status)).to.deep.equal(new Array(streamCount).fill('open')) - try { - const abortableRand = abortableSource(infiniteRandom(), controller.signal, { - abortName: 'TestAbortError' - }) - await pipe(abortableRand, stream, drain) - } catch (err: any) { - if (err.name !== 'TestAbortError') { throw err } - } - - if (!closed) { throw new Error('stream should not have ended yet!') } - }) + const localStream = streams[0] + const remoteStream = listener.streams[0] - // Pause, and then send some data and close the first stream - await delay(50) - await pipe([new Uint8ArrayList(randomBuffer())], stream, drain) - closed = true + await Promise.all([ + raceEvent(remoteStream, 'close'), + localStream.closeWrite() + ]) - // Abort all the other streams later - await delay(50) - controllers.forEach(c => { c.abort() }) + expect(dialer.streams).to.have.lengthOf(streamCount - 1) + expect(listener.streams).to.have.lengthOf(streamCount - 1) - // These should now all resolve without error - await Promise.all(streamResults) + expect(dialer.streams.map(s => s.status)).to.deep.equal(new Array(streamCount - 1).fill('open')) + expect(listener.streams.map(s => s.status)).to.deep.equal(new Array(streamCount - 1).fill('open')) }) it('can close a stream for writing', async () => { - const deferred = pDefer() - - const p = duplexPair() - const dialerFactory = await common.setup() - const dialer = dialerFactory.createStreamMuxer({ - direction: 'outbound' - }) - const data = [randomBuffer(), randomBuffer()] + const deferred = Promise.withResolvers() + const data = [Uint8Array.from([0, 1, 2, 3, 4]), Uint8Array.from([5, 6, 7, 8, 9])] - const listenerFactory = await common.setup() - const listener = listenerFactory.createStreamMuxer({ - direction: 'inbound', - onIncomingStream: (stream) => { - void Promise.resolve().then(async () => { + listener.addEventListener('stream', (evt) => { + void Promise.resolve().then(async () => { + try { // Immediate close for write - await stream.closeWrite() - - const results = await pipe(stream, async (source) => { - const data = [] - for await (const chunk of source) { - data.push(chunk.slice()) - } - return data + await evt.detail.closeWrite({ + signal: AbortSignal.timeout(1_000) }) - expect(results).to.eql(data) + + const results = await all(map(evt.detail, (buf) => { + return buf.subarray() + })) + + expect(results).to.deep.equal(data) try { - await stream.sink([new 
Uint8ArrayList(randomBuffer())]) + evt.detail.send(randomBuffer()) } catch (err: any) { deferred.resolve(err) } - deferred.reject(new Error('should not support writing to closed writer')) - }) - } + throw new Error('should not support writing to closed writer') + } catch (err) { + deferred.reject(err) + } + }) }) - void pipe(p[0], dialer, p[0]) - void pipe(p[1], listener, p[1]) - - const stream = await dialer.newStream() - await stream.sink(data) - - const err = await deferred.promise - expect(err).to.have.property('name', 'StreamStateError') - }) - - it('can close a stream for reading', async () => { - const deferred = pDefer() - const p = duplexPair() - const dialerFactory = await common.setup() - const dialer = dialerFactory.createStreamMuxer({ - direction: 'outbound' - }) - const data = [randomBuffer(), randomBuffer()].map(d => new Uint8ArrayList(d)) - const expected = toBuffer(data.map(d => d.subarray())) + const stream = await dialer.createStream() - const listenerFactory = await common.setup() - const listener = listenerFactory.createStreamMuxer({ - direction: 'inbound', - onIncomingStream: (stream) => { - void all(stream.source).then(deferred.resolve, deferred.reject) + for (const buf of data) { + if (!stream.send(buf)) { + await raceEvent(stream, 'drain') } - }) - - void pipe(p[0], dialer, p[0]) - void pipe(p[1], listener, p[1]) - - const stream = await dialer.newStream() - await stream.closeRead() + } - // Source should be done - void Promise.resolve().then(async () => { - expect(await stream.source.next()).to.have.property('done', true) - await stream.sink(data) + await stream.closeWrite({ + signal: AbortSignal.timeout(1_000) }) - const results = await deferred.promise - expect(toBuffer(results.map(b => b.subarray()))).to.equalBytes(expected) + const err = await deferred.promise + expect(err).to.have.property('name', 'StreamStateError') }) - it('calls onStreamEnd for closed streams not previously written', async () => { - const deferred = pDefer() - - const onStreamEnd = (): void => { deferred.resolve() } - const dialerFactory = await common.setup() - const dialer = dialerFactory.createStreamMuxer({ - direction: 'outbound', - onStreamEnd + it('should emit a close event for closed streams not previously written', async () => { + listener.addEventListener('stream', async (evt) => { + void evt.detail.closeWrite() }) - const stream = await dialer.newStream() + const deferred = Promise.withResolvers() + const stream = await dialer.createStream() + stream.addEventListener('close', () => { + deferred.resolve() + }) - await stream.close() + await stream.closeWrite() await deferred.promise }) - it('calls onStreamEnd for read and write closed streams not previously written', async () => { - const deferred = pDefer() - - const onStreamEnd = (): void => { deferred.resolve() } - const dialerFactory = await common.setup() - const dialer = dialerFactory.createStreamMuxer({ - direction: 'outbound', - onStreamEnd + it('should emit a close event for aborted streams not previously written', async () => { + const deferred = Promise.withResolvers() + const stream = await dialer.createStream() + stream.addEventListener('close', () => { + deferred.resolve() }) - const stream = await dialer.newStream() - - await stream.closeWrite() - await stream.closeRead() + stream.abort(new Error('Urk!')) await deferred.promise }) it('should wait for all data to be sent when closing streams', async () => { - const deferred = pDefer() + const deferred = Promise.withResolvers() - const p = duplexPair() - const 
dialerFactory = await common.setup() - const dialer = dialerFactory.createStreamMuxer({ - direction: 'outbound' - }) + listener.addEventListener('stream', (evt) => { + const pb = pbStream(evt.detail) - const listenerFactory = await common.setup() - const listener = listenerFactory.createStreamMuxer({ - direction: 'inbound', - onIncomingStream: (stream) => { - const pb = pbStream(stream) - - void pb.read(Message) - .then(async message => { - deferred.resolve(message) - await pb.unwrap().close() - }) - .catch(err => { - deferred.reject(err) - }) - } + void pb.read(Message) + .then(async message => { + deferred.resolve(message) + await evt.detail.closeWrite() + }) + .catch(err => { + deferred.reject(err) + }) }) - void pipe(p[0], dialer, p[0]) - void pipe(p[1], listener, p[1]) - const message = { message: 'hello world', value: 5, flag: true } - const stream = await dialer.newStream() + const stream = await dialer.createStream() const pb = pbStream(stream) await pb.write(message, Message) - await pb.unwrap().close() + await stream.closeWrite() await expect(deferred.promise).to.eventually.deep.equal(message) }) - /* - it('should abort closing a stream with outstanding data to read', async () => { - const deferred = pDefer() - const p = duplexPair() - const dialerFactory = await common.setup() - const dialer = dialerFactory.createStreamMuxer({ direction: 'outbound' }) + it('should remove a stream in the streams list after aborting', async () => { + const [ + listenerStream, + dialerStream + ] = await Promise.all([ + raceEvent>(listener, 'stream').then(evt => evt.detail), + dialer.createStream() + ]) + + expect(dialer.streams).to.include(dialerStream, 'dialer did not store outbound stream') + expect(listener.streams).to.include(listenerStream, 'listener did not store inbound stream') + + await Promise.all([ + raceEvent(listenerStream, 'close'), + dialerStream.abort(new Error('Urk!')) + ]) + + expect(dialer.streams).to.not.include(dialerStream, 'dialer did not remove outbound stream close') + expect(listener.streams).to.not.include(listenerStream, 'listener did not remove inbound stream after close') + }) - const listenerFactory = await common.setup() - const listener = listenerFactory.createStreamMuxer({ - direction: 'inbound', - onIncomingStream: (stream) => { - const pb = pbStream(stream) - - void pb.read(Message) - .then(async message => { - await pb.write(message, Message) - await pb.unwrap().close() - deferred.resolve(message) - }) - .catch(err => { - deferred.reject(err) - }) - } - }) + it('should remove a stream in the streams list after closing', async () => { + const [ + listenerStream, + dialerStream + ] = await Promise.all([ + raceEvent>(listener, 'stream').then(evt => evt.detail), + dialer.createStream() + ]) - void pipe(p[0], dialer, p[0]) - void pipe(p[1], listener, p[1]) + expect(dialer.streams).to.include(dialerStream, 'dialer did not store outbound stream') + expect(listener.streams).to.include(listenerStream, 'listener did not store inbound stream') - const message = { - message: 'hello world', - value: 5, - flag: true - } + await Promise.all([ + dialerStream.closeWrite(), + listenerStream.closeWrite() + ]) - const stream = await dialer.newStream() + await delay(10) - const pb = pbStream(stream) - await pb.write(message, Message) + expect(dialer.streams).to.not.include(dialerStream, 'dialer did not remove outbound stream close') + expect(listener.streams).to.not.include(listenerStream, 'listener did not remove inbound stream after close') + }) - console.info('await write back') 
- await deferred.promise + it('should not remove a half-closed outbound stream', async () => { + const [ + listenerStream, + dialerStream + ] = await Promise.all([ + raceEvent>(listener, 'stream').then(evt => evt.detail), + dialer.createStream() + ]) + + await dialerStream.closeWrite() + + expect(dialer.streams).to.include(dialerStream, 'dialer did not store outbound stream') + expect(listener.streams).to.include(listenerStream, 'listener did not store inbound stream') + }) - // let message arrive - await delay(100) + it('should not remove a half-closed inbound stream', async () => { + const [ + listenerStream, + dialerStream + ] = await Promise.all([ + raceEvent>(listener, 'stream').then(evt => evt.detail), + dialer.createStream() + ]) + + await listenerStream.closeWrite() + + expect(dialer.streams).to.include(dialerStream, 'dialer did not store outbound stream') + expect(listener.streams).to.include(listenerStream, 'listener did not store inbound stream') + }) + + it('should remove a stream half closed from both ends', async () => { + const [ + listenerStream, + dialerStream + ] = await Promise.all([ + raceEvent>(listener, 'stream').then(evt => evt.detail), + dialer.createStream() + ]) + + expect(dialer.streams).to.include(dialerStream, 'dialer did not store outbound stream') + expect(listener.streams).to.include(listenerStream, 'listener did not store inbound stream') + + await listenerStream.closeWrite() + + expect(dialer.streams).to.include(dialerStream, 'dialer removed outbound stream before fully closing') + expect(listener.streams).to.include(listenerStream, 'listener removed inbound stream before fully closing') + + await Promise.all([ + raceEvent(listenerStream, 'close'), + dialerStream.closeWrite() + ]) + + await delay(10) - // close should time out as message is never read - await expect(pb.unwrap().close()).to.eventually.be.rejected - .with.property('name', 'TimeoutError') + expect(dialer.streams).to.not.include(dialerStream, 'dialer did not remove outbound stream close') + expect(listener.streams).to.not.include(listenerStream, 'listener did not remove inbound stream after close') }) - */ }) } diff --git a/packages/interface-compliance-tests/src/stream-muxer/index.ts b/packages/interface-compliance-tests/src/stream-muxer/index.ts index ce71d22c16..a1d95a9859 100644 --- a/packages/interface-compliance-tests/src/stream-muxer/index.ts +++ b/packages/interface-compliance-tests/src/stream-muxer/index.ts @@ -1,6 +1,6 @@ import baseTest from './base-test.js' import closeTest from './close-test.js' -import megaStressTest from './mega-stress-test.js' +import steamTest from './stream-test.js' import stressTest from './stress-test.js' import type { TestSetup } from '../index.js' import type { StreamMuxerFactory } from '@libp2p/interface' @@ -9,7 +9,7 @@ export default (common: TestSetup): void => { describe('interface-stream-muxer', () => { baseTest(common) closeTest(common) + steamTest(common) stressTest(common) - megaStressTest(common) }) } diff --git a/packages/interface-compliance-tests/src/stream-muxer/mega-stress-test.ts b/packages/interface-compliance-tests/src/stream-muxer/mega-stress-test.ts deleted file mode 100644 index 0e1c05be33..0000000000 --- a/packages/interface-compliance-tests/src/stream-muxer/mega-stress-test.ts +++ /dev/null @@ -1,14 +0,0 @@ -import spawn from './spawner.js' -import type { TestSetup } from '../index.js' -import type { StreamMuxer, StreamMuxerFactory, StreamMuxerInit } from '@libp2p/interface' - -export default (common: TestSetup): void => { - const 
createMuxer = async (init?: StreamMuxerInit): Promise => { - const factory = await common.setup() - return factory.createStreamMuxer(init) - } - - describe.skip('mega stress test', function () { - it('10,000 streams with 10,000 msg', async () => { await spawn(createMuxer, 10000, 10000, 5000) }) - }) -} diff --git a/packages/interface-compliance-tests/src/stream-muxer/spawner.ts b/packages/interface-compliance-tests/src/stream-muxer/spawner.ts deleted file mode 100644 index cd97dfe0f2..0000000000 --- a/packages/interface-compliance-tests/src/stream-muxer/spawner.ts +++ /dev/null @@ -1,57 +0,0 @@ -import { expect } from 'aegir/chai' -import all from 'it-all' -import drain from 'it-drain' -import { duplexPair } from 'it-pair/duplex' -import { pipe } from 'it-pipe' -import pLimit from 'p-limit' -import { Uint8ArrayList } from 'uint8arraylist' -import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' -import type { StreamMuxer, StreamMuxerInit } from '@libp2p/interface' - -export default async (createMuxer: (init?: StreamMuxerInit) => Promise, nStreams: number, nMsg: number, limit?: number): Promise => { - const [dialerSocket, listenerSocket] = duplexPair() - - const msg = new Uint8ArrayList(uint8ArrayFromString('simple msg')) - - const listener = await createMuxer({ - direction: 'inbound', - onIncomingStream: (stream) => { - void pipe( - stream, - drain - ).then(async () => { - await stream.close() - }) - .catch(err => { stream.abort(err) }) - } - }) - const dialer = await createMuxer({ - direction: 'outbound' - }) - - void pipe(listenerSocket, listener, listenerSocket) - void pipe(dialerSocket, dialer, dialerSocket) - - const spawnStream = async (): Promise => { - const stream = await dialer.newStream() - expect(stream).to.exist // eslint-disable-line - - const res = await pipe( - (async function * () { - for (let i = 0; i < nMsg; i++) { - yield msg - } - }()), - stream, - async (source) => all(source) - ) - - expect(res).to.be.eql([]) - } - - const limiter = pLimit(limit ?? 
Infinity) - - await Promise.all( - Array.from(Array(nStreams), async () => { await limiter(async () => { await spawnStream() }) }) - ) -} diff --git a/packages/interface-compliance-tests/src/stream-muxer/stream-test.ts b/packages/interface-compliance-tests/src/stream-muxer/stream-test.ts new file mode 100644 index 0000000000..a5c245bc3d --- /dev/null +++ b/packages/interface-compliance-tests/src/stream-muxer/stream-test.ts @@ -0,0 +1,325 @@ +import { StreamCloseEvent, StreamMessageEvent } from '@libp2p/interface' +import { multiaddrConnectionPair } from '@libp2p/utils' +import { expect } from 'aegir/chai' +import delay from 'delay' +import { raceEvent } from 'race-event' +import Sinon from 'sinon' +import { isValidTick } from '../is-valid-tick.ts' +import type { TestSetup } from '../index.ts' +import type { MultiaddrConnection, Stream, StreamMuxer, StreamMuxerFactory } from '@libp2p/interface' + +export default (common: TestSetup): void => { + describe('streams', () => { + let dialer: StreamMuxer + let listener: StreamMuxer + let outboundStream: Stream + let inboundStream: Stream + let streams: [Stream, Stream] + let outboundConnection: MultiaddrConnection + let inboundConnection: MultiaddrConnection + + beforeEach(async () => { + ([outboundConnection, inboundConnection] = multiaddrConnectionPair()) + + const dialerFactory = await common.setup() + dialer = dialerFactory.createStreamMuxer(outboundConnection) + + const listenerFactory = await common.setup() + listener = listenerFactory.createStreamMuxer(inboundConnection) + + streams = await Promise.all([ + raceEvent>(listener, 'stream').then(evt => evt.detail), + dialer.createStream() + ]) + + inboundStream = streams[0] + outboundStream = streams[1] + }) + + afterEach(async () => { + await dialer?.close() + await listener?.close() + }) + + it('should have correct status after opening', () => { + streams.forEach(stream => { + expect(stream).to.have.property('status', 'open', `${stream.direction} stream status was incorrect`) + expect(stream).to.have.property('writeStatus', 'writable', `${stream.direction} stream writeStatus was incorrect`) + expect(stream).to.have.property('readStatus', 'readable', `${stream.direction} stream readStatus was incorrect`) + }) + }) + + it('should have correct timeline after opening', () => { + streams.forEach(stream => { + expect(isValidTick(stream.timeline.open)).to.equal(true, `${stream.direction} stream timeline.open was incorrect`) + expect(stream).to.not.have.nested.property('timeline.close', `${stream.direction} stream timeline.close was incorrect`) + expect(stream).to.not.have.nested.property('timeline.closeRead', `${stream.direction} stream timeline.closeRead was incorrect`) + expect(stream).to.not.have.nested.property('timeline.closeWrite', `${stream.direction} stream timeline.closeWrite was incorrect`) + expect(stream).to.not.have.nested.property('timeline.reset', `${stream.direction} stream timeline.reset was incorrect`) + expect(stream).to.not.have.nested.property('timeline.abort', `${stream.direction} stream timeline.abort was incorrect`) + }) + }) + + it('outbound stream sends data', async () => { + const messageEventPromise = raceEvent(inboundStream, 'message') + const data = Uint8Array.from([0, 1, 2, 3, 4]) + + outboundStream.send(data) + + const evt = await messageEventPromise + expect(evt.data.subarray()).to.equalBytes(data) + }) + + it('inbound stream sends data', async () => { + const messageEventPromise = raceEvent(outboundStream, 'message') + const data = Uint8Array.from([0, 1, 2, 3, 4]) + 
+ inboundStream.send(data) + + const evt = await messageEventPromise + expect(evt.data.subarray()).to.equalBytes(data) + }) + + it('closes', async () => { + const signal = AbortSignal.timeout(1_000) + + void outboundStream.closeWrite({ + signal + }) + void inboundStream.closeWrite({ + signal + }) + + expect(outboundStream).to.have.property('status', 'open') + expect(outboundStream).to.have.property('readStatus', 'closed') + expect(outboundStream).to.have.property('writeStatus', 'closing') + + await Promise.all([ + raceEvent(outboundStream, 'close', signal), + raceEvent(inboundStream, 'close', signal) + ]) + + streams.forEach(stream => { + expect(stream).to.have.property('status', 'closed', `${stream.direction} stream status was incorrect`) + expect(stream).to.have.property('writeStatus', 'closed', `${stream.direction} stream writeStatus was incorrect`) + expect(stream).to.have.property('readStatus', 'closed', `${stream.direction} stream readStatus was incorrect`) + + expect(isValidTick(stream.timeline.open)).to.equal(true, `${stream.direction} stream timeline.open was incorrect`) + expect(isValidTick(stream.timeline.close)).to.equal(true, `${stream.direction} stream timeline.close was incorrect`) + expect(isValidTick(stream.timeline.closeWrite)).to.equal(true, `${stream.direction} stream timeline.closeWrite was incorrect`) + + expect(stream).to.not.have.nested.property('timeline.reset', `${stream.direction} stream timeline.reset was incorrect`) + expect(stream).to.not.have.nested.property('timeline.abort', `${stream.direction} stream timeline.abort was incorrect`) + }) + }) + + it('closes for writing', async () => { + const signal = AbortSignal.timeout(1_000) + + const eventPromises = Promise.all([ + raceEvent(outboundStream, 'closeWrite'), + raceEvent(inboundStream, 'remoteCloseWrite') + ]) + + void outboundStream.closeWrite({ + signal + }) + + expect(outboundStream).to.have.property('writeStatus', 'closing') + + await delay(100) + + expect(inboundStream).to.have.property('readStatus', 'closed') + + await eventPromises + + streams.forEach(stream => { + expect(stream).to.have.property('status', 'open', `${stream.direction} stream status was incorrect`) + + expect(isValidTick(stream.timeline.open)).to.equal(true, `${stream.direction} stream timeline.open was incorrect`) + + expect(stream).to.not.have.nested.property('timeline.close', `${stream.direction} stream timeline.close was incorrect`) + expect(stream).to.not.have.nested.property('timeline.reset', `${stream.direction} stream timeline.reset was incorrect`) + expect(stream).to.not.have.nested.property('timeline.abort', `${stream.direction} stream timeline.abort was incorrect`) + }) + + expect(outboundStream).to.have.property('writeStatus', 'closed', 'outbound stream writeStatus was incorrect') + expect(outboundStream).to.have.property('readStatus', 'readable', 'inbound stream readStatus was incorrect') + + expect(outboundStream).to.not.have.nested.property('timeline.closeRead', 'inbound stream timeline.closeRead was incorrect') + expect(isValidTick(outboundStream.timeline.closeWrite)).to.equal(true, 'inbound stream timeline.closeWrite was incorrect') + + expect(inboundStream).to.have.property('writeStatus', 'writable', 'inbound stream writeStatus was incorrect') + expect(inboundStream).to.have.property('readStatus', 'closed', 'inbound stream readStatus was incorrect') + + expect(inboundStream).to.not.have.nested.property('timeline.closeWrite', 'inbound stream timeline.closeWrite was incorrect') + }) + + it('aborts', async () => { + 
const eventPromises = Promise.all([ + raceEvent(outboundStream, 'close'), + raceEvent(inboundStream, 'close') + ]) + + const err = new Error('Urk!') + outboundStream.abort(err) + + const [outboundEvent, inboundEvent] = await eventPromises + + streams.forEach(stream => { + expect(stream).to.have.property('writeStatus', 'closed', `${stream.direction} stream writeStatus was incorrect`) + expect(stream).to.have.property('readStatus', 'closed', `${stream.direction} stream readStatus was incorrect`) + + expect(isValidTick(stream.timeline.open)).to.equal(true, `${stream.direction} stream timeline.open was incorrect`) + + expect(stream).to.not.have.nested.property('timeline.close', `${stream.direction} stream timeline.close was incorrect`) + }) + + expect(outboundStream).to.have.property('status', 'aborted', 'outbound stream status was incorrect') + expect(isValidTick(outboundStream.timeline.abort)).to.equal(true, 'outbound stream timeline.abort was incorrect') + expect(outboundStream).to.not.have.nested.property('timeline.reset', 'outbound stream timeline.reset was incorrect') + + expect(inboundStream).to.have.property('status', 'reset', 'inbound stream status was incorrect') + expect(inboundStream).to.not.have.nested.property('timeline.abort', 'outbound stream timeline.abort was incorrect') + expect(isValidTick(inboundStream.timeline.reset)).to.equal(true, 'inbound stream timeline.reset was incorrect') + + expect(() => outboundStream.send(Uint8Array.from([0, 1, 2, 3]))).to.throw() + .with.property('name', 'StreamStateError', 'could still write to aborted stream') + + expect(() => inboundStream.send(Uint8Array.from([0, 1, 2, 3]))).to.throw() + .with.property('name', 'StreamStateError', 'could still write to reset stream') + + expect(outboundEvent).to.have.property('error', err) + expect(inboundEvent).to.have.nested.property('error.name', 'StreamResetError') + }) + + it('does not send close read when remote closes write', async () => { + // @ts-expect-error internal method of AbstractMessageStream + const sendCloseReadSpy = Sinon.spy(outboundStream, 'sendCloseRead') + + await Promise.all([ + raceEvent(outboundStream, 'remoteCloseWrite'), + inboundStream.closeWrite() + ]) + + await delay(100) + + expect(sendCloseReadSpy.called).to.be.false() + }) + + it('does not send close read or write when remote resets', async () => { + // @ts-expect-error internal method of AbstractMessageStream + const sendCloseReadSpy = Sinon.spy(outboundStream, 'sendCloseRead') + // @ts-expect-error internal method of AbstractMessageStream + const sendCloseWriteSpy = Sinon.spy(outboundStream, 'sendCloseWrite') + + await Promise.all([ + raceEvent(outboundStream, 'close'), + inboundStream.abort(new Error('Urk!')) + ]) + + await delay(100) + + await outboundStream.closeWrite() + + await delay(100) + + expect(sendCloseReadSpy.called).to.be.false() + expect(sendCloseWriteSpy.called).to.be.false() + }) + + it('should wait for sending data to finish when closing gracefully', async () => { + let sent = 0 + let received = 0 + let filledBuffer = false + const receivedAll = Promise.withResolvers() + + inboundStream.addEventListener('message', (evt) => { + received += evt.data.byteLength + + if (filledBuffer && received === sent) { + receivedAll.resolve(true) + } + }) + + // fill the send buffer + while (true) { + const length = 1024 + sent += length + const sendMore = outboundStream.send(new Uint8Array(length)) + + if (sendMore === false) { + filledBuffer = true + break + } + } + + expect(outboundStream.writeStatus).to.equal('paused') 
+ + // close gracefully + await outboundStream.closeWrite() + + await expect(receivedAll.promise).to.eventually.be.true('did not receive all data') + }) + + it('should wait for sending data to finish when closing the writable end gracefully', async () => { + let sent = 0 + let received = 0 + let filledBuffer = false + const receivedAll = Promise.withResolvers() + + inboundStream.addEventListener('message', (evt) => { + received += evt.data.byteLength + + if (filledBuffer && received === sent) { + receivedAll.resolve(true) + } + }) + + // fill the send buffer + while (true) { + const length = 1024 + sent += length + const sendMore = outboundStream.send(new Uint8Array(length)) + + if (sendMore === false) { + filledBuffer = true + break + } + } + + expect(outboundStream.writeStatus).to.equal('paused') + + // close gracefully + await outboundStream.closeWrite() + + await expect(receivedAll.promise).to.eventually.be.true('did not receive all data') + }) + + it('should abort close due to timeout with slow sender', async () => { + // @ts-expect-error private fields + outboundConnection.local.delay = 100 + + inboundStream = streams[0] + outboundStream = streams[1] + + // fill the send buffer + while (true) { + const length = 1024 + const sendMore = outboundStream.send(new Uint8Array(length)) + + if (sendMore === false) { + break + } + } + + expect(outboundStream.writeStatus).to.equal('paused') + + // close stream, should be aborted + await expect(outboundStream.closeWrite({ + signal: AbortSignal.timeout(10) + })).to.eventually.be.rejected + .with.property('name', 'AbortError') + }) + }) +} diff --git a/packages/interface-compliance-tests/src/stream-muxer/stress-test.ts b/packages/interface-compliance-tests/src/stream-muxer/stress-test.ts index 770f81fb88..887eca2023 100644 --- a/packages/interface-compliance-tests/src/stream-muxer/stress-test.ts +++ b/packages/interface-compliance-tests/src/stream-muxer/stress-test.ts @@ -1,27 +1,91 @@ -import spawn from './spawner.js' +import { multiaddrConnectionPair, echo } from '@libp2p/utils' +import { expect } from 'aegir/chai' +import { pEvent } from 'p-event' +import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' import type { TestSetup } from '../index.js' -import type { StreamMuxerFactory, StreamMuxerInit, StreamMuxer } from '@libp2p/interface' +import type { StreamMuxerFactory, StreamMuxer, MultiaddrConnection } from '@libp2p/interface' + +async function spawn (createMuxer: (maConn: MultiaddrConnection) => Promise, nStreams: number, nMsg: number): Promise { + const [outboundConnection, inboundConnection] = multiaddrConnectionPair() + + const listener = await createMuxer(inboundConnection) + listener.addEventListener('stream', (evt) => { + echo(evt.detail) + }) + + const dialer = await createMuxer(outboundConnection) + + const spawnStream = async (): Promise => { + let sentBytes = 0 + let receivedBytes = 0 + + const receivedAllMessagesPromise = Promise.withResolvers() + const outboundStream = await dialer.createStream() + + async function * messages (): AsyncGenerator { + for (let i = 0; i < nMsg; i++) { + yield uint8ArrayFromString(`message ${i + 1}/${nMsg}`) + } + } + + outboundStream.addEventListener('message', (evt) => { + receivedBytes += evt.data.byteLength + + if (receivedBytes === sentBytes) { + receivedAllMessagesPromise.resolve() + } + }) + + for await (const buf of messages()) { + sentBytes += buf.byteLength + } + + for await (const buf of messages()) { + const sendMore = outboundStream.send(buf) + + if (sendMore === 
false) { + await pEvent(outboundStream, 'drain', { + rejectionEvents: ['close'] + }) + } + } + + await receivedAllMessagesPromise.promise + outboundStream.log('sent and received all messages %d/%d', receivedBytes, sentBytes) + + await outboundStream.closeWrite() + + expect(receivedBytes).to.equal(sentBytes) + } + + await Promise.all( + Array.from(Array(nStreams), async () => { + await spawnStream() + }) + ) + + await listener.close() + await dialer.close() +} export default (common: TestSetup): void => { - const createMuxer = async (init?: StreamMuxerInit): Promise => { + const createMuxer = async (maConn: MultiaddrConnection): Promise => { const factory = await common.setup() - return factory.createStreamMuxer(init) + return factory.createStreamMuxer(maConn) } + const streams = [1, 10, 100, 1000] + const messages = [1, 10, 100, 1000] + describe('stress test', function () { - this.timeout(1600000) - - it('1 stream with 1 msg', async () => { await spawn(createMuxer, 1, 1) }) - it('1 stream with 10 msg', async () => { await spawn(createMuxer, 1, 10) }) - it('1 stream with 100 msg', async () => { await spawn(createMuxer, 1, 100) }) - it('10 streams with 1 msg', async () => { await spawn(createMuxer, 10, 1) }) - it('10 streams with 10 msg', async () => { await spawn(createMuxer, 10, 10) }) - it('10 streams with 100 msg', async () => { await spawn(createMuxer, 10, 100) }) - it('100 streams with 1 msg', async () => { await spawn(createMuxer, 100, 1) }) - it('100 streams with 10 msg', async () => { await spawn(createMuxer, 100, 10) }) - it('100 streams with 100 msg', async () => { await spawn(createMuxer, 100, 100) }) - it('1000 streams with 1 msg', async () => { await spawn(createMuxer, 1000, 1) }) - it('1000 streams with 10 msg', async () => { await spawn(createMuxer, 1000, 10) }) - it('1000 streams with 100 msg', async () => { await spawn(createMuxer, 1000, 100) }) + this.timeout(1_600_000) + + for (let i = 0; i < streams.length; i++) { + for (let j = 0; j < messages.length; j++) { + it(`${streams[i]} stream(s) with ${messages[j]} msg(s)`, async () => { + await spawn(createMuxer, streams[i], messages[j]) + }) + } + } }) } diff --git a/packages/interface-compliance-tests/src/transport/index.ts b/packages/interface-compliance-tests/src/transport/index.ts index deb5b1d510..b4e950e046 100644 --- a/packages/interface-compliance-tests/src/transport/index.ts +++ b/packages/interface-compliance-tests/src/transport/index.ts @@ -3,10 +3,10 @@ import { expect } from 'aegir/chai' import delay from 'delay' import drain from 'it-drain' import { pushable } from 'it-pushable' -import pDefer from 'p-defer' import { pEvent } from 'p-event' import pRetry from 'p-retry' import pWaitFor from 'p-wait-for' +import { raceEvent } from 'race-event' import { raceSignal } from 'race-signal' import { isValidTick } from '../is-valid-tick.js' import { createPeer, getTransportManager, getUpgrader } from './utils.js' @@ -16,7 +16,6 @@ import type { Connection, Libp2p, Stream, StreamHandler } from '@libp2p/interfac import type { Multiaddr } from '@multiformats/multiaddr' import type { MultiaddrMatcher } from '@multiformats/multiaddr-matcher' import type { Libp2pInit } from 'libp2p' -import type { DeferredPromise } from 'p-defer' export interface TransportTestFixtures { /** @@ -133,10 +132,10 @@ export default (common: TestSetup): void => { it('should close all streams when the connection closes', async () => { ({ dialer, listener, dialAddrs } = await getSetup(common)) - let incomingConnectionPromise: DeferredPromise | 
undefined + let incomingConnectionPromise: PromiseWithResolvers | undefined if (listener != null) { - incomingConnectionPromise = pDefer() + incomingConnectionPromise = Promise.withResolvers() listener.addEventListener('connection:open', (event) => { const conn = event.detail @@ -255,7 +254,7 @@ export default (common: TestSetup): void => { } }) - it('can close a stream for reading but send a large amount of data', async function () { + it('can close a stream for writing but receive a large amount of data', async function () { const timeout = 120_000 this.timeout(timeout); ({ dialer, listener, dialAddrs } = await getSetup(common)) @@ -264,73 +263,20 @@ export default (common: TestSetup): void => { return this.skip() } - const protocol = '/send-data/1.0.0' + const protocol = '/receive-data/1.0.0' const chunkSize = 1024 const bytes = chunkSize * 1024 * 10 - const deferred = pDefer() - - await listener.handle(protocol, ({ stream }) => { - Promise.resolve().then(async () => { - let read = 0 - for await (const buf of stream.source) { - read += buf.byteLength + await listener.handle(protocol, async (stream) => { + for (let i = 0; i < bytes; i += chunkSize) { + const sendMore = stream.send(new Uint8Array(chunkSize)) - if (read === bytes) { - deferred.resolve() - break - } + if (!sendMore) { + await raceEvent(stream, 'drain') } - }) - .catch(err => { - deferred.reject(err) - stream.abort(err) - }) - }) - - const stream = await dialer.dialProtocol(dialAddrs[0], protocol) - - await stream.closeRead() - - await stream.sink((async function * () { - for (let i = 0; i < bytes; i += chunkSize) { - yield new Uint8Array(chunkSize) } - })()) - - await stream.close() - - await deferred.promise - }) - - it('can close a stream for writing but receive a large amount of data', async function () { - const timeout = 120_000 - this.timeout(timeout); - ({ dialer, listener, dialAddrs } = await getSetup(common)) - - if (listener == null) { - return this.skip() - } - - const protocol = '/receive-data/1.0.0' - const chunkSize = 1024 - const bytes = chunkSize * 1024 * 10 - const deferred = pDefer() - await listener.handle(protocol, ({ stream }) => { - Promise.resolve().then(async () => { - await stream.sink((async function * () { - for (let i = 0; i < bytes; i += chunkSize) { - yield new Uint8Array(chunkSize) - } - })()) - - await stream.close() - }) - .catch(err => { - deferred.reject(err) - stream.abort(err) - }) + await stream.closeWrite() }) const stream = await dialer.dialProtocol(dialAddrs[0], protocol) @@ -339,7 +285,7 @@ export default (common: TestSetup): void => { let read = 0 - for await (const buf of stream.source) { + for await (const buf of stream) { read += buf.byteLength } @@ -362,18 +308,14 @@ export default (common: TestSetup): void => { * | <--- FIN_ACK | */ - const getRemoteStream = pDefer() + const getRemoteStream = Promise.withResolvers() const protocol = '/close-local-while-remote-writes/1.0.0' - const streamHandler: StreamHandler = ({ stream }) => { - void Promise.resolve().then(async () => { - getRemoteStream.resolve(stream) - }) + const streamHandler: StreamHandler = (stream) => { + getRemoteStream.resolve(stream) } - await listener.handle(protocol, (info) => { - streamHandler(info) - }, { + await listener.handle(protocol, streamHandler, { runOnLimitedConnection: true }) @@ -388,18 +330,26 @@ export default (common: TestSetup): void => { const p = stream.closeWrite() const remoteStream = await getRemoteStream.promise - // close the readable end of the remote stream - await 
remoteStream.closeRead() // keep the remote write end open, this should delay the FIN_ACK reply to the local stream const remoteInputStream = pushable() - void remoteStream.sink(remoteInputStream) + Promise.resolve().then(async () => { + for await (const buf of remoteInputStream) { + const sendMore = remoteStream.send(buf) + + if (sendMore === false) { + await raceEvent(stream, 'drain') + } + } + + await remoteStream.closeWrite() + }) // wait for remote to receive local close-write await pRetry(() => { - if (remoteStream.readStatus !== 'closed') { - throw new Error('Remote stream read status ' + remoteStream.readStatus) - } + // if (remoteStream.readStatus !== 'closed') { + // throw new Error('Remote stream read status ' + remoteStream.readStatus) + // } }, { minTimeout: 100 }) @@ -436,18 +386,14 @@ export default (common: TestSetup): void => { * | <--- FIN_ACK | */ - const getRemoteStream = pDefer() + const getRemoteStream = Promise.withResolvers() const protocol = '/close-local-while-remote-reads/1.0.0' - const streamHandler: StreamHandler = ({ stream }) => { - void Promise.resolve().then(async () => { - getRemoteStream.resolve(stream) - }) + const streamHandler: StreamHandler = (stream) => { + getRemoteStream.resolve(stream) } - await listener.handle(protocol, (info) => { - streamHandler(info) - }, { + await listener.handle(protocol, streamHandler, { runOnLimitedConnection: true }) @@ -459,33 +405,30 @@ export default (common: TestSetup): void => { }) // keep the remote write end open, this should delay the FIN_ACK reply to the local stream - const p = stream.sink([]) + const p = stream.closeWrite() const remoteStream = await getRemoteStream.promise - // close the readable end of the remote stream - await remoteStream.closeRead() + // readable end should finish - await drain(remoteStream.source) + await drain(remoteStream) // wait for remote to receive local close-write await pRetry(() => { - if (remoteStream.readStatus !== 'closed') { - throw new Error('Remote stream read status ' + remoteStream.readStatus) - } + // if (remoteStream.readStatus !== 'closed') { + // throw new Error('Remote stream read status ' + remoteStream.readStatus) + // } }, { minTimeout: 100 }) // remote closes write - await remoteStream.sink([]) + await remoteStream.closeWrite() // wait to receive FIN_ACK await p - // close read end of stream - await stream.closeRead() // readable end should finish - await drain(stream.source) + await drain(stream) // wait for remote to notice closure await pRetry(() => { @@ -518,7 +461,7 @@ export default (common: TestSetup): void => { await listener.stop() - const transportListeningPromise = pDefer() + const transportListeningPromise = Promise.withResolvers() listener.addEventListener('transport:listening', (event) => { const transportListener = event.detail @@ -568,6 +511,5 @@ function assertStreamClosed (stream: Stream): void { expect(stream.writeStatus).to.equal('closed') expect(stream.timeline.close).to.be.a('number') - expect(stream.timeline.closeRead).to.be.a('number') expect(stream.timeline.closeWrite).to.be.a('number') } diff --git a/packages/interface-compliance-tests/src/transport/utils.ts b/packages/interface-compliance-tests/src/transport/utils.ts index d7a576c236..4f517346b7 100644 --- a/packages/interface-compliance-tests/src/transport/utils.ts +++ b/packages/interface-compliance-tests/src/transport/utils.ts @@ -3,8 +3,8 @@ import { echo } from '@libp2p/echo' import { memory } from '@libp2p/memory' import { plaintext } from '@libp2p/plaintext' +import { mockMuxer } 
from '@libp2p/utils' import { createLibp2p } from 'libp2p' -import { mockMuxer } from '../mocks/muxer.js' import type { Echo } from '@libp2p/echo' import type { Libp2p, Upgrader } from '@libp2p/interface' import type { TransportManager } from '@libp2p/interface-internal' @@ -28,7 +28,7 @@ export async function createPeer (config: Partial = {}): Promise< services: { ...config.services, echo: echo({ - maxInboundStreams: 5 + maxInboundStreams: 5_000 }) } }) diff --git a/packages/interface-compliance-tests/test/is-valid-tick.spec.ts b/packages/interface-compliance-tests/test/is-valid-tick.spec.ts new file mode 100644 index 0000000000..e4c1d065eb --- /dev/null +++ b/packages/interface-compliance-tests/test/is-valid-tick.spec.ts @@ -0,0 +1,20 @@ +import { expect } from 'aegir/chai' +import { isValidTick } from '../src/is-valid-tick.js' + +describe('is-valid-tick', () => { + it('should validate tick', async () => { + expect(isValidTick(Date.now())).to.be.true() + }) + + it('should validate tick within specified ms', async () => { + expect(isValidTick(Date.now() - 100, 500)).to.be.true() + }) + + it('should not validate future tick', async () => { + expect(isValidTick(Date.now() + 100)).to.be.false() + }) + + it('should not validate tick outside specified ms', async () => { + expect(isValidTick(Date.now() - 100, 50)).to.be.false() + }) +}) diff --git a/packages/interface-compliance-tests/test/matchers.spec.ts b/packages/interface-compliance-tests/test/matchers.spec.ts deleted file mode 100644 index 8c67047c58..0000000000 --- a/packages/interface-compliance-tests/test/matchers.spec.ts +++ /dev/null @@ -1,54 +0,0 @@ -import { generateKeyPair } from '@libp2p/crypto/keys' -import { peerIdFromString, peerIdFromPrivateKey } from '@libp2p/peer-id' -import { multiaddr } from '@multiformats/multiaddr' -import { expect } from 'aegir/chai' -import Sinon from 'sinon' -import { matchMultiaddr, matchPeerId } from '../src/matchers.js' - -describe('peer id matcher', () => { - it('should match the same object', async () => { - const privateKey = await generateKeyPair('Ed25519') - const peerId = peerIdFromPrivateKey(privateKey) - - const stub = Sinon.stub() - stub(peerId) - - expect(stub.calledWith(matchPeerId(peerId))).to.be.true() - }) - - it('should match the same value', async () => { - const privateKey = await generateKeyPair('Ed25519') - const peerId = peerIdFromPrivateKey(privateKey) - const peerId2 = peerIdFromString(peerId.toString()) - - const stub = Sinon.stub() - stub(peerId) - - expect(stub.calledWith(peerId2)).to.be.true() - expect(stub.calledWith(matchPeerId(peerId2))).to.be.true() - }) -}) - -describe('multiaddr matcher', () => { - it('should match the same object', async () => { - const ma = multiaddr('/ip4/127.0.0.1/tcp/4001') - - const stub = Sinon.stub() - stub(ma) - - expect(stub.calledWith(matchMultiaddr(ma))).to.be.true() - }) - - it('should match the same value', async () => { - const ma = multiaddr('/ip4/127.0.0.1/tcp/4001') - const ma2 = multiaddr('/ip4/127.0.0.1/tcp/4001') - - const stub = Sinon.stub() - stub(ma) - - // this would match because no properties are changed after creation since - // https://github.com/multiformats/js-multiaddr/pull/330 - // expect(stub.calledWith(ma2)).to.be.false() - expect(stub.calledWith(matchMultiaddr(ma2))).to.be.true() - }) -}) diff --git a/packages/interface-compliance-tests/typedoc.json b/packages/interface-compliance-tests/typedoc.json index a8ac4312e5..fe055d7a81 100644 --- a/packages/interface-compliance-tests/typedoc.json +++ 
b/packages/interface-compliance-tests/typedoc.json @@ -3,11 +3,7 @@ "entryPoints": [ "./src/index.ts", "./src/connection-encryption/index.ts", - "./src/is-valid-tick.ts", - "./src/matchers.ts", - "./src/mocks/index.ts", "./src/peer-discovery/index.ts", - "./src/pubsub/index.ts", "./src/stream-muxer/index.ts", "./src/transport/index.ts" ] diff --git a/packages/interface-internal/src/connection-manager.ts b/packages/interface-internal/src/connection-manager.ts index f7f76253be..adde004171 100644 --- a/packages/interface-internal/src/connection-manager.ts +++ b/packages/interface-internal/src/connection-manager.ts @@ -98,9 +98,9 @@ export interface ConnectionManager { * otherwise it will return false. * * @param maConn - The multiaddr connection to evaluate. - * @returns A promise that resolves to `true` if the connection can be accepted, `false` otherwise. + * @returns `true` if the connection can be accepted, `false` otherwise. */ - acceptIncomingConnection(maConn: MultiaddrConnection): Promise + acceptIncomingConnection(maConn: MultiaddrConnection): boolean /** * Invoked after upgrading an inbound multiaddr connection has finished diff --git a/packages/interface-internal/src/registrar.ts b/packages/interface-internal/src/registrar.ts index dc96e3877f..d00d4b30f8 100644 --- a/packages/interface-internal/src/registrar.ts +++ b/packages/interface-internal/src/registrar.ts @@ -1,12 +1,7 @@ -import type { StreamHandler, StreamHandlerOptions, StreamHandlerRecord, Topology, IncomingStreamData } from '@libp2p/interface' +import type { StreamHandler, StreamHandlerOptions, StreamHandlerRecord, Topology } from '@libp2p/interface' import type { AbortOptions } from '@multiformats/multiaddr' export type { - /** - * @deprecated This type should be imported from @libp2p/interface directly - */ - IncomingStreamData, - /** * @deprecated This type should be imported from @libp2p/interface directly */ diff --git a/packages/interface/package.json b/packages/interface/package.json index ce37de7af7..95915b2db5 100644 --- a/packages/interface/package.json +++ b/packages/interface/package.json @@ -44,7 +44,6 @@ "@multiformats/dns": "^1.0.6", "@multiformats/multiaddr": "^12.4.4", "it-pushable": "^3.2.3", - "it-stream-types": "^2.0.2", "main-event": "^1.0.1", "multiformats": "^13.3.6", "progress-events": "^1.0.1", diff --git a/packages/interface/src/connection-encrypter.ts b/packages/interface/src/connection-encrypter.ts index e4ef74b469..796f0724b3 100644 --- a/packages/interface/src/connection-encrypter.ts +++ b/packages/interface/src/connection-encrypter.ts @@ -1,8 +1,4 @@ -import type { MultiaddrConnection } from './connection.js' -import type { AbortOptions, Logger, StreamMuxerFactory } from './index.js' -import type { PeerId } from './peer-id.js' -import type { Duplex } from 'it-stream-types' -import type { Uint8ArrayList } from 'uint8arraylist' +import type { AbortOptions, StreamMuxerFactory, MultiaddrConnection, PeerId, MessageStream } from './index.js' /** * If the remote PeerId is known and passed as an option, the securing operation @@ -21,13 +17,6 @@ export interface SecureConnectionOptions extends AbortOptions { skipStreamMuxerNegotiation?: boolean } -/** - * A stream with an optional logger - */ -export interface SecurableStream extends Duplex> { - log?: Logger -} - /** * A libp2p connection encrypter module must be compliant to this interface * to ensure all exchanged data between two peers is encrypted. 
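
The hunk below swaps the old `Duplex`-based `SecurableStream` for `MessageStream` and reshapes `SecuredConnection` so the secured stream is returned as `connection` rather than `conn`. As a rough, non-authoritative sketch of how a caller might consume the reworked return value (the `secureDial` helper and its arguments are illustrative only, not part of this changeset):

```ts
import type { ConnectionEncrypter, MultiaddrConnection, PeerId } from '@libp2p/interface'

// Hypothetical helper: secure an outbound MultiaddrConnection and use the
// reshaped SecuredConnection result
async function secureDial (encrypter: ConnectionEncrypter, maConn: MultiaddrConnection): Promise<PeerId> {
  // the secured stream now comes back as `connection` (a MessageStream)
  // alongside the verified `remotePeer`
  const { connection, remotePeer } = await encrypter.secureOutbound(maConn, {
    signal: AbortSignal.timeout(10_000)
  })

  // the rest of the upgrade (e.g. stream muxer negotiation) would continue
  // over `connection`; here we only return the verified remote peer
  void connection

  return remotePeer
}
```
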
@@ -40,19 +29,30 @@ export interface ConnectionEncrypter { * pass it for extra verification, otherwise it will be determined during * the handshake. */ - secureOutbound (connection: Stream, options?: SecureConnectionOptions): Promise> + secureOutbound (connection: Stream, options?: SecureConnectionOptions): Promise> /** * Decrypt incoming data. If the remote PeerId is known, * pass it for extra verification, otherwise it will be determined during * the handshake */ - secureInbound (connection: Stream, options?: SecureConnectionOptions): Promise> + secureInbound (connection: Stream, options?: SecureConnectionOptions): Promise> } -export interface SecuredConnection { - conn: Stream +export interface SecuredConnection { + /** + * The decrypted data stream + */ + connection: MessageStream + + /** + * Any extension data transferred as part of the encryption handshake + */ remoteExtensions?: Extension + + /** + * The identifier of the remote peer + */ remotePeer: PeerId /** diff --git a/packages/interface/src/connection-gater.ts b/packages/interface/src/connection-gater.ts index 1695bf6e6b..d5543c7dfa 100644 --- a/packages/interface/src/connection-gater.ts +++ b/packages/interface/src/connection-gater.ts @@ -1,5 +1,4 @@ -import type { MultiaddrConnection } from './connection.js' -import type { PeerId } from './peer-id.js' +import type { MultiaddrConnection, PeerId } from './index.js' import type { Multiaddr } from '@multiformats/multiaddr' export interface ConnectionGater { diff --git a/packages/interface/src/connection-protector.ts b/packages/interface/src/connection-protector.ts new file mode 100644 index 0000000000..0d6cde54cd --- /dev/null +++ b/packages/interface/src/connection-protector.ts @@ -0,0 +1,9 @@ +import type { AbortOptions, MessageStream } from './index.ts' + +export interface ConnectionProtector { + /** + * Takes a MultiaddrConnection and creates a private encryption stream between + * the two peers from the shared key the Protector instance was created with. + */ + protect(connection: MessageStream, options?: AbortOptions): Promise +} diff --git a/packages/interface/src/connection.ts b/packages/interface/src/connection.ts index 5796bad266..280f12484d 100644 --- a/packages/interface/src/connection.ts +++ b/packages/interface/src/connection.ts @@ -1,179 +1,29 @@ -import type { AbortOptions, Logger } from './index.js' -import type { PeerId } from './peer-id.js' +import type { AbortOptions, Logger, TypedEventTarget, Stream, MessageStreamEvents, PeerId, MultiaddrConnectionDirection, MultiaddrConnectionTimeline, MessageStreamStatus } from './index.js' import type { Multiaddr } from '@multiformats/multiaddr' -import type { Duplex, Source } from 'it-stream-types' -import type { Uint8ArrayList } from 'uint8arraylist' -export interface ConnectionTimeline { - /** - * When the connection was opened - */ - open: number - - /** - * When the MultiaddrConnection was upgraded to a Connection - e.g. the type - * of connection encryption and multiplexing was negotiated. - */ - upgraded?: number - - /** - * When the connection was closed. 
- */ - close?: number -} +export type ConnectionStatus = MessageStreamStatus /** - * Outbound connections are opened by the local node, inbound streams are opened by the remote - */ -export type Direction = 'inbound' | 'outbound' - -export interface StreamTimeline { - /** - * A timestamp of when the stream was opened - */ - open: number - - /** - * A timestamp of when the stream was closed for both reading and writing - */ - close?: number - - /** - * A timestamp of when the stream was closed for reading - */ - closeRead?: number - - /** - * A timestamp of when the stream was closed for writing - */ - closeWrite?: number - - /** - * A timestamp of when the stream was reset - */ - reset?: number - - /** - * A timestamp of when the stream was aborted - */ - abort?: number -} - -/** - * The states a stream can be in - */ -export type StreamStatus = 'open' | 'closing' | 'closed' | 'aborted' | 'reset' - -/** - * The states the readable end of a stream can be in - * - * ready - the readable end is ready for reading - * closing - the readable end is closing - * closed - the readable end has closed - */ -export type ReadStatus = 'ready' | 'closing' | 'closed' - -/** - * The states the writable end of a stream can be in - * - * ready - the writable end is ready for writing - * writing - the writable end is in the process of being written to - * done - the source passed to the `.sink` function yielded all values without error - * closing - the writable end is closing - * closed - the writable end has closed - */ -export type WriteStatus = 'ready' | 'writing' | 'done' | 'closing' | 'closed' - -/** - * A Stream is a data channel between two peers that - * can be written to and read from at both ends. + * Connection limits are present on connections that are only allowed to + * transfer a certain amount of bytes or be open for a certain number + * of seconds. * - * It may be encrypted and multiplexed depending on the - * configuration of the nodes. + * These limits are applied by Circuit Relay v2 servers, for example and + * the connection will normally be closed abruptly if the limits are + * exceeded. */ -export interface Stream extends Duplex, Source, Promise> { - /** - * Closes the stream for **reading** *and* **writing**. - * - * Any buffered data in the source can still be consumed and the stream will end normally. - * - * This will cause a `CLOSE` message to be sent to the remote, *unless* the sink has already ended. - * - * The sink and the source will return normally. - */ - close(options?: AbortOptions): Promise - - /** - * Closes the stream for **reading**. If iterating over the source of this stream in a `for await of` loop, it will return (exit the loop) after any buffered data has been consumed. - * - * This function is called automatically by the muxer when it receives a `CLOSE` message from the remote. - * - * The source will return normally, the sink will continue to consume. - */ - closeRead(options?: AbortOptions): Promise - - /** - * Closes the stream for **writing**. If iterating over the source of this stream in a `for await of` loop, it will return (exit the loop) after any buffered data has been consumed. - * - * The source will return normally, the sink will continue to consume. - */ - closeWrite(options?: AbortOptions): Promise - - /** - * Closes the stream for **reading** *and* **writing**. This should be called when a *local error* has occurred. - * - * Note, if called without an error any buffered data in the source can still be consumed and the stream will end normally. 
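Note: the new `connection-protector.ts` file above has `ConnectionProtector.protect()` accept any `MessageStream` rather than a `MultiaddrConnection`. A rough usage sketch, assuming (as in previous releases) that `protect()` resolves to the wrapped, private-network-encrypted stream; the helper and timeout are illustrative:

```ts
import type { ConnectionProtector, MessageStream } from '@libp2p/interface'

// Wrap a raw transport stream in pre-shared-key encryption before the rest of
// the connection upgrade runs. `protector` would come from the node's
// configuration, e.g. a PSK protector such as the one provided by @libp2p/pnet.
async function applyProtector (
  protector: ConnectionProtector,
  maConn: MessageStream
): Promise<MessageStream> {
  return protector.protect(maConn, { signal: AbortSignal.timeout(5_000) })
}
```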
- * - * This will cause a `RESET` message to be sent to the remote, *unless* the sink has already ended. - * - * The sink will return and the source will throw. - */ - abort(err: Error): void - - /** - * Unique identifier for a stream. Identifiers are not unique across muxers. - */ - id: string - - /** - * Outbound streams are opened by the local node, inbound streams are opened by the remote - */ - direction: Direction - - /** - * Lifecycle times for the stream - */ - timeline: StreamTimeline - - /** - * The protocol negotiated for this stream - */ - protocol?: string - - /** - * User defined stream metadata - */ - metadata: Record - - /** - * The current status of the stream - */ - status: StreamStatus - - /** - * The current status of the readable end of the stream - */ - readStatus: ReadStatus - +export interface ConnectionLimits { /** - * The current status of the writable end of the stream + * If present this is the number of bytes remaining that may be + * transferred over this connection */ - writeStatus: WriteStatus + bytes?: bigint /** - * The stream logger + * If present this is the number of seconds that this connection will + * remain open for */ - log: Logger + seconds?: number } export interface NewStreamOptions extends AbortOptions { @@ -186,10 +36,11 @@ export interface NewStreamOptions extends AbortOptions { /** * Opt-in to running over a limited connection - one that has restrictions - * on the amount of data that may be transferred or how long it may be open for. + * on the amount of data that may be transferred or how long it may be open + * for. * - * These limits are typically enforced by a relay server, if the protocol - * will be transferring a lot of data or the stream will be open for a long time + * These limits are typically enforced by a relay server, if the protocol will + * be transferring a lot of data or the stream will be open for a long time * consider upgrading to a direct connection before opening the stream. * * @default false @@ -224,38 +75,13 @@ export interface NewStreamOptions extends AbortOptions { negotiateFully?: boolean } -export type ConnectionStatus = 'open' | 'closing' | 'closed' - -/** - * Connection limits are present on connections that are only allowed to - * transfer a certain amount of bytes or be open for a certain number - * of seconds. - * - * These limits are applied by Circuit Relay v2 servers, for example and - * the connection will normally be closed abruptly if the limits are - * exceeded. - */ -export interface ConnectionLimits { - /** - * If present this is the number of bytes remaining that may be - * transferred over this connection - */ - bytes?: bigint - - /** - * If present this is the number of seconds that this connection will - * remain open for - */ - seconds?: number -} - /** * A Connection is a high-level representation of a connection * to a remote peer that may have been secured by encryption and * multiplexed, depending on the configuration of the nodes * between which the connection is made. 
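Note: `ConnectionLimits` (relocated in this hunk) describes relayed connections that may only carry a bounded number of bytes or stay open for a bounded number of seconds. A sketch of checking those limits before opening a long-lived stream; the `limits` property on `Connection` and the `runOnLimitedConnection` option name are assumed from current js-libp2p rather than shown in this hunk:

```ts
import type { Connection, Stream } from '@libp2p/interface'

// Only opt in to running over a limited (e.g. circuit relay v2) connection
// when the protocol is known to be short-lived and low-bandwidth.
async function openChatStream (connection: Connection): Promise<Stream> {
  // `limits` is assumed to be populated by e.g. circuit relay servers
  const limited = connection.limits != null

  if (limited) {
    connection.log('limited connection: %s bytes / %s seconds remaining',
      connection.limits?.bytes ?? 'unbounded',
      connection.limits?.seconds ?? 'unbounded')
  }

  return connection.newStream('/chat/1.0.0', {
    runOnLimitedConnection: limited // assumed option name
  })
}
```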
*/ -export interface Connection { +export interface Connection extends TypedEventTarget> { /** * The unique identifier for this connection */ @@ -271,11 +97,6 @@ export interface Connection { */ remotePeer: PeerId - /** - * A list of tags applied to this connection - */ - tags: string[] - /** * A list of open streams on this connection */ @@ -284,12 +105,12 @@ export interface Connection { /** * Outbound connections are opened by the local node, inbound streams are opened by the remote */ - direction: Direction + direction: MultiaddrConnectionDirection /** - * Lifecycle times for the connection + * When stream life cycle events occurred */ - timeline: ConnectionTimeline + timeline: MultiaddrConnectionTimeline /** * The multiplexer negotiated for this connection @@ -320,6 +141,11 @@ export interface Connection { */ rtt?: number + /** + * The connection logger + */ + log: Logger + /** * Create a new stream on this connection and negotiate one of the passed protocols */ @@ -335,11 +161,6 @@ export interface Connection { * Immediately close the connection, any queued data will be discarded */ abort(err: Error): void - - /** - * The connection logger - */ - log: Logger } export const connectionSymbol = Symbol.for('@libp2p/connection') @@ -347,63 +168,3 @@ export const connectionSymbol = Symbol.for('@libp2p/connection') export function isConnection (other: any): other is Connection { return other != null && Boolean(other[connectionSymbol]) } - -export interface ConnectionProtector { - /** - * Takes a given Connection and creates a private encryption stream - * between its two peers from the PSK the Protector instance was - * created with. - */ - protect(connection: MultiaddrConnection, options?: AbortOptions): Promise -} - -export interface MultiaddrConnectionTimeline { - /** - * When the connection was opened - */ - open: number - - /** - * When the MultiaddrConnection was upgraded to a Connection - the type of - * connection encryption and multiplexing was negotiated. - */ - upgraded?: number - - /** - * When the connection was closed. - */ - close?: number -} - -/** - * A MultiaddrConnection is returned by transports after dialing - * a peer. It is a low-level primitive and is the raw connection - * without encryption or stream multiplexing. - */ -export interface MultiaddrConnection extends Duplex> { - /** - * Gracefully close the connection. All queued data will be written to the - * underlying transport. 
- */ - close(options?: AbortOptions): Promise - - /** - * Immediately close the connection, any queued data will be discarded - */ - abort(err: Error): void - - /** - * The address of the remote end of the connection - */ - remoteAddr: Multiaddr - - /** - * When connection life cycle events occurred - */ - timeline: MultiaddrConnectionTimeline - - /** - * The multiaddr connection logger - */ - log: Logger -} diff --git a/packages/interface/src/errors.ts b/packages/interface/src/errors.ts index b5ff2bd497..aca3b6227d 100644 --- a/packages/interface/src/errors.ts +++ b/packages/interface/src/errors.ts @@ -132,6 +132,18 @@ export class MuxerClosedError extends Error { } } +/** + * Thrown when a protocol stream is closed during an operation + */ +export class StreamClosedError extends Error { + static name = 'StreamClosedError' + + constructor (message = 'The stream has been closed') { + super(message) + this.name = 'StreamClosedError' + } +} + /** * Thrown when a protocol stream is reset by the remote muxer */ @@ -156,6 +168,18 @@ export class StreamStateError extends Error { } } +/** + * Thrown when a stream buffer is full + */ +export class StreamBufferError extends Error { + static name = 'StreamBufferError' + + constructor (message = 'The stream buffer was full') { + super(message) + this.name = 'StreamBufferError' + } +} + /** * Thrown when a value could not be found */ diff --git a/packages/interface/src/events.ts b/packages/interface/src/events.ts new file mode 100644 index 0000000000..9ffff8a6b5 --- /dev/null +++ b/packages/interface/src/events.ts @@ -0,0 +1,44 @@ +import type { Uint8ArrayList } from 'uint8arraylist' + +/** + * A custom implementation of MessageEvent as the Undici version does too much + * validation in it's constructor so is very slow. + */ +export class StreamMessageEvent extends Event { + public data: Uint8Array | Uint8ArrayList + + constructor (data: Uint8Array | Uint8ArrayList, eventInitDict?: EventInit) { + super('message', eventInitDict) + + this.data = data + } +} + +/** + * An event dispatched when the stream is closed. 
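Note: the new `StreamClosedError` and `StreamBufferError` classes line up with the `MessageStream.send()` contract described further down in this diff (sending can fail when the stream is closed for writing or its buffer is full). A defensive-send sketch; that `send()` throws exactly these classes is an assumption based on those doc comments:

```ts
import { StreamBufferError, StreamClosedError } from '@libp2p/interface'
import type { MessageStream } from '@libp2p/interface'

// Attempt to send and report, rather than crash on, the two documented
// failure modes. Returns send()'s backpressure result, or false on failure.
function trySend (stream: MessageStream, data: Uint8Array): boolean {
  try {
    return stream.send(data)
  } catch (err) {
    if (err instanceof StreamClosedError) {
      stream.log('stream is already closed for writing')
    } else if (err instanceof StreamBufferError) {
      stream.log('send buffer is full, wait for the "drain" event')
    } else {
      throw err
    }

    return false
  }
}
```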
The `error` property can be + * inspected to discover if the closing was graceful or not, and the `remote` + * property shows which end of the stream initiated the closure + */ +export class StreamCloseEvent extends Event { + public error?: Error + public local?: boolean + + constructor (local?: boolean, error?: Error, eventInitDict?: EventInit) { + super('close', eventInitDict) + + this.error = error + this.local = local + } +} + +export class StreamAbortEvent extends StreamCloseEvent { + constructor (error: Error, eventInitDict?: EventInit) { + super(true, error, eventInitDict) + } +} + +export class StreamResetEvent extends StreamCloseEvent { + constructor (error: Error, eventInitDict?: EventInit) { + super(false, error, eventInitDict) + } +} diff --git a/packages/interface/src/index.ts b/packages/interface/src/index.ts index a668840097..33f21394cc 100644 --- a/packages/interface/src/index.ts +++ b/packages/interface/src/index.ts @@ -14,7 +14,7 @@ * ``` */ -import type { Connection, NewStreamOptions, Stream } from './connection.js' +import type { Connection, NewStreamOptions } from './connection.js' import type { ContentRouting } from './content-routing.js' import type { Ed25519PublicKey, PublicKey, RSAPublicKey, Secp256k1PublicKey } from './keys.js' import type { Metrics } from './metrics.js' @@ -24,6 +24,7 @@ import type { PeerRouting } from './peer-routing.js' import type { Address, Peer, PeerStore } from './peer-store.js' import type { Startable } from './startable.js' import type { StreamHandler, StreamHandlerOptions } from './stream-handler.js' +import type { Stream } from './stream.js' import type { Topology } from './topology.js' import type { Listener, OutboundConnectionUpgradeEvents } from './transport.js' import type { DNS } from '@multiformats/dns' @@ -915,9 +916,14 @@ export const serviceDependencies = Symbol.for('@libp2p/service-dependencies') export * from './connection.js' export * from './connection-encrypter.js' export * from './connection-gater.js' +export * from './connection-protector.js' export * from './content-routing.js' +export * from './errors.js' +export * from './events.js' export * from './keys.js' +export * from './message-stream.js' export * from './metrics.js' +export * from './multiaddr-connection.js' export * from './peer-discovery.js' export * from './peer-id.js' export * from './peer-info.js' @@ -925,10 +931,11 @@ export * from './peer-routing.js' export * from './peer-store.js' export * from './pubsub.js' export * from './record.js' +export * from './startable.js' export * from './stream-handler.js' export * from './stream-muxer.js' +export * from './stream.js' export * from './topology.js' export * from './transport.js' -export * from './errors.js' + export * from 'main-event' -export * from './startable.js' diff --git a/packages/interface/src/message-stream.ts b/packages/interface/src/message-stream.ts new file mode 100644 index 0000000000..4af5cd56eb --- /dev/null +++ b/packages/interface/src/message-stream.ts @@ -0,0 +1,241 @@ +import type { Logger, StreamCloseEvent, StreamMessageEvent, TypedEventTarget } from './index.js' +import type { AbortOptions } from '@multiformats/multiaddr' +import type { Uint8ArrayList } from 'uint8arraylist' + +/** + * The direction of the message stream + */ +export type MessageStreamDirection = 'inbound' | 'outbound' + +/** + * The states a message stream can be in + */ +export type MessageStreamStatus = 'open' | 'closing' | 'closed' | 'aborted' | 'reset' + +/** + * The states the readable end of a message stream 
can be in + */ +export type MessageStreamReadStatus = 'readable' | 'paused' | 'closing' | 'closed' + +/** + * The states the writable end of a message stream can be in + */ +export type MessageStreamWriteStatus = 'writable' | 'paused' | 'closing' | 'closed' + +/** + * An object that records the times of various events + */ +export interface MessageStreamTimeline { + /** + * A timestamp of when the message stream was opened + */ + open: number + + /** + * A timestamp of when the message stream was closed for both reading and + * writing by both ends of the stream + */ + close?: number + + /** + * A timestamp of when the message stream was reset + */ + reset?: number + + /** + * A timestamp of when the message stream was aborted + */ + abort?: number + + /** + * A timestamp of when the stream was closed for reading + */ + closeRead?: number + + /** + * A timestamp of when the stream was closed for writing + */ + closeWrite?: number + + /** + * A timestamp of when the remote stream was closed for reading + */ + remoteCloseRead?: number + + /** + * A timestamp of when the remote stream was closed for writing + */ + remoteCloseWrite?: number +} + +export interface MessageStreamEvents { + /** + * Data was received from the remote end of the message stream + */ + message: StreamMessageEvent + + /** + * The local send buffer can now accept new data + */ + drain: Event + + /** + * Both ends of the closed their writable ends. + * + * The `local` property of the `StreamCloseEvent` can be used to detect + * whether the close event was initiated locally or remotely, and the `error` + * property can be used to tell if the stream closed gracefully or not. + * + * No further events will be emitted and the stream cannot be used to send or + * receive any more data. + */ + close: StreamCloseEvent + + /** + * The readable end of the stream closed gracefully + */ + closeRead: Event + + /** + * The writable end of the stream closed gracefully + */ + closeWrite: Event + + /** + * The remote closed it's readable end of the stream + */ + remoteCloseRead: Event + + /** + * The remote closed it's writable end of the stream + */ + remoteCloseWrite: Event +} + +export interface MessageStream extends TypedEventTarget, AsyncIterable { + /** + * Timestamps of when stream events occurred + */ + timeline: MessageStreamTimeline + + /** + * A logging implementation that can be used to log stream-specific messages + */ + log: Logger + + /** + * Whether this stream is inbound or outbound + */ + direction: MessageStreamDirection + + /** + * The current status of the message stream + */ + status: MessageStreamStatus + + /** + * The current status of the readable end of the stream + */ + readStatus: MessageStreamReadStatus + + /** + * The current status of the writable end of the stream + */ + writeStatus: MessageStreamWriteStatus + + /** + * The current status of the readable end of the stream + */ + remoteReadStatus: MessageStreamReadStatus + + /** + * The current status of the writable end of the stream + */ + remoteWriteStatus: MessageStreamWriteStatus + + /** + * The maximum number of bytes to store when paused. If receipt of more bytes + * from the remote end of the stream causes the buffer size to exceed this + * value the stream will be reset and an 'error' event emitted. + */ + maxPauseBufferLength: number + + /** + * If no data is transmitted over the stream in this many ms, the stream will + * be aborted with an InactivityTimeoutError + */ + inactivityTimeout: number + + /** + * Write data to the stream. 
If the method returns false it means the + * internal buffer is now full and the caller should wait for the 'drain' + * event before sending more data. + * + * This method may throw if: + * - The internal send buffer is full + * - The stream has previously been closed for writing locally or remotely + */ + send (data: Uint8Array | Uint8ArrayList): boolean + + /** + * Immediately close the stream for reading and writing, discard any + * unsent/unread data, and emit a StreamAbortEvent event. + */ + abort (err: Error): void + + /** + * Gracefully close the stream for reading and writing - any further calls to + * `.send` will throw. + * + * The returned promise will resolve when any outstanding data has been + * written out into the underlying resource. + * + * A 'close' event will be emitted on the stream once any buffered data has + * been sent and the remote end has also closed for writing. + * + * To close the stream immediately call `.abort` instead. + close (options?: AbortOptions): Promise + */ + + /** + * Sends a message to the remote informing them we will not read any more data + * from the stream. + * + * If the writable end of the stream is already closed, a 'close' event will + * be emitted on the stream. + */ + closeRead (options?: AbortOptions): Promise + + /** + * Gracefully close the stream for writing - any outstanding data will be sent + * to the remote and any further calls to `.send` will throw. + * + * If the readable end of the stream is already closed, a 'close' event will + * be emitted on the stream once any buffered data has been sent. + */ + closeWrite (options?: AbortOptions): Promise + + /** + * Stop emitting further 'message' events. Any received data will be stored in + * an internal buffer. If the buffer size reaches `maxPauseBufferLength`, the + * stream will be reset and a StreamAbortEvent emitted. + * + * If the underlying resource supports it, the remote peer will be instructed + * to pause transmission of further data. + */ + pause (): void + + /** + * Resume emitting 'message' events. + * + * If the underlying resource supports it, the remote peer will be informed + * that it is ok to start sending data again. + */ + resume (): void + + /** + * Queue the passed data to be emitted as a 'message' event either during the + * next tick or sooner if data is received from the underlying resource. + */ + push (buf: Uint8Array | Uint8ArrayList): void +} diff --git a/packages/interface/src/metrics.ts b/packages/interface/src/metrics.ts index 11cbbfab79..c277843a76 100644 --- a/packages/interface/src/metrics.ts +++ b/packages/interface/src/metrics.ts @@ -1,4 +1,4 @@ -import type { MultiaddrConnection, Stream, Connection } from './connection.js' +import type { MultiaddrConnection, Stream } from './index.js' /** * Create tracked metrics with these options. Loosely based on the @@ -434,7 +434,7 @@ export interface Metrics { /** * Track a newly opened protocol stream */ - trackProtocolStream(stream: Stream, connection: Connection): void + trackProtocolStream(stream: Stream): void /** * Register an arbitrary metric. 
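Note: `MessageStream` replaces the old duplex, async-iterable stream with an event-driven `send()`/`'message'` API. A usage sketch tying the pieces above together; the echo behaviour is illustrative only, and the backpressure handling assumes Node-style semantics where `send()` returning `false` means "accepted, but stop sending until `'drain'`":

```ts
import type { MessageStream } from '@libp2p/interface'
import type { Uint8ArrayList } from 'uint8arraylist'

// Echo every received message back to the sender, respecting backpressure,
// and log any abnormal close.
function echo (stream: MessageStream): void {
  const queue: Array<Uint8Array | Uint8ArrayList> = []

  const flush = (): void => {
    while (queue.length > 0) {
      const data = queue.shift()

      if (data == null) {
        return
      }

      // send() returning false means the internal buffer is now full, so stop
      // and wait for the 'drain' event before sending anything else
      if (!stream.send(data)) {
        return
      }
    }
  }

  stream.addEventListener('message', (evt) => {
    queue.push(evt.data)
    flush()
  })

  stream.addEventListener('drain', flush)

  stream.addEventListener('close', (evt) => {
    if (evt.error != null) {
      stream.log.error('stream closed abnormally - %e', evt.error)
    }
  })
}
```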
Call this to set help/labels for metrics diff --git a/packages/interface/src/multiaddr-connection.ts b/packages/interface/src/multiaddr-connection.ts new file mode 100644 index 0000000000..3cdcd2d6d8 --- /dev/null +++ b/packages/interface/src/multiaddr-connection.ts @@ -0,0 +1,34 @@ +import type { MessageStream, MessageStreamTimeline } from './message-stream.ts' +import type { Multiaddr } from '@multiformats/multiaddr' + +export type MultiaddrConnectionDirection = 'inbound' | 'outbound' + +export interface MultiaddrConnectionTimeline extends MessageStreamTimeline { + /** + * When the MultiaddrConnection was upgraded to a Connection - the type of + * connection encryption and multiplexing was negotiated. + */ + upgraded?: number +} + +/** + * A MultiaddrConnection is returned by transports after dialing a peer. It is a + * low-level primitive and is the raw connection without encryption or stream + * multiplexing. + */ +export interface MultiaddrConnection extends MessageStream { + /** + * The address of the remote end of the connection + */ + remoteAddr: Multiaddr + + /** + * When stream life cycle events occurred + */ + timeline: MultiaddrConnectionTimeline + + /** + * Whether this connection is inbound or outbound + */ + direction: MultiaddrConnectionDirection +} diff --git a/packages/interface/src/pubsub.ts b/packages/interface/src/pubsub.ts index c003e59214..257eec8cc3 100644 --- a/packages/interface/src/pubsub.ts +++ b/packages/interface/src/pubsub.ts @@ -1,6 +1,4 @@ -import type { Stream } from './connection.js' -import type { PublicKey } from './keys.js' -import type { PeerId } from './peer-id.js' +import type { Stream, PublicKey, PeerId } from './index.js' import type { Pushable } from 'it-pushable' import type { TypedEventTarget } from 'main-event' import type { Uint8ArrayList } from 'uint8arraylist' diff --git a/packages/interface/src/stream-handler.ts b/packages/interface/src/stream-handler.ts index f6e362a3cb..9cefd09e59 100644 --- a/packages/interface/src/stream-handler.ts +++ b/packages/interface/src/stream-handler.ts @@ -1,23 +1,10 @@ -import type { Connection, Stream } from './connection.js' -import type { AbortOptions } from './index.ts' - -export interface IncomingStreamData { - /** - * The newly opened stream - */ - stream: Stream - - /** - * The connection the stream was opened on - */ - connection: Connection -} +import type { AbortOptions, Connection, Stream } from './index.ts' export interface StreamHandler { /** * A callback function that accepts the incoming stream data */ - (data: IncomingStreamData): void | Promise + (stream: Stream, connection: Connection): void | Promise } export interface StreamHandlerOptions extends AbortOptions { diff --git a/packages/interface/src/stream-muxer.ts b/packages/interface/src/stream-muxer.ts index fe75df5a51..4db8c3da1a 100644 --- a/packages/interface/src/stream-muxer.ts +++ b/packages/interface/src/stream-muxer.ts @@ -1,9 +1,7 @@ -import type { Direction, Stream } from './connection.js' -import type { AbortOptions, Logger } from './index.js' -import type { Duplex } from 'it-stream-types' -import type { Uint8ArrayList } from 'uint8arraylist' +import type { Stream, TypedEventTarget, MessageStream } from './index.js' +import type { AbortOptions } from '@multiformats/multiaddr' -export interface StreamMuxerFactory { +export interface StreamMuxerFactory { /** * The protocol used to select this muxer during connection opening */ @@ -12,57 +10,62 @@ export interface StreamMuxerFactory { /** * Creates a new stream muxer to be used with 
a new connection */ - createStreamMuxer(init?: StreamMuxerInit): StreamMuxer + createStreamMuxer(maConn: MessageStream): Muxer } -/** - * A libp2p stream muxer - */ -export interface StreamMuxer extends Duplex> { +export interface StreamMuxerEvents { /** - * The protocol used to select this muxer during connection opening + * An incoming stream was created */ - protocol: string + stream: CustomEvent +} +export interface CreateStreamOptions extends AbortOptions { /** - * A list of streams that are currently open. Closed streams will not be returned. - */ - readonly streams: Stream[] - /** - * Initiate a new stream with the given name. If no name is - * provided, the id of the stream will be used. + * If a single protocol was requested and the muxer has support for this, + * pre-negotiate the protocol using this value, otherwise multistream-select + * will be run over the stream after opening. */ - newStream(name?: string): Stream | Promise + protocol?: string +} + +export type StreamMuxerStatus = 'open' | 'closing' | 'closed' +/** + * A libp2p stream muxer + */ +export interface StreamMuxer extends TypedEventTarget> { /** - * Close or abort all tracked streams and stop the muxer + * The protocol used to select this muxer during connection opening */ - close(options?: AbortOptions): Promise + protocol: string /** - * Close or abort all tracked streams and stop the muxer + * A list of streams that are currently open */ - abort(err: Error): void -} + streams: MuxedStream[] -export interface StreamMuxerInit { /** - * A callback function invoked every time an incoming stream is opened + * The status of the muxer */ - onIncomingStream?(stream: Stream): void + status: StreamMuxerStatus /** - * A callback function invoke every time a stream ends + * Create a new stream */ - onStreamEnd?(stream: Stream): void + createStream(options?: CreateStreamOptions): MuxedStream | Promise /** - * Outbound stream muxers are opened by the local node, inbound stream muxers are opened by the remote + * Immediately close the muxer, abort every open stream and discard any + * unsent/unread data. */ - direction?: Direction + abort (err: Error): void /** - * The logger used by the connection + * Gracefully close the muxer. All open streams will be gracefully closed, and + * the returned promise will either resolve when any/all unsent data has been + * sent, or it will reject if the passed abort signal fires before this + * happens. */ - log?: Logger + close (options?: AbortOptions): Promise } diff --git a/packages/interface/src/stream.ts b/packages/interface/src/stream.ts new file mode 100644 index 0000000000..a59cf4b3ae --- /dev/null +++ b/packages/interface/src/stream.ts @@ -0,0 +1,20 @@ +import type { MessageStream } from './message-stream.js' + +/** + * A Stream is a lightweight data channel between two peers that can be written + * to and read from at both ends. + * + * It may be encrypted and multiplexed depending on the configuration of the + * nodes. + */ +export interface Stream extends MessageStream { + /** + * Unique identifier for a stream. Identifiers are not unique across muxers. 
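Note: `StreamMuxerFactory.createStreamMuxer()` now wraps a `MessageStream` directly, and inbound streams surface as `'stream'` events rather than through an `onIncomingStream` callback. A sketch of driving a muxer via the revised interface; the generic defaults, protocol string and helper are assumed for illustration:

```ts
import type { MessageStream, Stream, StreamMuxerFactory } from '@libp2p/interface'

// Multiplex an already-secured connection: log inbound streams as they arrive
// and open a single outbound stream, pre-negotiating its protocol where the
// muxer supports that.
async function multiplex (muxerFactory: StreamMuxerFactory, maConn: MessageStream): Promise<Stream> {
  const muxer = muxerFactory.createStreamMuxer(maConn)

  muxer.addEventListener('stream', (evt) => {
    // the CustomEvent detail is assumed to carry the new inbound stream
    evt.detail.log('new inbound stream %s', evt.detail.id)
  })

  return muxer.createStream({
    protocol: '/my/protocol/1.0.0'
  })
}
```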
+ */ + id: string + + /** + * The protocol negotiated for this stream + */ + protocol: string +} diff --git a/packages/interface/src/transport.ts b/packages/interface/src/transport.ts index 59b8498692..c00e780404 100644 --- a/packages/interface/src/transport.ts +++ b/packages/interface/src/transport.ts @@ -1,6 +1,4 @@ -import type { Connection, ConnectionLimits, MultiaddrConnection } from './connection.js' -import type { AbortOptions, ClearableSignal, ConnectionEncrypter } from './index.js' -import type { StreamMuxerFactory } from './stream-muxer.js' +import type { AbortOptions, ClearableSignal, ConnectionEncrypter, MultiaddrConnection, Connection, ConnectionLimits, StreamMuxerFactory, PeerId } from './index.js' import type { Multiaddr } from '@multiformats/multiaddr' import type { TypedEventTarget } from 'main-event' import type { ProgressOptions, ProgressEvent } from 'progress-events' @@ -29,16 +27,19 @@ export interface Listener extends TypedEventTarget { * Start a listener */ listen(multiaddr: Multiaddr): Promise + /** * Get listen addresses */ getAddrs(): Multiaddr[] + /** * Close listener * * @returns {Promise} */ close(): Promise + /** * Allows transports to amend announce addresses - to add certificate hashes * or other metadata that cannot be known before runtime @@ -134,15 +135,6 @@ export enum FaultTolerance { * Options accepted by the upgrader during connection establishment */ export interface UpgraderOptions extends ProgressOptions, Required { - /** - * If true the invoking transport is expected to implement it's own encryption - * and an encryption protocol will not attempted to be negotiated via - * multi-stream select - * - * @default false - */ - skipEncryption?: boolean - /** * If true no connection protection will be performed on the connection. 
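Note: `StreamHandler` (see the `stream-handler.ts` hunk earlier in this diff) now receives the stream and its connection as two arguments instead of one `IncomingStreamData` object, so protocol registration looks roughly like the following; the registrar variable and protocol name are illustrative:

```ts
import type { StreamHandler } from '@libp2p/interface'
import type { Registrar } from '@libp2p/interface-internal'

const handler: StreamHandler = (stream, connection) => {
  stream.log('incoming %s stream from %p', stream.protocol, connection.remotePeer)

  stream.addEventListener('message', (evt) => {
    // handle evt.data ...
  })
}

// `registrar` is assumed to be the node's internal Registrar instance
async function register (registrar: Registrar): Promise<void> {
  await registrar.handle('/my/protocol/1.0.0', handler)
}
```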
*/ @@ -172,6 +164,23 @@ export interface UpgraderOptions | ProgressEvent<'upgrader:multiplex-inbound-connection'> @@ -184,13 +193,15 @@ export interface Upgrader { /** * Upgrades an outbound connection created by the `dial` method of a transport */ - upgradeOutbound(maConn: MultiaddrConnection, opts?: UpgraderOptions): Promise + upgradeOutbound(maConn: MultiaddrConnection, opts: UpgraderOptions): Promise + upgradeOutbound(maConn: MultiaddrConnection, opts: UpgraderWithoutEncryptionOptions): Promise /** * Upgrades an inbound connection received by a transport listener and * notifies other libp2p components about the new connection */ - upgradeInbound(maConn: MultiaddrConnection, opts?: UpgraderOptions): Promise + upgradeInbound(maConn: MultiaddrConnection, opts: UpgraderOptions): Promise + upgradeInbound(maConn: MultiaddrConnection, opts: UpgraderWithoutEncryptionOptions): Promise /** * Used by transports that perform part of the upgrade process themselves and diff --git a/packages/kad-dht/package.json b/packages/kad-dht/package.json index fecb962b9d..f40b50006a 100644 --- a/packages/kad-dht/package.json +++ b/packages/kad-dht/package.json @@ -65,7 +65,6 @@ "it-merge": "^3.0.11", "it-parallel": "^3.0.11", "it-pipe": "^3.0.1", - "it-protobuf-stream": "^2.0.2", "it-pushable": "^3.2.3", "it-take": "^3.0.8", "main-event": "^1.0.1", @@ -80,7 +79,6 @@ "uint8arrays": "^5.1.0" }, "devDependencies": { - "@libp2p/interface-compliance-tests": "^6.4.16", "@libp2p/logger": "^5.1.21", "@libp2p/peer-store": "^11.2.6", "@types/lodash.random": "^3.2.9", @@ -95,8 +93,6 @@ "it-filter": "^3.1.3", "it-last": "^3.0.8", "it-length-prefixed": "^10.0.1", - "it-pair": "^2.0.6", - "it-stream-types": "^2.0.2", "lodash.random": "^3.2.0", "lodash.range": "^3.2.0", "p-retry": "^6.2.1", diff --git a/packages/kad-dht/src/content-routing/index.ts b/packages/kad-dht/src/content-routing/index.ts index abaf1137df..6e30bdba06 100644 --- a/packages/kad-dht/src/content-routing/index.ts +++ b/packages/kad-dht/src/content-routing/index.ts @@ -1,5 +1,5 @@ import { PeerSet } from '@libp2p/peer-collections' -import { Queue } from '@libp2p/utils/queue' +import { Queue } from '@libp2p/utils' import { pushable } from 'it-pushable' import { ALPHA } from '../constants.js' import { MessageType } from '../message/dht.js' diff --git a/packages/kad-dht/src/index.ts b/packages/kad-dht/src/index.ts index 4f556bee0e..16c2409da1 100644 --- a/packages/kad-dht/src/index.ts +++ b/packages/kad-dht/src/index.ts @@ -138,7 +138,7 @@ import { removePrivateAddressesMapper, removePublicAddressesMapper, passthroughM import type { Libp2pEvents, ComponentLogger, Metrics, PeerId, PeerInfo, PeerStore, RoutingOptions, PrivateKey, AbortOptions } from '@libp2p/interface' import type { AddressManager, ConnectionManager, Registrar } from '@libp2p/interface-internal' import type { Ping } from '@libp2p/ping' -import type { AdaptiveTimeoutInit } from '@libp2p/utils/adaptive-timeout' +import type { AdaptiveTimeoutInit } from '@libp2p/utils' import type { Datastore } from 'interface-datastore' import type { TypedEventTarget } from 'main-event' import type { CID } from 'multiformats/cid' diff --git a/packages/kad-dht/src/network.ts b/packages/kad-dht/src/network.ts index 888ac53053..3e4b505a9d 100644 --- a/packages/kad-dht/src/network.ts +++ b/packages/kad-dht/src/network.ts @@ -1,7 +1,6 @@ import { InvalidParametersError } from '@libp2p/interface' import { Libp2pRecord } from '@libp2p/record' -import { AdaptiveTimeout } from '@libp2p/utils/adaptive-timeout' -import { 
pbStream } from 'it-protobuf-stream' +import { AdaptiveTimeout, pbStream } from '@libp2p/utils' import { TypedEventEmitter } from 'main-event' import { Message } from './message/dht.js' import { fromPbPeerInfo } from './message/utils.js' @@ -13,7 +12,7 @@ import { } from './query/events.js' import type { DisjointPath, KadDHTComponents, QueryEvent } from './index.js' import type { AbortOptions, Logger, Stream, PeerId, PeerInfo, Startable, RoutingOptions, CounterGroup } from '@libp2p/interface' -import type { AdaptiveTimeoutInit } from '@libp2p/utils/adaptive-timeout' +import type { AdaptiveTimeoutInit } from '@libp2p/utils' export interface NetworkInit { protocol: string @@ -186,7 +185,7 @@ export class Network extends TypedEventEmitter implements Startab const response = await this._writeReadMessage(stream, msg, options) - stream.close(options) + stream.closeWrite(options) .catch(err => { this.log.error('error closing stream to %p', to, err) stream?.abort(err) @@ -253,7 +252,7 @@ export class Network extends TypedEventEmitter implements Startab await this._writeMessage(stream, msg, options) - stream.close(options) + stream.closeWrite(options) .catch(err => { this.log.error('error closing stream to %p', to, err) stream?.abort(err) diff --git a/packages/kad-dht/src/query/manager.ts b/packages/kad-dht/src/query/manager.ts index cd1c7698b7..34b8a55ff0 100644 --- a/packages/kad-dht/src/query/manager.ts +++ b/packages/kad-dht/src/query/manager.ts @@ -1,4 +1,4 @@ -import { createScalableCuckooFilter } from '@libp2p/utils/filters' +import { createScalableCuckooFilter } from '@libp2p/utils' import { anySignal } from 'any-signal' import merge from 'it-merge' import { setMaxListeners } from 'main-event' diff --git a/packages/kad-dht/src/query/query-path.ts b/packages/kad-dht/src/query/query-path.ts index baab9f0447..bb35475cc2 100644 --- a/packages/kad-dht/src/query/query-path.ts +++ b/packages/kad-dht/src/query/query-path.ts @@ -1,5 +1,5 @@ import { AbortError } from '@libp2p/interface' -import { Queue } from '@libp2p/utils/queue' +import { Queue } from '@libp2p/utils' import { pushable } from 'it-pushable' import { xor as uint8ArrayXor } from 'uint8arrays/xor' import { xorCompare as uint8ArrayXorCompare } from 'uint8arrays/xor-compare' @@ -9,7 +9,7 @@ import type { QueryEvent } from '../index.js' import type { QueryFunc } from '../query/types.js' import type { Logger, PeerId, RoutingOptions, AbortOptions, PeerInfo } from '@libp2p/interface' import type { ConnectionManager } from '@libp2p/interface-internal' -import type { Filter } from '@libp2p/utils/filters' +import type { Filter } from '@libp2p/utils' export interface QueryPathOptions extends RoutingOptions { /** diff --git a/packages/kad-dht/src/reprovider.ts b/packages/kad-dht/src/reprovider.ts index 4420da5997..98507bfa32 100644 --- a/packages/kad-dht/src/reprovider.ts +++ b/packages/kad-dht/src/reprovider.ts @@ -1,5 +1,4 @@ -import { AdaptiveTimeout } from '@libp2p/utils/adaptive-timeout' -import { Queue } from '@libp2p/utils/queue' +import { AdaptiveTimeout, Queue } from '@libp2p/utils' import drain from 'it-drain' import { TypedEventEmitter, setMaxListeners } from 'main-event' import { PROVIDERS_VALIDITY, REPROVIDE_CONCURRENCY, REPROVIDE_INTERVAL, REPROVIDE_MAX_QUEUE_SIZE, REPROVIDE_THRESHOLD, REPROVIDE_TIMEOUT } from './constants.js' @@ -8,7 +7,7 @@ import type { ContentRouting } from './content-routing/index.js' import type { OperationMetrics } from './kad-dht.js' import type { AbortOptions, ComponentLogger, Logger, Metrics, PeerId } from 
'@libp2p/interface' import type { AddressManager } from '@libp2p/interface-internal' -import type { AdaptiveTimeoutInit } from '@libp2p/utils/adaptive-timeout' +import type { AdaptiveTimeoutInit } from '@libp2p/utils' import type { Datastore } from 'interface-datastore' import type { CID } from 'multiformats/cid' diff --git a/packages/kad-dht/src/routing-table/index.ts b/packages/kad-dht/src/routing-table/index.ts index d373d2889f..4a9a4f747d 100644 --- a/packages/kad-dht/src/routing-table/index.ts +++ b/packages/kad-dht/src/routing-table/index.ts @@ -1,6 +1,5 @@ import { start, stop } from '@libp2p/interface' -import { AdaptiveTimeout } from '@libp2p/utils/adaptive-timeout' -import { PeerQueue } from '@libp2p/utils/peer-queue' +import { AdaptiveTimeout, PeerQueue } from '@libp2p/utils' import { anySignal } from 'any-signal' import parallel from 'it-parallel' import { TypedEventEmitter, setMaxListeners } from 'main-event' @@ -11,7 +10,7 @@ import type { Bucket, GetClosestPeersOptions, LeafBucket, Peer } from './k-bucke import type { Network } from '../network.js' import type { AbortOptions, ComponentLogger, CounterGroup, Logger, Metric, Metrics, PeerId, PeerStore, Startable, Stream } from '@libp2p/interface' import type { Ping } from '@libp2p/ping' -import type { AdaptiveTimeoutInit } from '@libp2p/utils/adaptive-timeout' +import type { AdaptiveTimeoutInit } from '@libp2p/utils' export const KBUCKET_SIZE = 20 export const PREFIX_LENGTH = 6 diff --git a/packages/kad-dht/src/rpc/index.ts b/packages/kad-dht/src/rpc/index.ts index a9a14912df..d2220a41b0 100644 --- a/packages/kad-dht/src/rpc/index.ts +++ b/packages/kad-dht/src/rpc/index.ts @@ -1,5 +1,5 @@ import { TimeoutError } from '@libp2p/interface' -import { pbStream } from 'it-protobuf-stream' +import { pbStream } from '@libp2p/utils' import { Message, MessageType } from '../message/dht.js' import { AddProviderHandler } from './handlers/add-provider.js' import { FindNodeHandler } from './handlers/find-node.js' @@ -15,7 +15,7 @@ import type { GetProvidersHandlerComponents } from './handlers/get-providers.js' import type { GetValueHandlerComponents } from './handlers/get-value.js' import type { PutValueHandlerComponents } from './handlers/put-value.js' import type { RoutingTable } from '../routing-table/index.js' -import type { CounterGroup, Logger, Metrics, PeerId, IncomingStreamData, MetricGroup } from '@libp2p/interface' +import type { CounterGroup, Logger, Metrics, PeerId, MetricGroup, Connection, Stream } from '@libp2p/interface' export interface DHTMessageHandler { handle(peerId: PeerId, msg: Message): Promise @@ -95,12 +95,10 @@ export class RPC { /** * Handle incoming streams on the dht protocol */ - onIncomingStream (data: IncomingStreamData): void { + onIncomingStream (stream: Stream, connection: Connection): void { const message = 'unknown' Promise.resolve().then(async () => { - const { stream, connection } = data - const abortListener = (): void => { stream.abort(new TimeoutError()) } @@ -153,7 +151,7 @@ export class RPC { } }) .catch(err => { - this.log.error('error handling %s RPC message from %p - %e', message, data.connection.remotePeer, err) + this.log.error('error handling %s RPC message from %p - %e', message, connection.remotePeer, err) }) } } diff --git a/packages/kad-dht/src/utils.ts b/packages/kad-dht/src/utils.ts index 193cb83e40..660936118c 100644 --- a/packages/kad-dht/src/utils.ts +++ b/packages/kad-dht/src/utils.ts @@ -1,6 +1,6 @@ import { peerIdFromMultihash, peerIdFromString } from '@libp2p/peer-id' import { 
Libp2pRecord } from '@libp2p/record' -import { isPrivateIp } from '@libp2p/utils/private-ip' +import { isPrivateIp } from '@libp2p/utils' import { Key } from 'interface-datastore/key' import { CID } from 'multiformats/cid' import * as raw from 'multiformats/codecs/raw' @@ -160,15 +160,6 @@ export function createPutRecord (key: Uint8Array, value: Uint8Array): Uint8Array return rec.serialize() } -export function debounce (callback: () => void, wait: number = 100): () => void { - let timeout: ReturnType - - return (): void => { - clearTimeout(timeout) - timeout = setTimeout(() => { callback() }, wait) - } -} - // see https://github.com/multiformats/multiaddr/blob/master/protocols.csv const P2P_CIRCUIT_CODE = 290 const DNS4_CODE = 54 diff --git a/packages/kad-dht/test/libp2p-routing.spec.ts b/packages/kad-dht/test/libp2p-routing.spec.ts index c3de7a564b..ce38cc34b4 100644 --- a/packages/kad-dht/test/libp2p-routing.spec.ts +++ b/packages/kad-dht/test/libp2p-routing.spec.ts @@ -1,12 +1,11 @@ import { contentRoutingSymbol, start, stop, peerRoutingSymbol } from '@libp2p/interface' import { defaultLogger } from '@libp2p/logger' +import { streamPair, pbStream } from '@libp2p/utils' import { multiaddr } from '@multiformats/multiaddr' import { expect } from 'aegir/chai' import { MemoryDatastore } from 'datastore-core' import all from 'it-all' import map from 'it-map' -import { duplexPair } from 'it-pair/duplex' -import { pbStream } from 'it-protobuf-stream' import { TypedEventEmitter } from 'main-event' import { CID } from 'multiformats/cid' import pDefer from 'p-defer' @@ -40,22 +39,11 @@ interface StubbedKadDHTComponents { const PROTOCOL = '/test/dht/1.0.0' -function createStreams (peerId: PeerId, components: StubbedKadDHTComponents): { connection: Connection, incomingStream: Stream } { - const duplex = duplexPair() - const outgoingStream = stubInterface({ - close: async () => {} - }) - outgoingStream.source = duplex[0].source - outgoingStream.sink.callsFake(async source => duplex[0].sink(source)) - - const incomingStream = stubInterface({ - close: async () => {} - }) - incomingStream.source = duplex[1].source - incomingStream.sink.callsFake(async source => duplex[1].sink(source)) +async function createStreams (peerId: PeerId, components: StubbedKadDHTComponents): Promise<{ connection: Connection, incomingStream: Stream }> { + const [outboundStream, incomingStream] = await streamPair() const connection = stubInterface() - connection.newStream.withArgs(PROTOCOL).resolves(outgoingStream) + connection.newStream.withArgs(PROTOCOL).resolves(outboundStream) components.connectionManager.openConnection.withArgs(peerId).resolves(connection) return { @@ -151,7 +139,7 @@ describe('content routing', () => { const { connection, incomingStream - } = createStreams(remotePeer.id, components) + } = await createStreams(remotePeer.id, components) // a peer has connected const topology = components.registrar.register.getCall(0).args[1] @@ -193,7 +181,7 @@ describe('content routing', () => { const { connection, incomingStream - } = createStreams(remotePeer.id, components) + } = await createStreams(remotePeer.id, components) // a peer has connected const topology = components.registrar.register.getCall(0).args[1] @@ -292,7 +280,7 @@ describe('peer routing', () => { const { connection, incomingStream - } = createStreams(remotePeer.id, components) + } = await createStreams(remotePeer.id, components) // a peer has connected const topology = components.registrar.register.getCall(0).args[1] @@ -348,11 +336,11 @@ 
describe('peer routing', () => { const { connection, incomingStream - } = createStreams(remotePeer.id, components) + } = await createStreams(remotePeer.id, components) const { incomingStream: closestPeerIncomingStream - } = createStreams(closestPeer.id, components) + } = await createStreams(closestPeer.id, components) // a peer has connected const topology = components.registrar.register.getCall(0).args[1] diff --git a/packages/kad-dht/test/network.spec.ts b/packages/kad-dht/test/network.spec.ts index 64c5117eaf..25b9acdb9f 100644 --- a/packages/kad-dht/test/network.spec.ts +++ b/packages/kad-dht/test/network.spec.ts @@ -1,18 +1,17 @@ /* eslint-env mocha */ -import { mockStream } from '@libp2p/interface-compliance-tests/mocks' +import { streamPair } from '@libp2p/utils' import { expect } from 'aegir/chai' import all from 'it-all' import * as lp from 'it-length-prefixed' import pDefer from 'p-defer' +import { stubInterface } from 'sinon-ts' import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' import { Message, MessageType } from '../src/message/dht.js' import { TestDHT } from './utils/test-dht.js' import type { KadDHT } from '../src/kad-dht.js' import type { Connection, PeerId } from '@libp2p/interface' import type { Multiaddr } from '@multiformats/multiaddr' -import type { Sink, Source } from 'it-stream-types' -import type { Uint8ArrayList } from 'uint8arraylist' describe('Network', () => { let dht: KadDHT @@ -65,34 +64,29 @@ describe('Network', () => { // mock it dht.components.connectionManager.openConnection = async (peer: PeerId | Multiaddr | Multiaddr[]) => { - // @ts-expect-error incomplete implementation - const connection: Connection = { - newStream: async (protocols: string | string[]) => { - const protocol = Array.isArray(protocols) ? 
protocols[0] : protocols - const msg: Partial = { - type: MessageType.FIND_NODE, - key: uint8ArrayFromString('world') - } - - const source = (async function * () { - yield lp.encode.single(Message.encode(msg)) - })() - - const sink: Sink, Promise> = async source => { - for await (const buf of lp.decode(source)) { - expect(Message.decode(buf).type).to.eql(MessageType.PING) - finish() - } - } - - const stream = mockStream({ source, sink }) - - return { - ...stream, - protocol - } + const [outboundStream, inboundStream] = await streamPair() + + inboundStream.addEventListener('message', (evt) => { + for (const buf of lp.decode([evt.data])) { + expect(Message.decode(buf).type).to.eql(MessageType.PING) + finish() } - } + }) + + queueMicrotask(() => { + const msg: Partial = { + type: MessageType.FIND_NODE, + key: uint8ArrayFromString('world') + } + + inboundStream.send(lp.encode.single(Message.encode(msg))) + }) + + const connection: Connection = stubInterface({ + newStream: async () => { + return outboundStream + } + }) return connection } diff --git a/packages/kad-dht/test/rpc/index.node.ts b/packages/kad-dht/test/rpc/index.node.ts index 16cbc347b1..9f78c52da2 100644 --- a/packages/kad-dht/test/rpc/index.node.ts +++ b/packages/kad-dht/test/rpc/index.node.ts @@ -1,15 +1,12 @@ /* eslint-env mocha */ import { start } from '@libp2p/interface' -import { mockStream } from '@libp2p/interface-compliance-tests/mocks' import { defaultLogger } from '@libp2p/logger' import { persistentPeerStore } from '@libp2p/peer-store' +import { streamPair } from '@libp2p/utils' import { expect } from 'aegir/chai' import { MemoryDatastore } from 'datastore-core' -import all from 'it-all' import * as lp from 'it-length-prefixed' -import map from 'it-map' -import { pipe } from 'it-pipe' import { TypedEventEmitter } from 'main-event' import pDefer from 'p-defer' import Sinon from 'sinon' @@ -29,7 +26,6 @@ import type { PeerAndKey } from '../utils/create-peer-id.js' import type { Libp2pEvents, Connection, PeerStore } from '@libp2p/interface' import type { AddressManager } from '@libp2p/interface-internal' import type { Datastore } from 'interface-datastore' -import type { Duplex, Source } from 'it-stream-types' import type { SinonStubbedInstance } from 'sinon' describe('rpc', () => { @@ -92,32 +88,27 @@ describe('rpc', () => { peerRouting.getClosestPeersOffline.resolves([]) - const source = pipe( - [Message.encode(msg)], - (source) => lp.encode(source), - source => map(source, arr => new Uint8ArrayList(arr)), - (source) => all(source) - ) + const [outboundStream, incomingStream] = await streamPair() + + outboundStream.addEventListener('message', (evt) => { + const res: Uint8ArrayList[] = [] - const duplexStream: Duplex, Source, Promise> = { - source: (async function * () { - yield * source - })(), - sink: async (source) => { - const res = await pipe( - source, - (source) => lp.decode(source), - async (source) => all(source) - ) - validateMessage(res) + for (const buf of lp.decode([evt.data])) { + res.push(buf) } - } - rpc.onIncomingStream({ - stream: mockStream(duplexStream), - connection: stubInterface() + validateMessage(res) }) + queueMicrotask(() => { + outboundStream.send(lp.encode.single(Message.encode(msg))) + }) + + rpc.onIncomingStream( + incomingStream, + stubInterface() + ) + await defer.promise }) }) diff --git a/packages/kad-dht/test/utils/test-dht.ts b/packages/kad-dht/test/utils/test-dht.ts index 3e1ba997a1..8c114f9f77 100644 --- a/packages/kad-dht/test/utils/test-dht.ts +++ 
b/packages/kad-dht/test/utils/test-dht.ts @@ -1,6 +1,5 @@ import { generateKeyPair } from '@libp2p/crypto/keys' import { start, stop } from '@libp2p/interface' -import { mockRegistrar, mockConnectionManager, mockNetwork } from '@libp2p/interface-compliance-tests/mocks' import { defaultLogger } from '@libp2p/logger' import { peerIdFromPrivateKey } from '@libp2p/peer-id' import { persistentPeerStore } from '@libp2p/peer-store' @@ -18,6 +17,8 @@ import type { Libp2pEvents, PeerId, PeerStore } from '@libp2p/interface' import type { AddressManager, ConnectionManager, Registrar } from '@libp2p/interface-internal' import type { Ping } from '@libp2p/ping' +let memoryIndex = 0 + export class TestDHT { private readonly peers: Map @@ -35,7 +36,7 @@ export class TestDHT { peerId, privateKey, datastore: new MemoryDatastore(), - registrar: mockRegistrar(), + registrar: stubInterface(), addressManager: stubInterface(), peerStore: stubInterface(), connectionManager: stubInterface(), @@ -43,10 +44,7 @@ export class TestDHT { logger: defaultLogger(), ping: stubInterface() } - components.connectionManager = mockConnectionManager({ - ...components, - events - }) + components.connectionManager = stubInterface() components.peerStore = persistentPeerStore({ ...components, events @@ -54,16 +52,9 @@ export class TestDHT { await start(...Object.values(components)) - mockNetwork.addNode({ - ...components, - events - }) - const addressManager = stubInterface() addressManager.getAddresses.returns([ - multiaddr(`/ip4/127.0.0.1/tcp/4002/p2p/${components.peerId.toString()}`), - multiaddr(`/ip4/192.168.1.1/tcp/4002/p2p/${components.peerId.toString()}`), - multiaddr(`/ip4/85.3.31.0/tcp/4002/p2p/${components.peerId.toString()}`) + multiaddr(`/memory/${memoryIndex++}/p2p/${components.peerId.toString()}`) ]) components.addressManager = addressManager diff --git a/packages/keychain/src/keychain.ts b/packages/keychain/src/keychain.ts index 627a89f34b..85943ec7c4 100644 --- a/packages/keychain/src/keychain.ts +++ b/packages/keychain/src/keychain.ts @@ -3,7 +3,7 @@ import { pbkdf2, randomBytes } from '@libp2p/crypto' import { privateKeyToProtobuf } from '@libp2p/crypto/keys' import { InvalidParametersError, NotFoundError, serviceCapabilities } from '@libp2p/interface' -import { mergeOptions } from '@libp2p/utils/merge-options' +import { mergeOptions } from '@libp2p/utils' import { Key } from 'interface-datastore/key' import { base58btc } from 'multiformats/bases/base58' import { sha256 } from 'multiformats/hashes/sha2' diff --git a/packages/libp2p-daemon-client/.aegir.js b/packages/libp2p-daemon-client/.aegir.js new file mode 100644 index 0000000000..135a6a2211 --- /dev/null +++ b/packages/libp2p-daemon-client/.aegir.js @@ -0,0 +1,8 @@ + +export default { + build: { + config: { + platform: 'node' + } + } +} diff --git a/packages/libp2p-daemon-client/API.md b/packages/libp2p-daemon-client/API.md new file mode 100644 index 0000000000..67732fea6d --- /dev/null +++ b/packages/libp2p-daemon-client/API.md @@ -0,0 +1,500 @@ +# API + +* [Getting started](#getting-started) +* [`close`](#close) +* [`connect`](#connect) +* [`identify`](#identify) +* [`listPeers`](#listPeers) +* [`openStream`](#openStream) +* [`registerStream`](#registerStream) +* [`dht.put`](#dht.put) +* [`dht.get`](#dht.get) +* [`dht.findPeer`](#dht.findPeer) +* [`dht.provide`](#dht.provide) +* [`dht.findProviders`](#dht.findProviders) +* [`dht.getClosestPeers`](#dht.getClosestPeers) +* [`dht.getPublicKey`](#dht.getPublicKey) +* [`pubsub.getTopics`](#pubsub.getTopics) +* 
[`pubsub.publish`](#pubsub.publish) +* [`pubsub.subscribe`](#pubsub.subscribe) + +## Getting started + +Create a new daemon client, using a unix socket. + +### `Client(socketPath)` + +#### Parameters + +| Name | Type | Description | +|------|------|-------------| +| socketPath | `String` | unix socket path | + +#### Returns + +Client instance + +#### Example + +```js +const Client = require('libp2p-daemon-client') + +const defaultSock = '/tmp/p2pd.sock' +const client = new Client(defaultSock) + +// client.{} +``` + +## close + +Closes the socket. + +### `client.close()` + +#### Returns + +| Type | Description | +|------|-------------| +| `Promise` | Promise resolves when socket is closed | + +#### Example + +```js +const Client = require('libp2p-daemon-client') + +const defaultSock = '/tmp/p2pd.sock' +const client = new Client(defaultSock) + +// close the socket +await client.close() +``` + +## connect + +Requests a connection to a known peer on a given set of addresses. + +### `client.connect(peerId, addrs)` + +#### Parameters + +| Name | Type | Description | +|------|------|-------------| +| peerId | [`PeerId`](https://github.com/libp2p/js-peer-id) | peer ID to connect | +| options | `Object` | set of addresses to connect | + +#### Example + +```js +const client = new Client(defaultSock) + +try { + await client.connect(peerId, addrs) +} catch (err) { + // +} +``` + +## identify + +Query the daemon for its peer ID and listen addresses. + +### `client.identify()` + +#### Returns + +| Type | Description | +|------|-------------| +| `Object` | Identify response | +| `Object.peerId` | Peer id of the daemon | +| `Object.addrs` | Addresses of the daemon | + +#### Example + +```js +const client = new Client(defaultSock) + +let identify + +try { + identify = await client.identify() +} catch (err) { + // +} +``` + +## listPeers + +Get a list of IDs of peers the node is connected to. + +### `client.listPeers()` + +#### Returns + +| Type | Description | +|------|-------------| +| `Array` | array of peer id's | +| `Array.` | Peer id of a node | + +#### Example + +```js +const client = new Client(defaultSock) + +let identify + +try { + identify = await client.identify() +} catch (err) { + // +} +``` + +## openStream + +Initiate an outbound stream to a peer on one of a set of protocols. + +### `client.openStream(peerId, protocol)` + +#### Parameters + +| Name | Type | Description | +|------|------|-------------| +| peerId | [`PeerId`](https://github.com/libp2p/js-peer-id) | peer ID to connect | +| protocol | `string` | protocol to use | + +#### Returns + +| Type | Description | +|------|-------------| +| `Socket` | socket to write data | + +#### Example + +```js +const protocol = '/protocol/1.0.0' +const client = new Client(defaultSock) + +let socket + +try { + socket = await client.openStream(peerId, protocol) +} catch (err) { + // +} + +socket.write(uint8ArrayFromString('data')) +``` + +## registerStreamHandler + +Register a handler for inbound streams on a given protocol. + +### `client.registerStreamHandler(path, protocol)` + +#### Parameters + +| Name | Type | Description | +|------|------|-------------| +| path | `string` | socket path | +| protocol | `string` | protocol to use | + +#### Example + +```js +const protocol = '/protocol/1.0.0' +const client = new Client(defaultSock) + +await client.registerStreamHandler(path, protocol) +``` + +## dht.put + +Write a value to a key in the DHT. 
+ +### `client.dht.put(key, value)` + +#### Parameters + +| Name | Type | Description | +|------|------|-------------| +| key | `Uint8Array` | key to add to the dht | +| value | `Uint8Array` | value to add to the dht | + +#### Example + +```js +const client = new Client(defaultSock) + +const key = '/key' +const value = uint8ArrayFromString('oh hello there') + +try { + await client.dht.put(key, value) +} catch (err) { + // +} +``` + +## dht.get + +Query the DHT for a value stored through a key in the DHT. + +### `client.dht.get(key)` + +#### Parameters + +| Name | Type | Description | +|------|------|-------------| +| key | `Uint8Array` | key to get from the dht | + +#### Returns + +| Type | Description | +|------|-------------| +| `Uint8Array` | Value obtained from the DHT | + +#### Example + +```js +const client = new Client(defaultSock) + +const key = '/key' +let value + +try { + value = await client.dht.get(key, value) +} catch (err) { + // +} +``` + +## dht.findPeer + +Query the DHT for a given peer's known addresses. + +### `client.dht.findPeer(peerId)` + +#### Parameters + +| Name | Type | Description | +|------|------|-------------| +| peerId | [`PeerId`](https://github.com/libp2p/js-peer-id) | ID of the peer to find | + +#### Returns + +| Type | Description | +|------|-------------| +| `PeerInfo` | Peer info of a known peer | + +#### Example + +```js +const client = new Client(defaultSock) + +let peerInfo + +try { + peerInfo = await client.dht.findPeer(peerId) +} catch (err) { + // +} +``` + +## dht.provide + +Announce that have data addressed by a given CID. + +### `client.dht.provide(cid)` + +#### Parameters + +| Name | Type | Description | +|------|------|-------------| +| cid | [`CID`](https://github.com/multiformats/js-cid) | cid to provide | + +#### Example + +```js +const client = new Client(defaultSock) + +try { + await client.dht.provide(cid) +} catch (err) { + // +} +``` + +## dht.findProviders + +Query the DHT for peers that have a piece of content, identified by a CID. + +### `client.dht.findProviders(cid, [count])` + +#### Parameters + +| Name | Type | Description | +|------|------|-------------| +| cid | [`CID`](https://github.com/multiformats/js-cid) | cid to find | +| count | `number` | number or results aimed | + +#### Returns + +| Type | Description | +|------|-------------| +| `Array` | array of peer info | +| `Array.` | Peer info of a node | + +#### Example + +```js +const client = new Client(defaultSock) + +let peerInfos + +try { + peerInfos = await client.dht.findProviders(cid) +} catch (err) { + // +} +``` + +## dht.getClosestPeers + +Query the DHT routing table for peers that are closest to a provided key. + +### `client.dht.getClosestPeers(key)` + +#### Parameters + +| Name | Type | Description | +|------|------|-------------| +| key | `Uint8Array` | key to get from the dht | + +#### Returns + +| Type | Description | +|------|-------------| +| `Array` | array of peer info | +| `Array.` | Peer info of a node | + +#### Example + +```js +const client = new Client(defaultSock) + +let peerInfos + +try { + peerInfos = await client.dht.getClosestPeers(key) +} catch (err) { + // +} +``` + +## dht.getPublicKey + +Query the DHT routing table for a given peer's public key. 
+## dht.getPublicKey
+
+Query the DHT routing table for a given peer's public key.
+
+### `client.dht.getPublicKey(peerId)`
+
+#### Parameters
+
+| Name | Type | Description |
+|------|------|-------------|
+| peerId | [`PeerId`](https://github.com/libp2p/js-peer-id) | ID of the peer to find |
+
+#### Returns
+
+| Type | Description |
+|------|-------------|
+| `PublicKey` | public key of the peer |
+
+#### Example
+
+```js
+const client = new Client(defaultSock)
+
+let publicKey
+
+try {
+  publicKey = await client.dht.getPublicKey(peerId)
+} catch (err) {
+  //
+}
+```
+
+## pubsub.getTopics
+
+Get the list of topics the node is subscribed to.
+
+### `client.pubsub.getTopics()`
+
+#### Returns
+
+| Type | Description |
+|------|-------------|
+| `Array.<string>` | topics the node is subscribed to |
+
+#### Example
+
+```js
+const client = new Client(defaultSock)
+
+let topics
+
+try {
+  topics = await client.pubsub.getTopics()
+} catch (err) {
+  //
+}
+```
+
+## pubsub.publish
+
+Publish data under a given topic.
+
+### `client.pubsub.publish(topic, data)`
+
+#### Parameters
+
+| Name | Type | Description |
+|------|------|-------------|
+| topic | `string` | topic to publish |
+| data | `Uint8Array` | data to publish |
+
+#### Returns
+
+| Type | Description |
+|------|-------------|
+| `Promise` | publish success |
+
+#### Example
+
+```js
+const topic = 'topic'
+const data = uint8ArrayFromString('data')
+const client = new Client(defaultSock)
+
+try {
+  await client.pubsub.publish(topic, data)
+} catch (err) {
+  //
+}
+```
+
+## pubsub.subscribe
+
+Subscribe to a topic and iterate over the messages received on it.
+
+### `client.pubsub.subscribe(topic)`
+
+#### Parameters
+
+| Name | Type | Description |
+|------|------|-------------|
+| topic | `string` | topic to subscribe |
+
+#### Returns
+
+| Type | Description |
+|------|-------------|
+| `Subscription` | subscription exposing the received messages as an async iterable |
+
+#### Example
+
+```js
+const topic = 'topic'
+const client = new Client(defaultSock)
+
+const subscription = await client.pubsub.subscribe(topic)
+
+for await (const msg of subscription.messages()) {
+  // msg.data - pubsub data received
+}
+```
diff --git a/packages/libp2p-daemon-client/CHANGELOG.md b/packages/libp2p-daemon-client/CHANGELOG.md
new file mode 100644
index 0000000000..160f72ca1a
--- /dev/null
+++ b/packages/libp2p-daemon-client/CHANGELOG.md
@@ -0,0 +1,628 @@
+## [@libp2p/daemon-client-v9.0.8](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-9.0.7...@libp2p/daemon-client-9.0.8) (2025-08-13)
+
+### Bug Fixes
+
+* update all deps ([#348](https://github.com/libp2p/js-libp2p-daemon/issues/348)) ([44bf148](https://github.com/libp2p/js-libp2p-daemon/commit/44bf148f37b9f4091dc8ed26cf343e196607ebbc))
+
+## [@libp2p/daemon-client-v9.0.7](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-9.0.6...@libp2p/daemon-client-9.0.7) (2025-08-13)
+
+### Dependencies
+
+* bump aegir from 46.0.5 to 47.0.21 ([#343](https://github.com/libp2p/js-libp2p-daemon/issues/343)) ([704c22f](https://github.com/libp2p/js-libp2p-daemon/commit/704c22f102362c6036642a73979d262e1214baa5))
+
+## [@libp2p/daemon-client-v9.0.6](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-9.0.5...@libp2p/daemon-client-9.0.6) (2025-05-19)
+
+### Dependencies
+
+* bump aegir from 45.2.1 to 46.0.2 ([#297](https://github.com/libp2p/js-libp2p-daemon/issues/297)) ([09c1457](https://github.com/libp2p/js-libp2p-daemon/commit/09c1457ce93a45cab43869892cd9174617a34c29))
+
+## [@libp2p/daemon-client-v9.0.5](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-9.0.4...@libp2p/daemon-client-9.0.5) (2025-02-24)
+
+### Bug Fixes
+
+* pass abort signal to dial ([#285](https://github.com/libp2p/js-libp2p-daemon/issues/285)) ([a739825](https://github.com/libp2p/js-libp2p-daemon/commit/a7398251d9c77f357ffcacb83fa7ebcd1039b114))
+
+## 
[@libp2p/daemon-client-v9.0.4](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-9.0.3...@libp2p/daemon-client-9.0.4) (2024-11-08) + +### Bug Fixes + +* ts-expect-error masking actual error ([25381fc](https://github.com/libp2p/js-libp2p-daemon/commit/25381fc857c3a1f50c594eaa7d727c908af14796)) + +## [@libp2p/daemon-client-v9.0.3](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-9.0.2...@libp2p/daemon-client-9.0.3) (2024-11-08) + +### Bug Fixes + +* fix mismatched versions ([c6d619f](https://github.com/libp2p/js-libp2p-daemon/commit/c6d619f9af99a5a28089aeec2f047ca1fca1f2e1)) +* more mismatched versions ([327942a](https://github.com/libp2p/js-libp2p-daemon/commit/327942a44835533ebbc931ad33f03e8c1c9d4b28)) +* update project config ([4cf3a98](https://github.com/libp2p/js-libp2p-daemon/commit/4cf3a98dd76f8a41ef7f70d9e1696f2a06049f69)) + +## @libp2p/daemon-client [8.0.6](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-8.0.5...@libp2p/daemon-client-8.0.6) (2024-08-01) + + +### Dependencies + +* **dev:** bump sinon from 17.0.2 to 18.0.0 ([#267](https://github.com/libp2p/js-libp2p-daemon/issues/267)) ([e9fb07a](https://github.com/libp2p/js-libp2p-daemon/commit/e9fb07a3b3350a6dcd8f4ff64fb63eba456dccaf)) + + + +### Dependencies + +* **@libp2p/daemon-server:** upgraded to 7.0.6 + +## @libp2p/daemon-client [8.0.5](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-8.0.4...@libp2p/daemon-client-8.0.5) (2024-04-29) + + +### Dependencies + +* bump @chainsafe/libp2p-gossipsub from 11.2.1 to 13.0.0 ([#265](https://github.com/libp2p/js-libp2p-daemon/issues/265)) ([bcd5041](https://github.com/libp2p/js-libp2p-daemon/commit/bcd504110f58f32977f31ec38989180187ab8bc2)) + + + +### Dependencies + +* **@libp2p/daemon-server:** upgraded to 7.0.5 + +## @libp2p/daemon-client [8.0.4](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-8.0.3...@libp2p/daemon-client-8.0.4) (2024-02-07) + + +### Dependencies + +* bump @libp2p/kad-dht from 11.0.8 to 12.0.5 ([#261](https://github.com/libp2p/js-libp2p-daemon/issues/261)) ([1cbaa23](https://github.com/libp2p/js-libp2p-daemon/commit/1cbaa23c0071e8d599fcef0859de41ac04f2606d)) +* bump uint8arrays from 4.0.10 to 5.0.1 ([#263](https://github.com/libp2p/js-libp2p-daemon/issues/263)) ([b5eb311](https://github.com/libp2p/js-libp2p-daemon/commit/b5eb3114be41176f47fd49164322285aaa8549c1)) + + + +### Dependencies + +* **@libp2p/daemon-server:** upgraded to 7.0.4 + +## @libp2p/daemon-client [8.0.3](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-v8.0.2...@libp2p/daemon-client-8.0.3) (2024-02-07) + + +### Dependencies + +* bump aegir from 41.3.5 to 42.2.3 ([#262](https://github.com/libp2p/js-libp2p-daemon/issues/262)) ([2bb9733](https://github.com/libp2p/js-libp2p-daemon/commit/2bb97338d76e4cc48490326083fb13bd9ae60a74)) + + + +### Dependencies + +* **@libp2p/daemon-protocol:** upgraded to 6.0.2 +* **@libp2p/daemon-server:** upgraded to 7.0.3 + +## [@libp2p/daemon-client-v8.0.2](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-v8.0.1...@libp2p/daemon-client-v8.0.2) (2024-02-07) + + +### Dependencies + +* bump multiformats from 12.1.3 to 13.0.1 ([#253](https://github.com/libp2p/js-libp2p-daemon/issues/253)) ([aebd43a](https://github.com/libp2p/js-libp2p-daemon/commit/aebd43ac1e7abae209ce4cc198989c8161a1b022)) +* bump uint8arrays from 4.0.10 to 5.0.1 ([#248](https://github.com/libp2p/js-libp2p-daemon/issues/248)) 
([290bb2a](https://github.com/libp2p/js-libp2p-daemon/commit/290bb2ac7c3bf1cdb5174b60010888fbd91a2f17)) + +## [@libp2p/daemon-client-v8.0.1](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-v8.0.0...@libp2p/daemon-client-v8.0.1) (2023-12-04) + + +### Dependencies + +* bump @chainsafe/libp2p-gossipsub from 10.1.1 to 11.0.0 ([#244](https://github.com/libp2p/js-libp2p-daemon/issues/244)) ([6cc8c3c](https://github.com/libp2p/js-libp2p-daemon/commit/6cc8c3c96316fcb4dc32f24dc7d25414dec5f80d)) + +## [@libp2p/daemon-client-v8.0.0](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-v7.0.3...@libp2p/daemon-client-v8.0.0) (2023-11-30) + + +### ⚠ BREAKING CHANGES + +* updates js-libp2p to v1 + +### Trivial Changes + +* update sibling dependencies ([3e711d1](https://github.com/libp2p/js-libp2p-daemon/commit/3e711d137983192db220d549c768cb4d82cf7f73)) +* update sibling dependencies ([865cb8a](https://github.com/libp2p/js-libp2p-daemon/commit/865cb8a7bf165092f90455dcc895ffa7e97df432)) + + +### Dependencies + +* update libp2p to v1 ([#235](https://github.com/libp2p/js-libp2p-daemon/issues/235)) ([6f2917b](https://github.com/libp2p/js-libp2p-daemon/commit/6f2917b714756e3632ff6c522668f7c2166d4389)) + +## [@libp2p/daemon-client-v7.0.3](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-v7.0.2...@libp2p/daemon-client-v7.0.3) (2023-11-10) + + +### Bug Fixes + +* add logger field ([#234](https://github.com/libp2p/js-libp2p-daemon/issues/234)) ([6f4728c](https://github.com/libp2p/js-libp2p-daemon/commit/6f4728c447859db17aaee613060b67271922fc2a)) + +## [@libp2p/daemon-client-v7.0.2](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-v7.0.1...@libp2p/daemon-client-v7.0.2) (2023-11-02) + + +### Dependencies + +* bump aegir from 40.0.13 to 41.1.6 ([#232](https://github.com/libp2p/js-libp2p-daemon/issues/232)) ([653c74b](https://github.com/libp2p/js-libp2p-daemon/commit/653c74b6272fd6d11d686bf7bb44b49b6757b633)) +* **dev:** bump sinon from 15.2.0 to 17.0.1 ([#231](https://github.com/libp2p/js-libp2p-daemon/issues/231)) ([591b464](https://github.com/libp2p/js-libp2p-daemon/commit/591b464c3517948049dc169157333e774ca21f3c)) +* **dev:** bump sinon-ts from 1.0.2 to 2.0.0 ([#233](https://github.com/libp2p/js-libp2p-daemon/issues/233)) ([de13473](https://github.com/libp2p/js-libp2p-daemon/commit/de13473ffd981c0488c27402e16c134f49e4b526)) + +## [@libp2p/daemon-client-v7.0.1](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-v7.0.0...@libp2p/daemon-client-v7.0.1) (2023-08-04) + + +### Dependencies + +* bump @chainsafe/libp2p-gossipsub from 9.1.0 to 10.0.0 ([#214](https://github.com/libp2p/js-libp2p-daemon/issues/214)) ([0308811](https://github.com/libp2p/js-libp2p-daemon/commit/0308811a2ea29d20de3f6a43db32720f21fb9b3f)) + +## [@libp2p/daemon-client-v7.0.0](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-v6.0.3...@libp2p/daemon-client-v7.0.0) (2023-07-31) + + +### ⚠ BREAKING CHANGES + +* stream close methods are now asyc, requires libp2p@0.46.x or later + +### Features + +* close streams gracefully ([#213](https://github.com/libp2p/js-libp2p-daemon/issues/213)) ([92eebfa](https://github.com/libp2p/js-libp2p-daemon/commit/92eebfa12ba1fb42ae6c9e164fb0d69647e62074)) + + +### Dependencies + +* bump aegir from 38.1.8 to 39.0.1 ([#202](https://github.com/libp2p/js-libp2p-daemon/issues/202)) ([3bf4027](https://github.com/libp2p/js-libp2p-daemon/commit/3bf402752a92c3ebb96435eaa7923ce22ef76ea0)) +* 
update sibling dependencies ([fdfca91](https://github.com/libp2p/js-libp2p-daemon/commit/fdfca91afb258620c282296055e385b410018a6a)) +* update sibling dependencies ([ba4dd19](https://github.com/libp2p/js-libp2p-daemon/commit/ba4dd190e0e4101291195d5ffdf6bd3f982ee457)) + +## [@libp2p/daemon-client-v6.0.3](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-v6.0.2...@libp2p/daemon-client-v6.0.3) (2023-04-27) + + +### Bug Fixes + +* use interface-libp2p to ensure the correct services are set ([#203](https://github.com/libp2p/js-libp2p-daemon/issues/203)) ([8602a70](https://github.com/libp2p/js-libp2p-daemon/commit/8602a704e45cfa768ad55974d025b2d4be6f42a9)) + +## [@libp2p/daemon-client-v6.0.2](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-v6.0.1...@libp2p/daemon-client-v6.0.2) (2023-04-24) + + +### Dependencies + +* bump @libp2p/interface-peer-store from 1.2.9 to 2.0.0 ([#201](https://github.com/libp2p/js-libp2p-daemon/issues/201)) ([9b146a8](https://github.com/libp2p/js-libp2p-daemon/commit/9b146a8c38c30a13401be6da5259cd9da6bdc25c)) + +## [@libp2p/daemon-client-v6.0.1](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-v6.0.0...@libp2p/daemon-client-v6.0.1) (2023-04-24) + + +### Dependencies + +* **dev:** bump @libp2p/interface-mocks from 10.0.3 to 11.0.0 ([#199](https://github.com/libp2p/js-libp2p-daemon/issues/199)) ([76f7b6f](https://github.com/libp2p/js-libp2p-daemon/commit/76f7b6fdd1af129ac278c5d2313d466db3e28a78)) + +## [@libp2p/daemon-client-v6.0.0](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-v5.0.3...@libp2p/daemon-client-v6.0.0) (2023-04-19) + + +### ⚠ BREAKING CHANGES + +* the type of the source/sink properties have changed + +### Dependencies + +* update it-stream-types to 2.x.x ([#196](https://github.com/libp2p/js-libp2p-daemon/issues/196)) ([a09f6d5](https://github.com/libp2p/js-libp2p-daemon/commit/a09f6d58942033b08b579735aaa1537b3a324776)) +* update sibling dependencies ([db50405](https://github.com/libp2p/js-libp2p-daemon/commit/db50405ddec3a68ad265c3d3233595187bc4895d)) +* update sibling dependencies ([e0ec5ec](https://github.com/libp2p/js-libp2p-daemon/commit/e0ec5ecf5bfd7f801274d37d51c3dcce652de2ba)) + +## [@libp2p/daemon-client-v5.0.3](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-v5.0.2...@libp2p/daemon-client-v5.0.3) (2023-04-12) + + +### Dependencies + +* bump @libp2p/interface-connection from 3.1.1 to 4.0.0 ([#195](https://github.com/libp2p/js-libp2p-daemon/issues/195)) ([798ecc5](https://github.com/libp2p/js-libp2p-daemon/commit/798ecc594bc64c8e34aad13e1b9884011f0b1f29)) + +## [@libp2p/daemon-client-v5.0.2](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-v5.0.1...@libp2p/daemon-client-v5.0.2) (2023-04-03) + + +### Dependencies + +* update all it-* deps to the latest versions ([#193](https://github.com/libp2p/js-libp2p-daemon/issues/193)) ([cb0aa85](https://github.com/libp2p/js-libp2p-daemon/commit/cb0aa85bbbad651db088594622a9438a127d2a10)) + +## [@libp2p/daemon-client-v5.0.1](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-v5.0.0...@libp2p/daemon-client-v5.0.1) (2023-03-17) + + +### Dependencies + +* bump @multiformats/multiaddr from 11.6.1 to 12.0.0 ([#189](https://github.com/libp2p/js-libp2p-daemon/issues/189)) ([aaf7e2e](https://github.com/libp2p/js-libp2p-daemon/commit/aaf7e2e37423cae78cd16d8e16e06db40fdcd1e3)) + +## 
[@libp2p/daemon-client-v5.0.0](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-v4.1.0...@libp2p/daemon-client-v5.0.0) (2023-02-24) + + +### ⚠ BREAKING CHANGES + +* update pubsub subscribe method to return subscription (#186) + +### Bug Fixes + +* update pubsub subscribe method to return subscription ([#186](https://github.com/libp2p/js-libp2p-daemon/issues/186)) ([88e4bf5](https://github.com/libp2p/js-libp2p-daemon/commit/88e4bf54ee5189e808cee451f08467c7db302b8d)) + +## [@libp2p/daemon-client-v4.1.0](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-v4.0.3...@libp2p/daemon-client-v4.1.0) (2023-02-23) + + +### Features + +* add get subscribers for pubsub topics ([#184](https://github.com/libp2p/js-libp2p-daemon/issues/184)) ([c8be43e](https://github.com/libp2p/js-libp2p-daemon/commit/c8be43e5acd6a74cfdd01857343af6f6d8210d5d)) + +## [@libp2p/daemon-client-v4.0.3](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-v4.0.2...@libp2p/daemon-client-v4.0.3) (2023-02-22) + + +### Dependencies + +* bump aegir from 37.12.1 to 38.1.6 ([#183](https://github.com/libp2p/js-libp2p-daemon/issues/183)) ([6725a0a](https://github.com/libp2p/js-libp2p-daemon/commit/6725a0aeba9acb56a7530dece6c65a0f3eadfec5)) + +## [@libp2p/daemon-client-v4.0.2](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-v4.0.1...@libp2p/daemon-client-v4.0.2) (2023-02-22) + + +### Trivial Changes + +* remove lerna ([#171](https://github.com/libp2p/js-libp2p-daemon/issues/171)) ([367f912](https://github.com/libp2p/js-libp2p-daemon/commit/367f9122f2fe1c31c8de7a136cda18d024ff08d7)) +* replace err-code with CodeError ([#172](https://github.com/libp2p/js-libp2p-daemon/issues/172)) ([c330fd5](https://github.com/libp2p/js-libp2p-daemon/commit/c330fd5fabac7efb016d1f23e781ce88c38a3b37)), closes [#1269](https://github.com/libp2p/js-libp2p-daemon/issues/1269) + + +### Dependencies + +* **dev:** bump sinon from 14.0.2 to 15.0.1 ([#166](https://github.com/libp2p/js-libp2p-daemon/issues/166)) ([1702efb](https://github.com/libp2p/js-libp2p-daemon/commit/1702efb4248bea4cb9ec19c694c1caae1c0ff16d)) + +## [@libp2p/daemon-client-v4.0.1](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-v4.0.0...@libp2p/daemon-client-v4.0.1) (2023-01-07) + + +### Dependencies + +* bump @libp2p/tcp from 5.0.2 to 6.0.8 ([#165](https://github.com/libp2p/js-libp2p-daemon/issues/165)) ([fb676ab](https://github.com/libp2p/js-libp2p-daemon/commit/fb676ab66348b3c704d2385b4da0d7173bc4a04d)) + +## [@libp2p/daemon-client-v4.0.0](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-v3.0.6...@libp2p/daemon-client-v4.0.0) (2023-01-07) + + +### ⚠ BREAKING CHANGES + +* Update multiformats and related dependencies (#170) + +### Dependencies + +* Update multiformats and related dependencies ([#170](https://github.com/libp2p/js-libp2p-daemon/issues/170)) ([06744a7](https://github.com/libp2p/js-libp2p-daemon/commit/06744a77006dc77dcfb7bd860e4dc6f36a535603)) +* update sibling dependencies ([775bd83](https://github.com/libp2p/js-libp2p-daemon/commit/775bd83a63ae99c4b892f0169f76dbe39163e2d4)) + +## [@libp2p/daemon-client-v3.0.6](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-v3.0.5...@libp2p/daemon-client-v3.0.6) (2022-10-17) + + +### Dependencies + +* **dev:** bump it-all from 1.0.6 to 2.0.0 ([#148](https://github.com/libp2p/js-libp2p-daemon/issues/148)) 
([1caa500](https://github.com/libp2p/js-libp2p-daemon/commit/1caa5006157e864bcbe4efb8f9474328b08821c3)) + +## [@libp2p/daemon-client-v3.0.5](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-v3.0.4...@libp2p/daemon-client-v3.0.5) (2022-10-14) + + +### Bug Fixes + +* handle empty responses ([#145](https://github.com/libp2p/js-libp2p-daemon/issues/145)) ([0dfb823](https://github.com/libp2p/js-libp2p-daemon/commit/0dfb8236a0ab57a55fa0ebb91ac7a776a9f709da)) + +## [@libp2p/daemon-client-v3.0.4](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-v3.0.3...@libp2p/daemon-client-v3.0.4) (2022-10-14) + + +### Dependencies + +* **dev:** bump sinon-ts from 0.0.2 to 1.0.0 ([#144](https://github.com/libp2p/js-libp2p-daemon/issues/144)) ([cfc8755](https://github.com/libp2p/js-libp2p-daemon/commit/cfc8755aa1280ac4fc2aae67cf47d7b0b93f605d)) + +## [@libp2p/daemon-client-v3.0.3](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-v3.0.2...@libp2p/daemon-client-v3.0.3) (2022-10-13) + + +### Dependencies + +* update uint8arrays, protons and multiformats ([#143](https://github.com/libp2p/js-libp2p-daemon/issues/143)) ([661139c](https://github.com/libp2p/js-libp2p-daemon/commit/661139c674c9994724e32227d7d9ae2c5da1cea2)) + +## [@libp2p/daemon-client-v3.0.2](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-v3.0.1...@libp2p/daemon-client-v3.0.2) (2022-10-07) + + +### Dependencies + +* bump @libp2p/tcp from 3.0.0 to 4.0.1 ([4e64dce](https://github.com/libp2p/js-libp2p-daemon/commit/4e64dce5e6d18dadaa54a20fff7b2da8bbca11ae)) +* **dev:** bump @libp2p/components from 2.1.1 to 3.0.1 ([#133](https://github.com/libp2p/js-libp2p-daemon/issues/133)) ([6d75a57](https://github.com/libp2p/js-libp2p-daemon/commit/6d75a5742040a594c02aa92ee6acf4ef9080ebac)) +* **dev:** bump @libp2p/interface-mocks from 4.0.3 to 6.0.0 ([#130](https://github.com/libp2p/js-libp2p-daemon/issues/130)) ([3807d1d](https://github.com/libp2p/js-libp2p-daemon/commit/3807d1dd9b037938dbe3dd9e9fb2560489d5d603)) + +## [@libp2p/daemon-client-v3.0.1](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-v3.0.0...@libp2p/daemon-client-v3.0.1) (2022-09-21) + + +### Dependencies + +* update @multiformats/multiaddr to 11.0.0 ([#128](https://github.com/libp2p/js-libp2p-daemon/issues/128)) ([885d901](https://github.com/libp2p/js-libp2p-daemon/commit/885d9013d82a62e6756b06350932df1242a13296)) + +## [@libp2p/daemon-client-v3.0.0](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-v2.0.4...@libp2p/daemon-client-v3.0.0) (2022-09-09) + + +### ⚠ BREAKING CHANGES + +* the stream type returned by `client.openStream` has changed + +### Bug Fixes + +* allow opening remote streams ([#126](https://github.com/libp2p/js-libp2p-daemon/issues/126)) ([361cc57](https://github.com/libp2p/js-libp2p-daemon/commit/361cc5750de505ab0381ae43609c67d5d4f659a7)) + + +### Dependencies + +* update sibling dependencies ([56711c4](https://github.com/libp2p/js-libp2p-daemon/commit/56711c4f14b0cf2370b8612fe07d42ed2ac8363c)) +* update sibling dependencies ([c3ebd58](https://github.com/libp2p/js-libp2p-daemon/commit/c3ebd588abc36ef45667e8e4e4c0e220303b7510)) + +## [@libp2p/daemon-client-v2.0.4](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-v2.0.3...@libp2p/daemon-client-v2.0.4) (2022-08-10) + + +### Bug Fixes + +* update all deps ([#124](https://github.com/libp2p/js-libp2p-daemon/issues/124)) 
([5e46e1e](https://github.com/libp2p/js-libp2p-daemon/commit/5e46e1e26c23428046a6007ab158420d3d830145)) + +## [@libp2p/daemon-client-v2.0.3](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-v2.0.2...@libp2p/daemon-client-v2.0.3) (2022-07-31) + + +### Trivial Changes + +* update project config ([#111](https://github.com/libp2p/js-libp2p-daemon/issues/111)) ([345e663](https://github.com/libp2p/js-libp2p-daemon/commit/345e663e34278e780fc2f3a6b595294f925c4521)) + + +### Dependencies + +* update uint8arraylist and protons deps ([#115](https://github.com/libp2p/js-libp2p-daemon/issues/115)) ([34a8334](https://github.com/libp2p/js-libp2p-daemon/commit/34a83340ba855a9c08319ae1cd735dfa8b71c248)) + +## [@libp2p/daemon-client-v2.0.2](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-v2.0.1...@libp2p/daemon-client-v2.0.2) (2022-06-17) + + +### Trivial Changes + +* update deps ([#105](https://github.com/libp2p/js-libp2p-daemon/issues/105)) ([0bdab0e](https://github.com/libp2p/js-libp2p-daemon/commit/0bdab0ee254e32d6dca0e5fe239d4ef16db41b87)) + +## [@libp2p/daemon-client-v2.0.1](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-v2.0.0...@libp2p/daemon-client-v2.0.1) (2022-06-15) + + +### Trivial Changes + +* update deps ([#103](https://github.com/libp2p/js-libp2p-daemon/issues/103)) ([2bfaa37](https://github.com/libp2p/js-libp2p-daemon/commit/2bfaa37e2f056dcd5de5a3882b77f52553c595d4)) + +## [@libp2p/daemon-client-v2.0.0](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-v1.0.5...@libp2p/daemon-client-v2.0.0) (2022-06-15) + + +### ⚠ BREAKING CHANGES + +* uses new single-issue libp2p interface modules + +### Features + +* update to latest libp2p interfaces ([#102](https://github.com/libp2p/js-libp2p-daemon/issues/102)) ([f5e9121](https://github.com/libp2p/js-libp2p-daemon/commit/f5e91210654ab3c411e316c1c657356c037a0f6a)) + +## [@libp2p/daemon-client-v1.0.5](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-v1.0.4...@libp2p/daemon-client-v1.0.5) (2022-05-25) + + +### Trivial Changes + +* update docs ([#91](https://github.com/libp2p/js-libp2p-daemon/issues/91)) ([5b072ff](https://github.com/libp2p/js-libp2p-daemon/commit/5b072ff89f30fd6cf55a3387bf0961c8ad78a22f)), closes [#83](https://github.com/libp2p/js-libp2p-daemon/issues/83) + +## [@libp2p/daemon-client-v1.0.4](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-v1.0.3...@libp2p/daemon-client-v1.0.4) (2022-05-23) + + +### Bug Fixes + +* update deps ([#90](https://github.com/libp2p/js-libp2p-daemon/issues/90)) ([b50eba3](https://github.com/libp2p/js-libp2p-daemon/commit/b50eba3770e47969dbc30cbcf87c41672cd9c175)) + +## [@libp2p/daemon-client-v1.0.3](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-v1.0.2...@libp2p/daemon-client-v1.0.3) (2022-05-10) + + +### Bug Fixes + +* encode enums correctly ([#86](https://github.com/libp2p/js-libp2p-daemon/issues/86)) ([6ce4633](https://github.com/libp2p/js-libp2p-daemon/commit/6ce4633f3db41ab66f9b8b1abbe84955dde3e9be)) + +## [@libp2p/daemon-client-v1.0.2](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-v1.0.1...@libp2p/daemon-client-v1.0.2) (2022-04-20) + + +### Bug Fixes + +* update interfaces and deps ([#84](https://github.com/libp2p/js-libp2p-daemon/issues/84)) ([25173d5](https://github.com/libp2p/js-libp2p-daemon/commit/25173d5b2edf0e9dd9132707d349cdc862caecdb)) + +## 
[@libp2p/daemon-client-v1.0.1](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-client-v1.0.0...@libp2p/daemon-client-v1.0.1) (2022-04-07) + + +### Bug Fixes + +* remove protobufjs and replace with protons ([#81](https://github.com/libp2p/js-libp2p-daemon/issues/81)) ([78dd02a](https://github.com/libp2p/js-libp2p-daemon/commit/78dd02a679e55f22c7e24c1ee2b6f92a4679a0b9)) + + +### Trivial Changes + +* update aegir to latest version ([#80](https://github.com/libp2p/js-libp2p-daemon/issues/80)) ([3a98959](https://github.com/libp2p/js-libp2p-daemon/commit/3a98959617d9c19bba9fb064defee3d51acfcc29)) + +## @libp2p/daemon-client-v1.0.0 (2022-03-28) + + +### ⚠ BREAKING CHANGES + +* This module is now ESM only + +### Features + +* convert to typescript ([#78](https://github.com/libp2p/js-libp2p-daemon/issues/78)) ([f18b2a4](https://github.com/libp2p/js-libp2p-daemon/commit/f18b2a45871a2704db51b03e8583eefdcd13554c)) + +# [0.11.0](https://github.com/libp2p/js-libp2p-daemon-client/compare/v0.10.0...v0.11.0) (2022-01-17) + + +### Features + +* async peerstore ([#110](https://github.com/libp2p/js-libp2p-daemon-client/issues/110)) ([41dc8a5](https://github.com/libp2p/js-libp2p-daemon-client/commit/41dc8a59ce14447b9b5ab7ba9930f4140bda3652)) + + +### BREAKING CHANGES + +* peerstore methods are now all async + + + +# [0.10.0](https://github.com/libp2p/js-libp2p-daemon-client/compare/v0.9.0...v0.10.0) (2021-12-29) + + +### chore + +* update deps ([#103](https://github.com/libp2p/js-libp2p-daemon-client/issues/103)) ([cdbc4b2](https://github.com/libp2p/js-libp2p-daemon-client/commit/cdbc4b22f3599f33911be1b406b02d06515389b8)) + + +### BREAKING CHANGES + +* only node15+ is supported + + + +# [0.9.0](https://github.com/libp2p/js-libp2p-daemon-client/compare/v0.7.0...v0.9.0) (2021-11-18) + + + +# [0.7.0](https://github.com/libp2p/js-libp2p-daemon-client/compare/v0.6.0...v0.7.0) (2021-07-30) + + + +# [0.6.0](https://github.com/libp2p/js-libp2p-daemon-client/compare/v0.5.0...v0.6.0) (2021-05-04) + + + + +# [0.5.0](https://github.com/libp2p/js-libp2p-daemon-client/compare/v0.4.0...v0.5.0) (2020-08-23) + + +### Bug Fixes + +* replace node buffers with uint8arrays ([#42](https://github.com/libp2p/js-libp2p-daemon-client/issues/42)) ([33be887](https://github.com/libp2p/js-libp2p-daemon-client/commit/33be887)) + + +### BREAKING CHANGES + +* - All deps of this module now use uint8arrays in place of node buffers +- DHT keys/values are Uint8Arrays, not Strings or Buffers + +* chore: bump daemon dep + +Co-authored-by: Jacob Heun + + + + +# [0.4.0](https://github.com/libp2p/js-libp2p-daemon-client/compare/v0.3.1...v0.4.0) (2020-06-08) + + + + +## [0.3.1](https://github.com/libp2p/js-libp2p-daemon-client/compare/v0.3.0...v0.3.1) (2020-04-20) + + + + +# [0.3.0](https://github.com/libp2p/js-libp2p-daemon-client/compare/v0.2.2...v0.3.0) (2020-01-31) + + +### Chores + +* update deps ([#18](https://github.com/libp2p/js-libp2p-daemon-client/issues/18)) ([61813b9](https://github.com/libp2p/js-libp2p-daemon-client/commit/61813b9)) + + +### BREAKING CHANGES + +* api changed as attach is not needed anymore + +* chore: apply suggestions from code review + +Co-Authored-By: Jacob Heun + +* chore: update aegir + +* chore: update daemon version + +Co-authored-by: Jacob Heun + + + + +## [0.2.2](https://github.com/libp2p/js-libp2p-daemon-client/compare/v0.2.1...v0.2.2) (2019-09-05) + + + + +## [0.2.1](https://github.com/libp2p/js-libp2p-daemon-client/compare/v0.2.0...v0.2.1) (2019-07-09) + + +### Bug Fixes + +* 
**client.connect:** handle empty response ([#13](https://github.com/libp2p/js-libp2p-daemon-client/issues/13)) ([ace789d](https://github.com/libp2p/js-libp2p-daemon-client/commit/ace789d)) +* **client.connect:** handle unspecified error in response ([#12](https://github.com/libp2p/js-libp2p-daemon-client/issues/12)) ([7db681b](https://github.com/libp2p/js-libp2p-daemon-client/commit/7db681b)) + + + + +# [0.2.0](https://github.com/libp2p/js-libp2p-daemon-client/compare/v0.1.2...v0.2.0) (2019-07-09) + + +### Bug Fixes + +* use error as field name instead of ErrorResponse ([#14](https://github.com/libp2p/js-libp2p-daemon-client/issues/14)) ([0ff9eda](https://github.com/libp2p/js-libp2p-daemon-client/commit/0ff9eda)) + + +### BREAKING CHANGES + +* errors property name is now `error` instead of `ErrorResponse` + + + + +## [0.1.2](https://github.com/libp2p/js-libp2p-daemon-client/compare/v0.1.1...v0.1.2) (2019-03-29) + + +### Bug Fixes + +* dht find providers stream ([24eb727](https://github.com/libp2p/js-libp2p-daemon-client/commit/24eb727)) + + + + +## [0.1.1](https://github.com/libp2p/js-libp2p-daemon-client/compare/v0.1.0...v0.1.1) (2019-03-25) + + +### Bug Fixes + +* code review feedback ([7fd02d9](https://github.com/libp2p/js-libp2p-daemon-client/commit/7fd02d9)) + + +### Features + +* pubsub ([c485f50](https://github.com/libp2p/js-libp2p-daemon-client/commit/c485f50)) + + + + +# [0.1.0](https://github.com/libp2p/js-libp2p-daemon-client/compare/v0.0.4...v0.1.0) (2019-03-22) + + +### Bug Fixes + +* update code to work with latest daemon ([#6](https://github.com/libp2p/js-libp2p-daemon-client/issues/6)) ([0ada86c](https://github.com/libp2p/js-libp2p-daemon-client/commit/0ada86c)) + + + + +## [0.0.4](https://github.com/libp2p/js-libp2p-daemon-client/compare/0.0.3...v0.0.4) (2019-03-15) + + +### Features + +* streams ([7cefefd](https://github.com/libp2p/js-libp2p-daemon-client/commit/7cefefd)) + + + + +## [0.0.3](https://github.com/libp2p/js-libp2p-daemon-client/compare/0.0.2...0.0.3) (2019-02-13) + + +### Bug Fixes + +* connect should use peer id in bytes ([b9e4e44](https://github.com/libp2p/js-libp2p-daemon-client/commit/b9e4e44)) + + + + +## [0.0.2](https://github.com/libp2p/js-libp2p-daemon-client/compare/e748b7c...0.0.2) (2019-02-11) + + +### Bug Fixes + +* code review ([6ae7ce0](https://github.com/libp2p/js-libp2p-daemon-client/commit/6ae7ce0)) +* code review ([80e3d62](https://github.com/libp2p/js-libp2p-daemon-client/commit/80e3d62)) +* main on package.json ([8fcc62b](https://github.com/libp2p/js-libp2p-daemon-client/commit/8fcc62b)) + + +### Features + +* initial implementation ([e748b7c](https://github.com/libp2p/js-libp2p-daemon-client/commit/e748b7c)) diff --git a/packages/libp2p-daemon-client/CODE_OF_CONDUCT.md b/packages/libp2p-daemon-client/CODE_OF_CONDUCT.md new file mode 100644 index 0000000000..6b0fa54c54 --- /dev/null +++ b/packages/libp2p-daemon-client/CODE_OF_CONDUCT.md @@ -0,0 +1,3 @@ +# Contributor Code of Conduct + +This project follows the [`IPFS Community Code of Conduct`](https://github.com/ipfs/community/blob/master/code-of-conduct.md) diff --git a/packages/libp2p-daemon-client/LICENSE-APACHE b/packages/libp2p-daemon-client/LICENSE-APACHE new file mode 100644 index 0000000000..b09cd7856d --- /dev/null +++ b/packages/libp2p-daemon-client/LICENSE-APACHE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/packages/libp2p-daemon-client/LICENSE-MIT b/packages/libp2p-daemon-client/LICENSE-MIT new file mode 100644 index 0000000000..72dc60d84b --- /dev/null +++ b/packages/libp2p-daemon-client/LICENSE-MIT @@ -0,0 +1,19 @@ +The MIT License (MIT) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/packages/libp2p-daemon-client/README.md b/packages/libp2p-daemon-client/README.md new file mode 100644 index 0000000000..edbe229b4d --- /dev/null +++ b/packages/libp2p-daemon-client/README.md @@ -0,0 +1,63 @@ +# @libp2p/daemon-client + +[![libp2p.io](https://img.shields.io/badge/project-libp2p-yellow.svg?style=flat-square)](http://libp2p.io/) +[![Discuss](https://img.shields.io/discourse/https/discuss.libp2p.io/posts.svg?style=flat-square)](https://discuss.libp2p.io) +[![codecov](https://img.shields.io/codecov/c/github/libp2p/js-libp2p-daemon.svg?style=flat-square)](https://codecov.io/gh/libp2p/js-libp2p-daemon) +[![CI](https://img.shields.io/github/actions/workflow/status/libp2p/js-libp2p-daemon/js-test-and-release.yml?branch=main\&style=flat-square)](https://github.com/libp2p/js-libp2p-daemon/actions/workflows/js-test-and-release.yml?query=branch%3Amain) + +> libp2p-daemon client implementation + +# Install + +```console +$ npm i @libp2p/daemon-client +``` + +# Specs + +The specs for the daemon are currently housed in the go implementation. You can read them at [libp2p/go-libp2p-daemon](https://github.com/libp2p/go-libp2p-daemon/blob/master/specs/README.md) + +# Usage + +## Run a daemon process + +There are currently two implementations of the `libp2p-daemon`: + +- [js-libp2p-daemon](https://github.com/libp2p/js-libp2p-daemon) +- [go-libp2p-daemon](https://github.com/libp2p/go-libp2p-daemon) + +## Interact with the daemon process using the client + +```js +import { createClient } from '@libp2p/daemon-client' +import { multiaddr } from '@multiformats/multiaddr' + +const serverAddr = multiaddr('/ip4/127.0.0.1/tcp/1234') +const client = createClient(serverAddr) + +// interact with the daemon +let identify +try { + identify = await client.identify() +} catch (err) { + // ... 
+} + +// close the socket +await client.close() +``` + +# API Docs + +- + +# License + +Licensed under either of + +- Apache 2.0, ([LICENSE-APACHE](https://github.com/libp2p/js-libp2p-daemon/blob/main/packages/libp2p-daemon-client/LICENSE-APACHE) / ) +- MIT ([LICENSE-MIT](https://github.com/libp2p/js-libp2p-daemon/blob/main/packages/libp2p-daemon-client/LICENSE-MIT) / ) + +# Contribution + +Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions. diff --git a/packages/libp2p-daemon-client/package.json b/packages/libp2p-daemon-client/package.json new file mode 100644 index 0000000000..ce6f8bda4f --- /dev/null +++ b/packages/libp2p-daemon-client/package.json @@ -0,0 +1,160 @@ +{ + "name": "@libp2p/daemon-client", + "version": "9.0.8", + "description": "libp2p-daemon client implementation", + "license": "Apache-2.0 OR MIT", + "homepage": "https://github.com/libp2p/js-libp2p-daemon/tree/main/packages/libp2p-daemon-client#readme", + "repository": { + "type": "git", + "url": "git+https://github.com/libp2p/js-libp2p-daemon.git" + }, + "bugs": { + "url": "https://github.com/libp2p/js-libp2p-daemon/issues" + }, + "publishConfig": { + "access": "public", + "provenance": true + }, + "keywords": [ + "libp2p" + ], + "type": "module", + "types": "./dist/src/index.d.ts", + "files": [ + "src", + "dist", + "!dist/test", + "!**/*.tsbuildinfo" + ], + "exports": { + ".": { + "types": "./src/index.d.ts", + "import": "./dist/src/index.js" + } + }, + "release": { + "branches": [ + "main" + ], + "plugins": [ + [ + "@semantic-release/commit-analyzer", + { + "preset": "conventionalcommits", + "releaseRules": [ + { + "breaking": true, + "release": "major" + }, + { + "revert": true, + "release": "patch" + }, + { + "type": "feat", + "release": "minor" + }, + { + "type": "fix", + "release": "patch" + }, + { + "type": "docs", + "release": "patch" + }, + { + "type": "test", + "release": "patch" + }, + { + "type": "deps", + "release": "patch" + }, + { + "scope": "no-release", + "release": false + } + ] + } + ], + [ + "@semantic-release/release-notes-generator", + { + "preset": "conventionalcommits", + "presetConfig": { + "types": [ + { + "type": "feat", + "section": "Features" + }, + { + "type": "fix", + "section": "Bug Fixes" + }, + { + "type": "chore", + "section": "Trivial Changes" + }, + { + "type": "docs", + "section": "Documentation" + }, + { + "type": "deps", + "section": "Dependencies" + }, + { + "type": "test", + "section": "Tests" + } + ] + } + } + ], + "@semantic-release/changelog", + "@semantic-release/npm", + "@semantic-release/github", + [ + "@semantic-release/git", + { + "assets": [ + "CHANGELOG.md", + "package.json" + ] + } + ] + ] + }, + "scripts": { + "clean": "aegir clean", + "lint": "aegir lint", + "dep-check": "aegir dep-check", + "build": "aegir build", + "pretest": "npm run build", + "test": "aegir test -t node", + "test:node": "aegir test -t node", + "release": "aegir release" + }, + "dependencies": { + "@libp2p/daemon-protocol": "^7.0.0", + "@libp2p/interface": "^2.10.5", + "@libp2p/logger": "^5.1.21", + "@libp2p/peer-id": "^5.1.8", + "@libp2p/tcp": "^10.1.18", + "@libp2p/utils": "^6.7.1", + "@multiformats/multiaddr": "^12.5.1", + "multiformats": "^13.4.0" + }, + "devDependencies": { + "@chainsafe/libp2p-gossipsub": "^14.1.1", + "@libp2p/daemon-server": "^8.0.0", + "@libp2p/kad-dht": "^15.1.10", + "aegir": "^47.0.21", + 
"it-all": "^3.0.9", + "p-event": "^6.0.1", + "sinon": "^21.0.0", + "sinon-ts": "^2.0.0", + "uint8arraylist": "^2.4.8", + "uint8arrays": "^5.1.0" + } +} diff --git a/packages/libp2p-daemon-client/src/dht.ts b/packages/libp2p-daemon-client/src/dht.ts new file mode 100644 index 0000000000..a080d9bd4f --- /dev/null +++ b/packages/libp2p-daemon-client/src/dht.ts @@ -0,0 +1,275 @@ +import { + Request, + Response, + DHTRequest, + DHTResponse +} from '@libp2p/daemon-protocol' +import { InvalidMessageError, InvalidParametersError, ProtocolError, isPeerId } from '@libp2p/interface' +import { logger } from '@libp2p/logger' +import { peerIdFromMultihash } from '@libp2p/peer-id' +import { multiaddr } from '@multiformats/multiaddr' +import { CID } from 'multiformats/cid' +import * as Digest from 'multiformats/hashes/digest' +import { OperationFailedError } from './index.js' +import type { DaemonClient } from './index.js' +import type { PeerId, PeerInfo } from '@libp2p/interface' + +const log = logger('libp2p:daemon-client:dht') + +export class DHT { + private readonly client: DaemonClient + + constructor (client: DaemonClient) { + this.client = client + } + + /** + * Write a value to a key in the DHT + */ + async put (key: Uint8Array, value: Uint8Array): Promise { + if (!(key instanceof Uint8Array)) { + throw new InvalidParametersError('invalid key received') + } + + if (!(value instanceof Uint8Array)) { + throw new InvalidParametersError('value received is not a Uint8Array') + } + + const sh = await this.client.send({ + type: Request.Type.DHT, + dht: { + type: DHTRequest.Type.PUT_VALUE, + key, + value + } + }) + + const response = await sh.read(Response) + + log('read', response) + + await sh.unwrap().closeWrite() + + if (response.type !== Response.Type.OK) { + throw new ProtocolError(response.error?.msg ?? 'DHT put failed') + } + } + + /** + * Query the DHT for a value stored at a key in the DHT + */ + async get (key: Uint8Array): Promise { + if (!(key instanceof Uint8Array)) { + throw new InvalidParametersError('invalid key received') + } + + const sh = await this.client.send({ + type: Request.Type.DHT, + dht: { + type: DHTRequest.Type.GET_VALUE, + key + } + }) + + const response = await sh.read(Response) + + await sh.unwrap().closeWrite() + + if (response.type !== Response.Type.OK) { + throw new OperationFailedError(response.error?.msg ?? 'DHT get failed') + } + + if (response.dht?.value == null) { + throw new OperationFailedError('Invalid DHT get response') + } + + return response.dht.value + } + + /** + * Query the DHT for a given peer's known addresses. + */ + async findPeer (peerId: PeerId): Promise { + if (!isPeerId(peerId)) { + throw new InvalidParametersError('invalid peer id received') + } + + const sh = await this.client.send({ + type: Request.Type.DHT, + dht: { + type: DHTRequest.Type.FIND_PEER, + peer: peerId.toMultihash().bytes + } + }) + + const response = await sh.read(Response) + + await sh.unwrap().closeWrite() + + if (response.type !== Response.Type.OK) { + throw new OperationFailedError(response.error?.msg ?? 
'DHT find peer failed') + } + + if (response.dht?.peer?.addrs == null) { + throw new OperationFailedError('Invalid response') + } + + return { + id: peerIdFromMultihash(Digest.decode(response.dht.peer.id)), + multiaddrs: response.dht.peer.addrs.map((a) => multiaddr(a)) + } + } + + /** + * Announce to the network that the peer have data addressed by the provided CID + */ + async provide (cid: CID): Promise { + if (cid == null || CID.asCID(cid) == null) { + throw new InvalidParametersError('invalid cid received') + } + + const sh = await this.client.send({ + type: Request.Type.DHT, + dht: { + type: DHTRequest.Type.PROVIDE, + cid: cid.bytes + } + }) + + const response = await sh.read(Response) + + await sh.unwrap().closeWrite() + + if (response.type !== Response.Type.OK) { + throw new OperationFailedError(response.error?.msg ?? 'DHT provide failed') + } + } + + /** + * Query the DHT for peers that have a piece of content, identified by a CID + */ + async * findProviders (cid: CID, count: number = 1): AsyncIterable { + if (cid == null || CID.asCID(cid) == null) { + throw new InvalidParametersError('invalid cid received') + } + + const sh = await this.client.send({ + type: Request.Type.DHT, + dht: { + type: DHTRequest.Type.FIND_PROVIDERS, + cid: cid.bytes, + count + } + }) + + // stream begin message + const response = await sh.read(Response) + + if (response.type !== Response.Type.OK) { + await sh.unwrap().closeWrite() + throw new OperationFailedError(response.error?.msg ?? 'DHT find providers failed') + } + + while (true) { + const dhtResponse = await sh.read(DHTResponse) + + // Stream end + if (dhtResponse.type === DHTResponse.Type.END) { + await sh.unwrap().closeWrite() + return + } + + // Stream values + if (dhtResponse.type === DHTResponse.Type.VALUE && dhtResponse.peer?.addrs != null) { + yield { + id: peerIdFromMultihash(Digest.decode(dhtResponse.peer.id)), + multiaddrs: dhtResponse.peer.addrs.map((a) => multiaddr(a)) + } + } else { + // Unexpected message received + await sh.unwrap().closeWrite() + throw new ProtocolError('unexpected message received') + } + } + } + + /** + * Query the DHT routing table for peers that are closest to a provided key. + */ + async * getClosestPeers (key: Uint8Array): AsyncIterable { + if (!(key instanceof Uint8Array)) { + throw new InvalidParametersError('invalid key received') + } + + const sh = await this.client.send({ + type: Request.Type.DHT, + dht: { + type: DHTRequest.Type.GET_CLOSEST_PEERS, + key + } + }) + + // stream begin message + const response = await sh.read(Response) + + if (response.type !== Response.Type.OK) { + await sh.unwrap().closeWrite() + throw new OperationFailedError(response.error?.msg ?? 'DHT find providers failed') + } + + while (true) { + const dhtResponse = await sh.read(DHTResponse) + + // Stream end + if (dhtResponse.type === DHTResponse.Type.END) { + await sh.unwrap().closeWrite() + return + } + + // Stream values + if (dhtResponse.type === DHTResponse.Type.VALUE && dhtResponse.value != null) { + const peerId = peerIdFromMultihash(Digest.decode(dhtResponse.value)) + + yield { + id: peerId, + multiaddrs: [] + } + } else { + // Unexpected message received + await sh.unwrap().closeWrite() + throw new InvalidMessageError('unexpected message received') + } + } + } + + /** + * Query the DHT routing table for a given peer's public key. 
+ */ + async getPublicKey (peerId: PeerId): Promise { + if (!isPeerId(peerId)) { + throw new InvalidParametersError('invalid peer id received') + } + + const sh = await this.client.send({ + type: Request.Type.DHT, + dht: { + type: DHTRequest.Type.GET_PUBLIC_KEY, + peer: peerId.toMultihash().bytes + } + }) + + const response = await sh.read(Response) + + await sh.unwrap().closeWrite() + + if (response.type !== Response.Type.OK) { + throw new OperationFailedError(response.error?.msg ?? 'DHT get public key failed') + } + + if (response.dht == null) { + throw new InvalidMessageError('Invalid response') + } + + return response.dht.value + } +} diff --git a/packages/libp2p-daemon-client/src/index.ts b/packages/libp2p-daemon-client/src/index.ts new file mode 100644 index 0000000000..805f9c6d45 --- /dev/null +++ b/packages/libp2p-daemon-client/src/index.ts @@ -0,0 +1,312 @@ +import { Request, Response, StreamInfo } from '@libp2p/daemon-protocol' +import { StreamHandler } from '@libp2p/daemon-protocol/stream-handler' +import { PassThroughUpgrader } from '@libp2p/daemon-protocol/upgrader' +import { InvalidParametersError, isPeerId } from '@libp2p/interface' +import { defaultLogger, logger } from '@libp2p/logger' +import { peerIdFromMultihash } from '@libp2p/peer-id' +import { tcp } from '@libp2p/tcp' +import { pbStream } from '@libp2p/utils' +import { multiaddr, isMultiaddr } from '@multiformats/multiaddr' +import * as Digest from 'multiformats/hashes/digest' +import { DHT } from './dht.js' +import { Pubsub } from './pubsub.js' +import type { PSMessage } from '@libp2p/daemon-protocol' +import type { Stream, PeerId, MultiaddrConnection, PeerInfo, Transport, Listener } from '@libp2p/interface' +import type { ProtobufStream } from '@libp2p/utils' +import type { Multiaddr } from '@multiformats/multiaddr' +import type { CID } from 'multiformats/cid' + +const log = logger('libp2p:daemon-client') + +export class OperationFailedError extends Error { + constructor (message = 'Operation failed') { + super(message) + this.name = 'OperationFailedError' + } +} + +class Client implements DaemonClient { + private readonly multiaddr: Multiaddr + public dht: DHT + public pubsub: Pubsub + private readonly tcp: Transport + + constructor (addr: Multiaddr) { + this.multiaddr = addr + this.tcp = tcp()({ + logger: defaultLogger() + }) + this.dht = new DHT(this) + this.pubsub = new Pubsub(this) + } + + /** + * Connects to a daemon at the unix socket path the daemon + * was created with + * + * @async + * @returns {MultiaddrConnection} + */ + async connectDaemon (signal?: AbortSignal): Promise { + // @ts-expect-error because we use a passthrough upgrader, + // this is actually a MultiaddrConnection and not a Connection + return this.tcp.dial(this.multiaddr, { + upgrader: new PassThroughUpgrader(), + signal: signal ?? AbortSignal.timeout(10_000) + }) + } + + /** + * Sends the request to the daemon and returns a stream. This + * should only be used when sending daemon requests. + */ + async send (request: Request): Promise> { + const maConn = await this.connectDaemon() + + const subtype = request.pubsub?.type ?? request.dht?.type ?? request.peerStore?.type ?? 
'' + log('send', request.type, subtype) + + const pb = pbStream(maConn) + await pb.write(request, Request) + + return pb + } + + /** + * Connect requests a connection to a known peer on a given set of addresses + */ + async connect (peerId: PeerId, addrs: Multiaddr[]): Promise { + if (!isPeerId(peerId)) { + throw new InvalidParametersError('invalid peer id received') + } + + if (!Array.isArray(addrs)) { + throw new InvalidParametersError('addrs received are not in an array') + } + + addrs.forEach((addr) => { + if (!isMultiaddr(addr)) { + throw new InvalidParametersError('received an address that is not a multiaddr') + } + }) + + const sh = await this.send({ + type: Request.Type.CONNECT, + connect: { + peer: peerId.toMultihash().bytes, + addrs: addrs.map((a) => a.bytes) + } + }) + + const response = await sh.read(Response) + + if (response.type !== Response.Type.OK) { + const errResponse = response.error ?? { msg: 'unspecified' } + throw new OperationFailedError(errResponse.msg ?? 'unspecified') + } + + await sh.unwrap().closeWrite() + } + + /** + * @typedef {object} IdentifyResponse + * @property {PeerId} peerId + * @property {Array.} addrs + */ + + /** + * Identify queries the daemon for its peer ID and listen addresses. + */ + async identify (): Promise { + const sh = await this.send({ + type: Request.Type.IDENTIFY + }) + + const response = await sh.read(Response) + + if (response.type !== Response.Type.OK) { + throw new OperationFailedError(response.error?.msg ?? 'Identify failed') + } + + if (response.identify?.addrs == null) { + throw new OperationFailedError('Invalid response') + } + + const peerId = peerIdFromMultihash(Digest.decode(response.identify?.id)) + const addrs = response.identify.addrs.map((a) => multiaddr(a)) + + await sh.unwrap().closeWrite() + + return ({ peerId, addrs }) + } + + /** + * Get a list of IDs of peers the node is connected to + */ + async listPeers (): Promise { + const sh = await this.send({ + type: Request.Type.LIST_PEERS + }) + + const response = await sh.read(Response) + + if (response.type !== Response.Type.OK) { + throw new OperationFailedError(response.error?.msg ?? 'List peers failed') + } + + await sh.unwrap().closeWrite() + + return response.peers.map((peer) => peerIdFromMultihash(Digest.decode(peer.id))) + } + + /** + * Initiate an outbound stream to a peer on one of a set of protocols. + */ + async openStream (peerId: PeerId, protocol: string): Promise { + if (!isPeerId(peerId)) { + throw new InvalidParametersError('invalid peer id received') + } + + if (typeof protocol !== 'string') { + throw new InvalidParametersError('invalid protocol received') + } + + const sh = await this.send({ + type: Request.Type.STREAM_OPEN, + streamOpen: { + peer: peerId.toMultihash().bytes, + proto: [protocol] + } + }) + + const response = await sh.read(Response) + + if (response.type !== Response.Type.OK) { + const err = new OperationFailedError(response.error?.msg ?? 
'Open stream failed') + sh.unwrap().abort(err) + throw err + } + + return sh.unwrap() + } + + /** + * Register a handler for inbound streams on a given protocol + */ + async registerStreamHandler (protocol: string, handler: StreamHandlerFunction): Promise { + if (typeof protocol !== 'string') { + throw new InvalidParametersError('invalid protocol received') + } + + // open a tcp port, pipe any data from it to the handler function + const listener = this.tcp.createListener({ + upgrader: new PassThroughUpgrader((maConn) => { + this.onConnection(protocol, listener, handler, maConn) + }) + }) + await listener.listen(multiaddr('/ip4/127.0.0.1/tcp/0')) + const address = listener.getAddrs()[0] + + if (address == null) { + throw new OperationFailedError('Could not listen on port') + } + + const sh = await this.send({ + type: Request.Type.STREAM_HANDLER, + streamHandler: { + addr: address.bytes, + proto: [protocol] + } + }) + + const response = await sh.read(Response) + + await sh.unwrap().closeWrite() + + if (response.type !== Response.Type.OK) { + throw new OperationFailedError(response.error?.msg ?? 'Register stream handler failed') + } + } + + private onConnection (protocol: string, listener: Listener, handler: StreamHandlerFunction, connection: MultiaddrConnection): void { + Promise.resolve() + .then(async () => { + const sh = new StreamHandler({ + stream: connection + }) + const message = await sh.read() + + if (message == null) { + throw new OperationFailedError('Could not read open stream response') + } + + const response = StreamInfo.decode(message) + + if (response.proto !== protocol) { + throw new OperationFailedError('Incorrect protocol') + } + + // @ts-expect-error because we are using a passthrough upgrader, this is a MultiaddrConnection + await handler(sh.rest()) + }) + .catch(err => { + connection.abort(err) + }) + .finally(() => { + connection.closeWrite() + .catch(err => { + log.error(err) + }) + listener.close() + .catch(err => { + log.error(err) + }) + }) + } +} + +export interface IdentifyResult { + peerId: PeerId + addrs: Multiaddr[] +} + +export interface StreamHandlerFunction { + (stream: Stream): Promise +} + +export interface DHTClient { + put(key: Uint8Array, value: Uint8Array): Promise + get(key: Uint8Array): Promise + provide(cid: CID): Promise + findProviders(cid: CID, count?: number): AsyncIterable + findPeer(peerId: PeerId): Promise + getClosestPeers(key: Uint8Array): AsyncIterable +} + +export interface Subscription { + messages(): AsyncIterable + cancel(): Promise +} + +export interface PubSubClient { + publish(topic: string, data: Uint8Array): Promise + subscribe(topic: string): Promise + getTopics(): Promise + getSubscribers(topic: string): Promise +} + +export interface DaemonClient { + identify(): Promise + listPeers(): Promise + connect(peerId: PeerId, addrs: Multiaddr[]): Promise + dht: DHTClient + pubsub: PubSubClient + + send(request: Request): Promise> + openStream(peerId: PeerId, protocol: string): Promise + registerStreamHandler(protocol: string, handler: StreamHandlerFunction): Promise +} + +export function createClient (multiaddr: Multiaddr): DaemonClient { + return new Client(multiaddr) +} diff --git a/packages/libp2p-daemon-client/src/pubsub.ts b/packages/libp2p-daemon-client/src/pubsub.ts new file mode 100644 index 0000000000..ee694ac859 --- /dev/null +++ b/packages/libp2p-daemon-client/src/pubsub.ts @@ -0,0 +1,145 @@ +import { + Request, + Response, + PSRequest, + PSMessage +} from '@libp2p/daemon-protocol' +import { InvalidParametersError } from 
'@libp2p/interface' +import { peerIdFromMultihash } from '@libp2p/peer-id' +import * as Digest from 'multiformats/hashes/digest' +import { OperationFailedError } from './index.js' +import type { DaemonClient, Subscription } from './index.js' +import type { PeerId } from '@libp2p/interface' + +export class Pubsub { + private readonly client: DaemonClient + + constructor (client: DaemonClient) { + this.client = client + } + + /** + * Get a list of topics the node is subscribed to. + * + * @returns {Array} topics + */ + async getTopics (): Promise<string[]> { + const sh = await this.client.send({ + type: Request.Type.PUBSUB, + pubsub: { + type: PSRequest.Type.GET_TOPICS + } + }) + + const response = await sh.read(Response) + + await sh.unwrap().closeWrite() + + if (response.type !== Response.Type.OK) { + throw new OperationFailedError(response.error?.msg ?? 'Pubsub get topics failed') + } + + if (response.pubsub?.topics == null) { + throw new OperationFailedError('Invalid response') + } + + return response.pubsub.topics + } + + /** + * Publish data under a topic + */ + async publish (topic: string, data: Uint8Array): Promise<void> { + if (typeof topic !== 'string') { + throw new InvalidParametersError('invalid topic received') + } + + if (!(data instanceof Uint8Array)) { + throw new InvalidParametersError('data received is not a Uint8Array') + } + + const sh = await this.client.send({ + type: Request.Type.PUBSUB, + pubsub: { + type: PSRequest.Type.PUBLISH, + topic, + data + } + }) + + const response = await sh.read(Response) + + await sh.unwrap().closeWrite() + + if (response.type !== Response.Type.OK) { + throw new OperationFailedError(response.error?.msg ?? 'Pubsub publish failed') + } + } + + /** + * Request to subscribe to a certain topic + */ + async subscribe (topic: string): Promise<Subscription> { + if (typeof topic !== 'string') { + throw new InvalidParametersError('invalid topic received') + } + + const sh = await this.client.send({ + type: Request.Type.PUBSUB, + pubsub: { + type: PSRequest.Type.SUBSCRIBE, + topic + } + }) + + const response = await sh.read(Response) + + if (response.type !== Response.Type.OK) { + throw new OperationFailedError(response.error?.msg ?? 'Pubsub subscribe failed') + } + + let subscribed = true + + const subscription: Subscription = { + async * messages () { + while (subscribed) { // eslint-disable-line no-unmodified-loop-condition + yield await sh.read(PSMessage) + } + }, + async cancel () { + subscribed = false + await sh.unwrap().closeWrite() + } + } + + return subscription + } + + async getSubscribers (topic: string): Promise<PeerId[]> { + if (typeof topic !== 'string') { + throw new InvalidParametersError('invalid topic received') + } + + const sh = await this.client.send({ + type: Request.Type.PUBSUB, + pubsub: { + type: PSRequest.Type.LIST_PEERS, + topic + } + }) + + const response = await sh.read(Response) + + await sh.unwrap().closeWrite() + + if (response.type !== Response.Type.OK) { + throw new OperationFailedError(response.error?.msg ??
'Pubsub get subscribers failed') + } + + if (response.pubsub?.topics == null) { + throw new OperationFailedError('Invalid response') + } + + return response.pubsub.peerIDs.map(buf => peerIdFromMultihash(Digest.decode(buf))) + } +} diff --git a/packages/libp2p-daemon-client/test/dht.spec.ts b/packages/libp2p-daemon-client/test/dht.spec.ts new file mode 100644 index 0000000000..6e7f150e47 --- /dev/null +++ b/packages/libp2p-daemon-client/test/dht.spec.ts @@ -0,0 +1,268 @@ +/* eslint-env mocha */ + +import { createServer } from '@libp2p/daemon-server' +import { MessageType, EventTypes } from '@libp2p/kad-dht' +import { peerIdFromString } from '@libp2p/peer-id' +import { multiaddr } from '@multiformats/multiaddr' +import { expect } from 'aegir/chai' +import all from 'it-all' +import { CID } from 'multiformats/cid' +import sinon from 'sinon' +import { stubInterface } from 'sinon-ts' +import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' +import { createClient } from '../src/index.js' +import { matchBytes } from './fixtures/match-bytes.js' +import type { DaemonClient } from '../src/index.js' +import type { GossipSub } from '@chainsafe/libp2p-gossipsub' +import type { Libp2pServer } from '@libp2p/daemon-server' +import type { Libp2p } from '@libp2p/interface' +import type { ValueEvent, FinalPeerEvent, PeerResponseEvent, KadDHT } from '@libp2p/kad-dht' +import type { StubbedInstance } from 'sinon-ts' + +const defaultMultiaddr = multiaddr('/ip4/0.0.0.0/tcp/12345') + +function matchCid (cid: CID): sinon.SinonMatcher { + return sinon.match((c: CID) => c.toString() === cid.toString(), 'cid') +} + +describe('daemon dht client', function () { + this.timeout(30e3) + + let libp2p: StubbedInstance> + let server: Libp2pServer + let client: DaemonClient + let dht: StubbedInstance + + beforeEach(async function () { + dht = stubInterface() + libp2p = stubInterface>() + libp2p.services.dht = dht + + server = createServer(defaultMultiaddr, libp2p) + + await server.start() + + client = createClient(server.getMultiaddr()) + }) + + afterEach(async () => { + if (server != null) { + await server.stop() + } + + sinon.restore() + }) + + describe('put', () => { + const key = uint8ArrayFromString('/key') + const value = uint8ArrayFromString('oh hello there') + + it('should be able to put a value to the dht', async function () { + dht.put.returns(async function * () {}()) + + await client.dht.put(key, value) + + expect(dht.put.calledWith(matchBytes(key), matchBytes(value))).to.be.true() + }) + + it('should error if receive an error message', async () => { + dht.put.returns(async function * () { // eslint-disable-line require-yield + throw new Error('Urk!') + }()) + + await expect(client.dht.put(key, value)).to.eventually.be.rejectedWith(/Urk!/) + }) + }) + + describe('get', () => { + it('should be able to get a value from the dht', async function () { + const key = uint8ArrayFromString('/key') + const value = uint8ArrayFromString('oh hello there') + + dht.get.withArgs(matchBytes(key)).returns(async function * () { + const event: ValueEvent = { + name: 'VALUE', + type: EventTypes.VALUE, + value, + from: peerIdFromString('12D3KooWJKCJW8Y26pRFNv78TCMGLNTfyN8oKaFswMRYXTzSbSsa'), + path: { + index: 0, + running: 0, + queued: 0, + total: 0 + } + } + + yield event + }()) + + const result = await client.dht.get(key) + + expect(result).to.equalBytes(value) + }) + + it('should error if receive an error message', async function () { + const key = uint8ArrayFromString('/key') + + dht.get.returns(async function 
* () { // eslint-disable-line require-yield + throw new Error('Urk!') + }()) + + await expect(client.dht.get(key)).to.eventually.be.rejectedWith(/Urk!/) + }) + }) + + describe('findPeer', () => { + it('should be able to find a peer', async () => { + const id = peerIdFromString('12D3KooWJKCJW8Y26pRFNv78TCMGLNTfyN8oKaFswMRYXTzSbSsa') + + dht.findPeer.withArgs(id).returns(async function * () { + const event: FinalPeerEvent = { + name: 'FINAL_PEER', + type: EventTypes.FINAL_PEER, + peer: { + id, + multiaddrs: [] + }, + from: peerIdFromString('12D3KooWJKCJW8Y26pRFNv78TCMGLNTfyN8oKaFswMRYXTzSbSsa'), + path: { + index: 0, + running: 0, + queued: 0, + total: 0 + } + } + + yield event + }()) + + const result = await client.dht.findPeer(id) + + expect(result.id.equals(id)).to.be.true() + }) + + it('should error if receive an error message', async () => { + const id = peerIdFromString('12D3KooWJKCJW8Y26pRFNv78TCMGLNTfyN8oKaFswMRYXTzSbSsa') + + dht.findPeer.returns(async function * () { // eslint-disable-line require-yield + throw new Error('Urk!') + }()) + + await expect(client.dht.findPeer(id)).to.eventually.be.rejectedWith(/Urk!/) + }) + }) + + describe('provide', () => { + it('should be able to provide', async () => { + const cid = CID.parse('QmVzw6MPsF96TyXBSRs1ptLoVMWRv5FCYJZZGJSVB2Hp38') + + dht.provide.returns(async function * () {}()) + + await client.dht.provide(cid) + + expect(dht.provide.calledWith(matchCid(cid))).to.be.true() + }) + + it('should error if receive an error message', async () => { + const cid = CID.parse('QmVzw6MPsF96TyXBSRs1ptLoVMWRv5FCYJZZGJSVB2Hp38') + + dht.provide.returns(async function * () { // eslint-disable-line require-yield + throw new Error('Urk!') + }()) + + await expect(client.dht.provide(cid)).to.eventually.be.rejectedWith(/Urk!/) + }) + }) + + describe('findProviders', () => { + it('should be able to find providers', async () => { + const cid = CID.parse('QmVzw6MPsF96TyXBSRs1ptLoVMWRv5FCYJZZGJSVB2Hp38') + const id = peerIdFromString('12D3KooWJKCJW8Y26pRFNv78TCMGLNTfyN8oKaFswMRYXTzSbSsa') + + dht.findProviders.withArgs(matchCid(cid)).returns(async function * () { + const event: PeerResponseEvent = { + name: 'PEER_RESPONSE', + type: EventTypes.PEER_RESPONSE, + providers: [{ + id, + multiaddrs: [] + }], + closer: [], + from: id, + messageName: 'GET_PROVIDERS', + messageType: MessageType.GET_PROVIDERS, + path: { + index: 0, + running: 0, + queued: 0, + total: 0 + } + } + + yield event + }()) + + const result = await all(client.dht.findProviders(cid)) + + expect(result).to.have.lengthOf(1) + expect(result[0].id.equals(id)).to.be.true() + }) + + // skipped because the protocol doesn't handle streaming errors + it.skip('should error if receive an error message', async () => { + const cid = CID.parse('QmVzw6MPsF96TyXBSRs1ptLoVMWRv5FCYJZZGJSVB2Hp38') + + dht.findProviders.returns(async function * () { // eslint-disable-line require-yield + throw new Error('Urk!') + }()) + + await expect(all(client.dht.findProviders(cid))).to.eventually.be.rejectedWith(/Urk!/) + }) + }) + + describe('getClosestPeers', () => { + it('should be able to get the closest peers', async () => { + const cid = CID.parse('QmVzw6MPsF96TyXBSRs1ptLoVMWRv5FCYJZZGJSVB2Hp38') + const id = peerIdFromString('12D3KooWJKCJW8Y26pRFNv78TCMGLNTfyN8oKaFswMRYXTzSbSsa') + + dht.getClosestPeers.returns(async function * () { + const event: PeerResponseEvent = { + name: 'PEER_RESPONSE', + type: EventTypes.PEER_RESPONSE, + providers: [], + closer: [{ + id, + multiaddrs: [] + }], + from: id, + messageName: 
'GET_PROVIDERS', + messageType: MessageType.GET_PROVIDERS, + path: { + index: 0, + running: 0, + queued: 0, + total: 0 + } + } + + yield event + }()) + + const result = await all(client.dht.getClosestPeers(cid.bytes)) + + expect(result).to.have.lengthOf(1) + expect(result[0].id.equals(id)).to.be.true() + }) + + // skipped because the protocol doesn't handle streaming errors + it.skip('should error if it gets an invalid key', async () => { + const cid = CID.parse('QmVzw6MPsF96TyXBSRs1ptLoVMWRv5FCYJZZGJSVB2Hp38') + + dht.getClosestPeers.returns(async function * () { // eslint-disable-line require-yield + throw new Error('Urk!') + }()) + + await expect(all(client.dht.getClosestPeers(cid.bytes))).to.eventually.be.rejectedWith(/Urk!/) + }) + }) +}) diff --git a/packages/libp2p-daemon-client/test/fixtures/match-bytes.ts b/packages/libp2p-daemon-client/test/fixtures/match-bytes.ts new file mode 100644 index 0000000000..83dd80cf36 --- /dev/null +++ b/packages/libp2p-daemon-client/test/fixtures/match-bytes.ts @@ -0,0 +1,10 @@ +import Sinon from 'sinon' +import { equals as uint8ArrayEquals } from 'uint8arrays/equals' +import type { SinonMatcher } from 'sinon' +import type { Uint8ArrayList } from 'uint8arraylist' + +export function matchBytes (bytes: Uint8Array | Uint8ArrayList): SinonMatcher { + return Sinon.match((val: Uint8Array | Uint8ArrayList) => { + return uint8ArrayEquals(val.subarray(), bytes.subarray()) + }) +} diff --git a/packages/libp2p-daemon-client/test/index.spec.ts b/packages/libp2p-daemon-client/test/index.spec.ts new file mode 100644 index 0000000000..be93cdd7f6 --- /dev/null +++ b/packages/libp2p-daemon-client/test/index.spec.ts @@ -0,0 +1,113 @@ +/* eslint-env mocha */ + +import { createServer } from '@libp2p/daemon-server' +import { isPeerId } from '@libp2p/interface' +import { peerIdFromString } from '@libp2p/peer-id' +import { multiaddr } from '@multiformats/multiaddr' +import { expect } from 'aegir/chai' +import sinon from 'sinon' +import { stubInterface } from 'sinon-ts' +import { createClient } from '../src/index.js' +import type { DaemonClient } from '../src/index.js' +import type { GossipSub } from '@chainsafe/libp2p-gossipsub' +import type { Libp2pServer } from '@libp2p/daemon-server' +import type { Connection, Libp2p, PeerStore } from '@libp2p/interface' +import type { KadDHT } from '@libp2p/kad-dht' +import type { StubbedInstance } from 'sinon-ts' + +const defaultMultiaddr = multiaddr('/ip4/0.0.0.0/tcp/0') + +describe('daemon client', function () { + this.timeout(30e3) + + let libp2p: StubbedInstance> + let server: Libp2pServer + let client: DaemonClient + + beforeEach(async function () { + libp2p = stubInterface>() + libp2p.peerStore = stubInterface() + + server = createServer(defaultMultiaddr, libp2p) + + await server.start() + + client = createClient(server.getMultiaddr()) + }) + + afterEach(async () => { + if (server != null) { + await server.stop() + } + + sinon.restore() + }) + + describe('identify', () => { + it('should be able to identify', async () => { + libp2p.peerId = peerIdFromString('12D3KooWJKCJW8Y26pRFNv78TCMGLNTfyN8oKaFswMRYXTzSbSsa') + libp2p.getMultiaddrs.returns([ + multiaddr('/ip4/0.0.0.0/tcp/1234/p2p/12D3KooWJKCJW8Y26pRFNv78TCMGLNTfyN8oKaFswMRYXTzSbSsa') + ]) + + const identify = await client.identify() + + expect(identify).to.exist() + expect(identify.peerId).to.exist() + expect(identify.addrs).to.exist() + expect(isPeerId(identify.peerId)) + }) + + it('should error if receive an error message', async () => { + libp2p.peerId = 
peerIdFromString('12D3KooWJKCJW8Y26pRFNv78TCMGLNTfyN8oKaFswMRYXTzSbSsa') + libp2p.getMultiaddrs.throws(new Error('Urk!')) + + await expect(client.identify()).to.eventually.be.rejectedWith(/Urk!/) + }) + }) + + describe('listPeers', () => { + it('should be able to listPeers', async () => { + const remotePeer = peerIdFromString('12D3KooWJKCJW8Y26pRFNv78TCMGLNTfyN8oKaFswMRYXTzSbSsa') + const remoteAddr = multiaddr('/ip4/127.0.0.1/tcp/4001') + + libp2p.getConnections.returns([ + stubInterface({ + remotePeer, + remoteAddr + }) + ]) + + const peers = await client.listPeers() + + expect(peers).to.have.lengthOf(1) + expect(peers[0].equals(remotePeer)).to.be.true() + }) + + it('should error if receive an error message', async () => { + libp2p.getConnections.throws(new Error('Urk!')) + + await expect(client.listPeers()).to.eventually.be.rejectedWith(/Urk!/) + }) + }) + + describe('connect', () => { + it('should be able to connect', async () => { + const remotePeer = peerIdFromString('12D3KooWJKCJW8Y26pRFNv78TCMGLNTfyN8oKaFswMRYXTzSbSsa') + const ma = multiaddr('/ip4/1.2.3.4/tcp/1234') + + await client.connect(remotePeer, [ma]) + + expect(libp2p.dial.calledWith(remotePeer)).to.be.true() + }) + + it('should error if receive an error message', async () => { + const remotePeer = peerIdFromString('12D3KooWJKCJW8Y26pRFNv78TCMGLNTfyN8oKaFswMRYXTzSbSsa') + const ma = multiaddr('/ip4/1.2.3.4/tcp/1234') + + libp2p.dial.rejects(new Error('Urk!')) + + await expect(client.connect(remotePeer, [ma])).to.eventually.be.rejectedWith(/Urk!/) + }) + }) +}) diff --git a/packages/libp2p-daemon-client/test/pubsub.spec.ts b/packages/libp2p-daemon-client/test/pubsub.spec.ts new file mode 100644 index 0000000000..edfd0381ca --- /dev/null +++ b/packages/libp2p-daemon-client/test/pubsub.spec.ts @@ -0,0 +1,126 @@ +/* eslint-env mocha */ + +import { createServer } from '@libp2p/daemon-server' +import { peerIdFromString } from '@libp2p/peer-id' +import { multiaddr } from '@multiformats/multiaddr' +import { expect } from 'aegir/chai' +import sinon from 'sinon' +import { stubInterface } from 'sinon-ts' +import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' +import { createClient } from '../src/index.js' +import type { DaemonClient } from '../src/index.js' +import type { GossipSub } from '@chainsafe/libp2p-gossipsub' +import type { Libp2pServer } from '@libp2p/daemon-server' +import type { Libp2p } from '@libp2p/interface' +import type { KadDHT } from '@libp2p/kad-dht' +import type { StubbedInstance } from 'sinon-ts' + +const defaultMultiaddr = multiaddr('/ip4/0.0.0.0/tcp/12345') + +describe('daemon pubsub client', function () { + this.timeout(30e3) + + let libp2p: StubbedInstance> + let server: Libp2pServer + let client: DaemonClient + let pubsub: StubbedInstance + + beforeEach(async function () { + pubsub = stubInterface() + libp2p = stubInterface>() + libp2p.services.pubsub = pubsub + + server = createServer(defaultMultiaddr, libp2p) + + await server.start() + + client = createClient(server.getMultiaddr()) + }) + + afterEach(async () => { + if (server != null) { + await server.stop() + } + + sinon.restore() + }) + + describe('getTopics', () => { + it('should get empty list of topics when no subscriptions exist', async () => { + pubsub.getTopics.returns([]) + + const topics = await client.pubsub.getTopics() + + expect(topics).to.have.lengthOf(0) + }) + + it('should get a list with a topic when subscribed', async () => { + const topic = 'test-topic' + pubsub.getTopics.returns([topic]) + + const topics = await 
client.pubsub.getTopics() + + expect(topics).to.have.lengthOf(1) + expect(topics[0]).to.equal(topic) + }) + + it('should error if receive an error message', async () => { + pubsub.getTopics.throws(new Error('Urk!')) + + await expect(client.pubsub.getTopics()).to.eventually.be.rejectedWith(/Urk!/) + }) + }) + + describe('publish', () => { + it('should publish an event', async () => { + const topic = 'test-topic' + const data = uint8ArrayFromString('hello world') + + await client.pubsub.publish(topic, data) + + expect(pubsub.publish.called).to.be.true() + + const call = pubsub.publish.getCall(0) + + expect(call).to.have.nested.property('args[0]', topic) + expect(call).to.have.deep.nested.property('args[1]', data) + }) + + it('should error if receive an error message', async () => { + const topic = 'test-topic' + const data = uint8ArrayFromString('hello world') + pubsub.publish.throws(new Error('Urk!')) + + await expect(client.pubsub.publish(topic, data)).to.eventually.be.rejectedWith(/Urk!/) + }) + }) + + describe('getSubscribers', () => { + it('should get empty list of topics when no subscriptions exist', async () => { + pubsub.getSubscribers.returns([]) + + const topic = 'test-topic' + const topics = await client.pubsub.getSubscribers(topic) + + expect(topics).to.have.lengthOf(0) + }) + + it('should get a list with a peer when subscribed', async () => { + const topic = 'test-topic' + const peer = peerIdFromString('12D3KooWKnQbfH5t1XxJW5FBoMGNjmC9LTSbDdRJxtYj2bJV5XfP') + pubsub.getSubscribers.withArgs(topic).returns([peer]) + + const peers = await client.pubsub.getSubscribers(topic) + + expect(peers).to.have.lengthOf(1) + expect(peers[0].toString()).to.equal(peer.toString()) + }) + + it('should error if receive an error message', async () => { + const topic = 'test-topic' + pubsub.getSubscribers.throws(new Error('Urk!')) + + await expect(client.pubsub.getSubscribers(topic)).to.eventually.be.rejectedWith(/Urk!/) + }) + }) +}) diff --git a/packages/libp2p-daemon-client/test/stream.spec.ts b/packages/libp2p-daemon-client/test/stream.spec.ts new file mode 100644 index 0000000000..46252e3627 --- /dev/null +++ b/packages/libp2p-daemon-client/test/stream.spec.ts @@ -0,0 +1,85 @@ +/* eslint-env mocha */ + +import { createServer } from '@libp2p/daemon-server' +import { peerIdFromString } from '@libp2p/peer-id' +import { echo, streamPair } from '@libp2p/utils' +import { multiaddr } from '@multiformats/multiaddr' +import { expect } from 'aegir/chai' +import all from 'it-all' +import { pEvent } from 'p-event' +import sinon from 'sinon' +import { stubInterface } from 'sinon-ts' +import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' +import { toString as uint8ArrayToString } from 'uint8arrays/to-string' +import { createClient } from '../src/index.js' +import type { DaemonClient } from '../src/index.js' +import type { GossipSub } from '@chainsafe/libp2p-gossipsub' +import type { Libp2pServer } from '@libp2p/daemon-server' +import type { Connection, Libp2p, PeerStore } from '@libp2p/interface' +import type { KadDHT } from '@libp2p/kad-dht' +import type { StubbedInstance } from 'sinon-ts' + +const defaultMultiaddr = multiaddr('/ip4/0.0.0.0/tcp/0') + +describe('daemon stream client', function () { + this.timeout(50e3) + + let libp2p: StubbedInstance> + let server: Libp2pServer + let client: DaemonClient + + beforeEach(async function () { + libp2p = stubInterface>() + libp2p.peerStore = stubInterface() + + server = createServer(defaultMultiaddr, libp2p) + + await server.start() + + 
client = createClient(server.getMultiaddr()) + }) + + afterEach(async () => { + if (server != null) { + await server.stop() + } + + sinon.restore() + }) + + it('should be able to open a stream, write to it and a stream handler, should handle the message', async () => { + const protocol = '/echo/1.0.0' + + const [outboundStream, inboundStream] = await streamPair({ + protocol + }) + + // echo all bytes back to the sender + void echo(inboundStream) + + const peerB = peerIdFromString('12D3KooWJKCJW8Y26pRFNv78TCMGLNTfyN8oKaFswMRYXTzSbSsb') + + const peerAToPeerB = stubInterface({ + newStream: async () => { + return outboundStream + } + }) + + libp2p.dial.withArgs(peerB).resolves(peerAToPeerB) + + const stream = await client.openStream(peerB, protocol) + const dataPromise = all(stream) + + stream.send(uint8ArrayFromString('hello world')) + + await Promise.all([ + pEvent(inboundStream, 'close'), + stream.closeWrite() + ]) + + const data = await dataPromise + + expect(data).to.have.lengthOf(1) + expect(uint8ArrayToString(data[0].subarray())).to.equal('hello world') + }) +}) diff --git a/packages/libp2p-daemon-client/tsconfig.json b/packages/libp2p-daemon-client/tsconfig.json new file mode 100644 index 0000000000..b0b79cc286 --- /dev/null +++ b/packages/libp2p-daemon-client/tsconfig.json @@ -0,0 +1,18 @@ +{ + "extends": "aegir/src/config/tsconfig.aegir.json", + "compilerOptions": { + "outDir": "dist" + }, + "include": [ + "src", + "test" + ], + "references": [ + { + "path": "../libp2p-daemon-protocol" + }, + { + "path": "../libp2p-daemon-server" + } + ] +} diff --git a/packages/libp2p-daemon-client/typedoc.json b/packages/libp2p-daemon-client/typedoc.json new file mode 100644 index 0000000000..db0b0747ef --- /dev/null +++ b/packages/libp2p-daemon-client/typedoc.json @@ -0,0 +1,6 @@ +{ + "readme": "none", + "entryPoints": [ + "./src/index.ts" + ] +} diff --git a/packages/libp2p-daemon-protocol/CHANGELOG.md b/packages/libp2p-daemon-protocol/CHANGELOG.md new file mode 100644 index 0000000000..5a0b4ec4bb --- /dev/null +++ b/packages/libp2p-daemon-protocol/CHANGELOG.md @@ -0,0 +1,232 @@ +## [@libp2p/daemon-protocol-v7.0.6](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-protocol-7.0.5...@libp2p/daemon-protocol-7.0.6) (2025-08-13) + +### Dependencies + +* bump aegir from 46.0.5 to 47.0.21 ([#343](https://github.com/libp2p/js-libp2p-daemon/issues/343)) ([704c22f](https://github.com/libp2p/js-libp2p-daemon/commit/704c22f102362c6036642a73979d262e1214baa5)) + +## [@libp2p/daemon-protocol-v7.0.5](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-protocol-7.0.4...@libp2p/daemon-protocol-7.0.5) (2025-05-19) + +### Dependencies + +* bump aegir from 45.2.1 to 46.0.2 ([#297](https://github.com/libp2p/js-libp2p-daemon/issues/297)) ([09c1457](https://github.com/libp2p/js-libp2p-daemon/commit/09c1457ce93a45cab43869892cd9174617a34c29)) + +## [@libp2p/daemon-protocol-v7.0.4](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-protocol-7.0.3...@libp2p/daemon-protocol-7.0.4) (2025-02-24) + +### Bug Fixes + +* update upgrader ([e3a3feb](https://github.com/libp2p/js-libp2p-daemon/commit/e3a3feb354ae2c7f3d05959533f6ebc2ac5348d1)) + +## [@libp2p/daemon-protocol-v7.0.3](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-protocol-7.0.2...@libp2p/daemon-protocol-7.0.3) (2024-11-08) + +### Bug Fixes + +* fix mismatched versions ([c6d619f](https://github.com/libp2p/js-libp2p-daemon/commit/c6d619f9af99a5a28089aeec2f047ca1fca1f2e1)) +* update project config 
([4cf3a98](https://github.com/libp2p/js-libp2p-daemon/commit/4cf3a98dd76f8a41ef7f70d9e1696f2a06049f69)) + +## @libp2p/daemon-protocol [6.0.2](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-protocol-v6.0.1...@libp2p/daemon-protocol-6.0.2) (2024-02-07) + + +### Dependencies + +* bump aegir from 41.3.5 to 42.2.3 ([#262](https://github.com/libp2p/js-libp2p-daemon/issues/262)) ([2bb9733](https://github.com/libp2p/js-libp2p-daemon/commit/2bb97338d76e4cc48490326083fb13bd9ae60a74)) + +## [@libp2p/daemon-protocol-v6.0.1](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-protocol-v6.0.0...@libp2p/daemon-protocol-v6.0.1) (2023-11-30) + + +### Bug Fixes + +* read stream data using lp stream ([#242](https://github.com/libp2p/js-libp2p-daemon/issues/242)) ([ee746f6](https://github.com/libp2p/js-libp2p-daemon/commit/ee746f6164b32536a80b9f376b46ef90d7653dcd)) + +## [@libp2p/daemon-protocol-v6.0.0](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-protocol-v5.0.1...@libp2p/daemon-protocol-v6.0.0) (2023-11-30) + + +### ⚠ BREAKING CHANGES + +* updates js-libp2p to v1 + +### Dependencies + +* update libp2p to v1 ([#235](https://github.com/libp2p/js-libp2p-daemon/issues/235)) ([6f2917b](https://github.com/libp2p/js-libp2p-daemon/commit/6f2917b714756e3632ff6c522668f7c2166d4389)) + +## [@libp2p/daemon-protocol-v5.0.1](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-protocol-v5.0.0...@libp2p/daemon-protocol-v5.0.1) (2023-11-02) + + +### Dependencies + +* bump aegir from 40.0.13 to 41.1.6 ([#232](https://github.com/libp2p/js-libp2p-daemon/issues/232)) ([653c74b](https://github.com/libp2p/js-libp2p-daemon/commit/653c74b6272fd6d11d686bf7bb44b49b6757b633)) + +## [@libp2p/daemon-protocol-v5.0.0](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-protocol-v4.0.1...@libp2p/daemon-protocol-v5.0.0) (2023-07-31) + + +### ⚠ BREAKING CHANGES + +* stream close methods are now asyc, requires libp2p@0.46.x or later + +### Features + +* close streams gracefully ([#213](https://github.com/libp2p/js-libp2p-daemon/issues/213)) ([92eebfa](https://github.com/libp2p/js-libp2p-daemon/commit/92eebfa12ba1fb42ae6c9e164fb0d69647e62074)) + + +### Dependencies + +* bump aegir from 38.1.8 to 39.0.1 ([#202](https://github.com/libp2p/js-libp2p-daemon/issues/202)) ([3bf4027](https://github.com/libp2p/js-libp2p-daemon/commit/3bf402752a92c3ebb96435eaa7923ce22ef76ea0)) + +## [@libp2p/daemon-protocol-v4.0.1](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-protocol-v4.0.0...@libp2p/daemon-protocol-v4.0.1) (2023-04-24) + + +### Dependencies + +* bump @libp2p/interface-peer-store from 1.2.9 to 2.0.0 ([#201](https://github.com/libp2p/js-libp2p-daemon/issues/201)) ([9b146a8](https://github.com/libp2p/js-libp2p-daemon/commit/9b146a8c38c30a13401be6da5259cd9da6bdc25c)) + +## [@libp2p/daemon-protocol-v4.0.0](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-protocol-v3.0.6...@libp2p/daemon-protocol-v4.0.0) (2023-04-19) + + +### ⚠ BREAKING CHANGES + +* the type of the source/sink properties have changed + +### Dependencies + +* update it-stream-types to 2.x.x ([#196](https://github.com/libp2p/js-libp2p-daemon/issues/196)) ([a09f6d5](https://github.com/libp2p/js-libp2p-daemon/commit/a09f6d58942033b08b579735aaa1537b3a324776)) + +## [@libp2p/daemon-protocol-v3.0.6](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-protocol-v3.0.5...@libp2p/daemon-protocol-v3.0.6) (2023-02-22) + + +### Dependencies + +* bump aegir from 37.12.1 to 
38.1.6 ([#183](https://github.com/libp2p/js-libp2p-daemon/issues/183)) ([6725a0a](https://github.com/libp2p/js-libp2p-daemon/commit/6725a0aeba9acb56a7530dece6c65a0f3eadfec5)) + +## [@libp2p/daemon-protocol-v3.0.5](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-protocol-v3.0.4...@libp2p/daemon-protocol-v3.0.5) (2023-02-22) + + +### Trivial Changes + +* remove lerna ([#171](https://github.com/libp2p/js-libp2p-daemon/issues/171)) ([367f912](https://github.com/libp2p/js-libp2p-daemon/commit/367f9122f2fe1c31c8de7a136cda18d024ff08d7)) + + +### Dependencies + +* **dev:** bump protons from 6.1.3 to 7.0.2 ([#179](https://github.com/libp2p/js-libp2p-daemon/issues/179)) ([07d5872](https://github.com/libp2p/js-libp2p-daemon/commit/07d5872e04f95e2e8957f083dae3721aa8dc307e)) + +## [@libp2p/daemon-protocol-v3.0.4](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-protocol-v3.0.3...@libp2p/daemon-protocol-v3.0.4) (2022-10-14) + + +### Bug Fixes + +* handle empty responses ([#145](https://github.com/libp2p/js-libp2p-daemon/issues/145)) ([0dfb823](https://github.com/libp2p/js-libp2p-daemon/commit/0dfb8236a0ab57a55fa0ebb91ac7a776a9f709da)) +* restore proto2 compatibility ([#146](https://github.com/libp2p/js-libp2p-daemon/issues/146)) ([9fe8e04](https://github.com/libp2p/js-libp2p-daemon/commit/9fe8e042757ec107cc137a9452fd021a62620b3c)) + +## [@libp2p/daemon-protocol-v3.0.3](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-protocol-v3.0.2...@libp2p/daemon-protocol-v3.0.3) (2022-10-13) + + +### Dependencies + +* update uint8arrays, protons and multiformats ([#143](https://github.com/libp2p/js-libp2p-daemon/issues/143)) ([661139c](https://github.com/libp2p/js-libp2p-daemon/commit/661139c674c9994724e32227d7d9ae2c5da1cea2)) + +## [@libp2p/daemon-protocol-v3.0.2](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-protocol-v3.0.1...@libp2p/daemon-protocol-v3.0.2) (2022-10-07) + + +### Dependencies + +* bump @libp2p/interface-transport from 1.0.4 to 2.0.0 ([#132](https://github.com/libp2p/js-libp2p-daemon/issues/132)) ([1a7b2cc](https://github.com/libp2p/js-libp2p-daemon/commit/1a7b2cc653dfb51e92edb1f652452e3c793156c3)) + +## [@libp2p/daemon-protocol-v3.0.1](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-protocol-v3.0.0...@libp2p/daemon-protocol-v3.0.1) (2022-09-14) + + +### Bug Fixes + +* fix proto file and generated code ([#94](https://github.com/libp2p/js-libp2p-daemon/issues/94)) ([5c22052](https://github.com/libp2p/js-libp2p-daemon/commit/5c22052c8da0da4febf88582e9e27a93ac1f710b)), closes [#66](https://github.com/libp2p/js-libp2p-daemon/issues/66) + +## [@libp2p/daemon-protocol-v3.0.0](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-protocol-v2.0.2...@libp2p/daemon-protocol-v3.0.0) (2022-09-09) + + +### ⚠ BREAKING CHANGES + +* the stream type returned by `client.openStream` has changed + +### Bug Fixes + +* allow opening remote streams ([#126](https://github.com/libp2p/js-libp2p-daemon/issues/126)) ([361cc57](https://github.com/libp2p/js-libp2p-daemon/commit/361cc5750de505ab0381ae43609c67d5d4f659a7)) + +## [@libp2p/daemon-protocol-v2.0.2](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-protocol-v2.0.1...@libp2p/daemon-protocol-v2.0.2) (2022-08-10) + + +### Bug Fixes + +* update all deps ([#124](https://github.com/libp2p/js-libp2p-daemon/issues/124)) ([5e46e1e](https://github.com/libp2p/js-libp2p-daemon/commit/5e46e1e26c23428046a6007ab158420d3d830145)) + +## 
[@libp2p/daemon-protocol-v2.0.1](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-protocol-v2.0.0...@libp2p/daemon-protocol-v2.0.1) (2022-07-31) + + +### Trivial Changes + +* update project config ([#111](https://github.com/libp2p/js-libp2p-daemon/issues/111)) ([345e663](https://github.com/libp2p/js-libp2p-daemon/commit/345e663e34278e780fc2f3a6b595294f925c4521)) + + +### Dependencies + +* update uint8arraylist and protons deps ([#115](https://github.com/libp2p/js-libp2p-daemon/issues/115)) ([34a8334](https://github.com/libp2p/js-libp2p-daemon/commit/34a83340ba855a9c08319ae1cd735dfa8b71c248)) + +## [@libp2p/daemon-protocol-v2.0.0](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-protocol-v1.0.6...@libp2p/daemon-protocol-v2.0.0) (2022-06-15) + + +### ⚠ BREAKING CHANGES + +* uses new single-issue libp2p interface modules + +### Features + +* update to latest libp2p interfaces ([#102](https://github.com/libp2p/js-libp2p-daemon/issues/102)) ([f5e9121](https://github.com/libp2p/js-libp2p-daemon/commit/f5e91210654ab3c411e316c1c657356c037a0f6a)) + +## [@libp2p/daemon-protocol-v1.0.6](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-protocol-v1.0.5...@libp2p/daemon-protocol-v1.0.6) (2022-05-25) + + +### Trivial Changes + +* update docs ([#91](https://github.com/libp2p/js-libp2p-daemon/issues/91)) ([5b072ff](https://github.com/libp2p/js-libp2p-daemon/commit/5b072ff89f30fd6cf55a3387bf0961c8ad78a22f)), closes [#83](https://github.com/libp2p/js-libp2p-daemon/issues/83) + +## [@libp2p/daemon-protocol-v1.0.5](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-protocol-v1.0.4...@libp2p/daemon-protocol-v1.0.5) (2022-05-10) + + +### Bug Fixes + +* encode enums correctly ([#86](https://github.com/libp2p/js-libp2p-daemon/issues/86)) ([6ce4633](https://github.com/libp2p/js-libp2p-daemon/commit/6ce4633f3db41ab66f9b8b1abbe84955dde3e9be)) + +## [@libp2p/daemon-protocol-v1.0.4](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-protocol-v1.0.3...@libp2p/daemon-protocol-v1.0.4) (2022-04-20) + + +### Bug Fixes + +* update interfaces and deps ([#84](https://github.com/libp2p/js-libp2p-daemon/issues/84)) ([25173d5](https://github.com/libp2p/js-libp2p-daemon/commit/25173d5b2edf0e9dd9132707d349cdc862caecdb)) + +## [@libp2p/daemon-protocol-v1.0.3](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-protocol-v1.0.2...@libp2p/daemon-protocol-v1.0.3) (2022-04-07) + + +### Bug Fixes + +* update generated file ([#82](https://github.com/libp2p/js-libp2p-daemon/issues/82)) ([fc66301](https://github.com/libp2p/js-libp2p-daemon/commit/fc66301b6da3d24bc065f37337705753873c6e60)) + +## [@libp2p/daemon-protocol-v1.0.2](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-protocol-v1.0.1...@libp2p/daemon-protocol-v1.0.2) (2022-04-07) + + +### Bug Fixes + +* remove protobufjs and replace with protons ([#81](https://github.com/libp2p/js-libp2p-daemon/issues/81)) ([78dd02a](https://github.com/libp2p/js-libp2p-daemon/commit/78dd02a679e55f22c7e24c1ee2b6f92a4679a0b9)) + +## [@libp2p/daemon-protocol-v1.0.1](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-protocol-v1.0.0...@libp2p/daemon-protocol-v1.0.1) (2022-04-07) + + +### Trivial Changes + +* update aegir to latest version ([#80](https://github.com/libp2p/js-libp2p-daemon/issues/80)) ([3a98959](https://github.com/libp2p/js-libp2p-daemon/commit/3a98959617d9c19bba9fb064defee3d51acfcc29)) + +## @libp2p/daemon-protocol-v1.0.0 (2022-03-28) + + +### ⚠ BREAKING CHANGES + 
+* This module is now ESM only + +### Features + +* convert to typescript ([#78](https://github.com/libp2p/js-libp2p-daemon/issues/78)) ([f18b2a4](https://github.com/libp2p/js-libp2p-daemon/commit/f18b2a45871a2704db51b03e8583eefdcd13554c)) diff --git a/packages/libp2p-daemon-protocol/CODE_OF_CONDUCT.md b/packages/libp2p-daemon-protocol/CODE_OF_CONDUCT.md new file mode 100644 index 0000000000..6b0fa54c54 --- /dev/null +++ b/packages/libp2p-daemon-protocol/CODE_OF_CONDUCT.md @@ -0,0 +1,3 @@ +# Contributor Code of Conduct + +This project follows the [`IPFS Community Code of Conduct`](https://github.com/ipfs/community/blob/master/code-of-conduct.md) diff --git a/packages/libp2p-daemon-protocol/LICENSE-APACHE b/packages/libp2p-daemon-protocol/LICENSE-APACHE new file mode 100644 index 0000000000..b09cd7856d --- /dev/null +++ b/packages/libp2p-daemon-protocol/LICENSE-APACHE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/libp2p-daemon-protocol/LICENSE-MIT b/packages/libp2p-daemon-protocol/LICENSE-MIT new file mode 100644 index 0000000000..72dc60d84b --- /dev/null +++ b/packages/libp2p-daemon-protocol/LICENSE-MIT @@ -0,0 +1,19 @@ +The MIT License (MIT) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/packages/libp2p-daemon-protocol/README.md b/packages/libp2p-daemon-protocol/README.md new file mode 100644 index 0000000000..979298f1c1 --- /dev/null +++ b/packages/libp2p-daemon-protocol/README.md @@ -0,0 +1,29 @@ +# @libp2p/daemon-protocol + +[![libp2p.io](https://img.shields.io/badge/project-libp2p-yellow.svg?style=flat-square)](http://libp2p.io/) +[![Discuss](https://img.shields.io/discourse/https/discuss.libp2p.io/posts.svg?style=flat-square)](https://discuss.libp2p.io) +[![codecov](https://img.shields.io/codecov/c/github/libp2p/js-libp2p-daemon.svg?style=flat-square)](https://codecov.io/gh/libp2p/js-libp2p-daemon) +[![CI](https://img.shields.io/github/actions/workflow/status/libp2p/js-libp2p-daemon/js-test-and-release.yml?branch=main\&style=flat-square)](https://github.com/libp2p/js-libp2p-daemon/actions/workflows/js-test-and-release.yml?query=branch%3Amain) + +> Communication protocol between libp2p daemons and clients + +# Install + +```console +$ npm i @libp2p/daemon-protocol +``` + +# API Docs + +- + +# License + +Licensed under either of + +- Apache 2.0, ([LICENSE-APACHE](https://github.com/libp2p/js-libp2p-daemon/blob/main/packages/libp2p-daemon-protocol/LICENSE-APACHE) / ) +- MIT ([LICENSE-MIT](https://github.com/libp2p/js-libp2p-daemon/blob/main/packages/libp2p-daemon-protocol/LICENSE-MIT) / ) + +# Contribution + +Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions. diff --git a/packages/libp2p-daemon-protocol/package.json b/packages/libp2p-daemon-protocol/package.json new file mode 100644 index 0000000000..6218ce00dc --- /dev/null +++ b/packages/libp2p-daemon-protocol/package.json @@ -0,0 +1,170 @@ +{ + "name": "@libp2p/daemon-protocol", + "version": "7.0.6", + "description": "Communication protocol between libp2p daemons and clients", + "author": "", + "license": "Apache-2.0 OR MIT", + "homepage": "https://github.com/libp2p/js-libp2p-daemon/tree/main/packages/libp2p-daemon-protocol#readme", + "repository": { + "type": "git", + "url": "git+https://github.com/libp2p/js-libp2p-daemon.git" + }, + "bugs": { + "url": "https://github.com/libp2p/js-libp2p-daemon/issues" + }, + "publishConfig": { + "access": "public", + "provenance": true + }, + "type": "module", + "types": "./dist/src/index.d.ts", + "typesVersions": { + "*": { + "*": [ + "*", + "dist/*", + "dist/src/*", + "dist/src/*/index" + ], + "src/*": [ + "*", + "dist/*", + "dist/src/*", + "dist/src/*/index" + ] + } + }, + "files": [ + "src", + "dist", + "!dist/test", + "!**/*.tsbuildinfo" + ], + "exports": { + ".": { + "types": "./src/index.d.ts", + "import": "./dist/src/index.js" + }, + "./stream-handler": { + "types": "./dist/src/stream-handler.d.ts", + "import": "./dist/src/stream-handler.js" + }, + "./upgrader": { + "types": "./dist/src/upgrader.d.ts", + "import": "./dist/src/upgrader.js" + } + }, + "release": { + "branches": [ + "main" + ], + "plugins": [ + [ + "@semantic-release/commit-analyzer", + { + "preset": "conventionalcommits", + "releaseRules": [ + { + "breaking": true, + "release": "major" + }, + { + "revert": true, + "release": "patch" + }, + { + "type": "feat", + "release": "minor" + }, + { + "type": "fix", + "release": "patch" + }, + { + "type": "docs", + "release": "patch" + }, + { + "type": "test", + "release": "patch" + }, + { + "type": "deps", + "release": "patch" + }, + { + "scope": "no-release", + "release": false + 
} + ] + } + ], + [ + "@semantic-release/release-notes-generator", + { + "preset": "conventionalcommits", + "presetConfig": { + "types": [ + { + "type": "feat", + "section": "Features" + }, + { + "type": "fix", + "section": "Bug Fixes" + }, + { + "type": "chore", + "section": "Trivial Changes" + }, + { + "type": "docs", + "section": "Documentation" + }, + { + "type": "deps", + "section": "Dependencies" + }, + { + "type": "test", + "section": "Tests" + } + ] + } + } + ], + "@semantic-release/changelog", + "@semantic-release/npm", + "@semantic-release/github", + [ + "@semantic-release/git", + { + "assets": [ + "CHANGELOG.md", + "package.json" + ] + } + ] + ] + }, + "scripts": { + "clean": "aegir clean", + "lint": "aegir lint", + "generate": "protons ./src/index.proto", + "build": "aegir build", + "dep-check": "aegir dep-check", + "release": "aegir release" + }, + "dependencies": { + "@libp2p/interface": "^2.10.5", + "@libp2p/logger": "^5.1.21", + "@libp2p/utils": "^6.7.1", + "any-signal": "^4.1.1", + "protons-runtime": "^5.6.0", + "uint8arraylist": "^2.4.8" + }, + "devDependencies": { + "aegir": "^47.0.21", + "protons": "^7.7.0" + } +} diff --git a/packages/libp2p-daemon-protocol/src/index.proto b/packages/libp2p-daemon-protocol/src/index.proto new file mode 100644 index 0000000000..038a513cb3 --- /dev/null +++ b/packages/libp2p-daemon-protocol/src/index.proto @@ -0,0 +1,209 @@ +syntax = "proto3"; + +message Request { + enum Type { + IDENTIFY = 0; + CONNECT = 1; + STREAM_OPEN = 2; + STREAM_HANDLER = 3; + DHT = 4; + LIST_PEERS = 5; + CONNMANAGER = 6; + DISCONNECT = 7; + PUBSUB = 8; + PEERSTORE = 9; + } + + // the proto2 version of this field is "required" which means it will have + // no default value. the default for proto3 is "singular" which omits the + // value on the wire if it's the default so for proto3 we make it "optional" + // to ensure a value is always written onto the wire + optional Type type = 1; + + optional ConnectRequest connect = 2; + optional StreamOpenRequest streamOpen = 3; + optional StreamHandlerRequest streamHandler = 4; + optional DHTRequest dht = 5; + optional ConnManagerRequest connManager = 6; + optional DisconnectRequest disconnect = 7; + optional PSRequest pubsub = 8; + optional PeerstoreRequest peerStore = 9; +} + +message Response { + enum Type { + OK = 0; + ERROR = 1; + } + + // the proto2 version of this field is "required" which means it will have + // no default value.
the default for proto3 is "singular" which omits the + // value on the wire if it's the default so for proto3 we make it "optional" + // to ensure a value is always written onto the wire + optional Type type = 1; + + optional ErrorResponse error = 2; + optional StreamInfo streamInfo = 3; + optional IdentifyResponse identify = 4; + optional DHTResponse dht = 5; + repeated PeerInfo peers = 6; + optional PSResponse pubsub = 7; + optional PeerstoreResponse peerStore = 8; +} + +message IdentifyResponse { + bytes id = 1; + repeated bytes addrs = 2; +} + +message ConnectRequest { + bytes peer = 1; + repeated bytes addrs = 2; + optional int64 timeout = 3; +} + +message StreamOpenRequest { + bytes peer = 1; + repeated string proto = 2; + optional int64 timeout = 3; +} + +message StreamHandlerRequest { + bytes addr = 1; + repeated string proto = 2; +} + +message ErrorResponse { + string msg = 1; +} + +message StreamInfo { + bytes peer = 1; + bytes addr = 2; + string proto = 3; +} + +message DHTRequest { + enum Type { + FIND_PEER = 0; + FIND_PEERS_CONNECTED_TO_PEER = 1; + FIND_PROVIDERS = 2; + GET_CLOSEST_PEERS = 3; + GET_PUBLIC_KEY = 4; + GET_VALUE = 5; + SEARCH_VALUE = 6; + PUT_VALUE = 7; + PROVIDE = 8; + } + + // the proto2 version of this field is "required" which means it will have + // no default value. the default for proto3 is "singular" which omits the + // value on the wire if it's the default so for proto3 we make it "optional" + // to ensure a value is always written onto the wire + optional Type type = 1; + + optional bytes peer = 2; + optional bytes cid = 3; + optional bytes key = 4; + optional bytes value = 5; + optional int32 count = 6; + optional int64 timeout = 7; +} + +message DHTResponse { + enum Type { + BEGIN = 0; + VALUE = 1; + END = 2; + } + + // the proto2 version of this field is "required" which means it will have + // no default value. the default for proto3 is "singular" which omits the + // value on the wire if it's the default so for proto3 we make it "optional" + // to ensure a value is always written onto the wire + optional Type type = 1; + + optional PeerInfo peer = 2; + optional bytes value = 3; +} + +message PeerInfo { + bytes id = 1; + repeated bytes addrs = 2; +} + +message ConnManagerRequest { + enum Type { + TAG_PEER = 0; + UNTAG_PEER = 1; + TRIM = 2; + } + + // the proto2 version of this field is "required" which means it will have + // no default value. the default for proto3 is "singular" which omits the + // value on the wire if it's the default so for proto3 we make it "optional" + // to ensure a value is always written onto the wire + optional Type type = 1; + + optional bytes peer = 2; + optional string tag = 3; + optional int64 weight = 4; +} + +message DisconnectRequest { + bytes peer = 1; +} + +message PSRequest { + enum Type { + GET_TOPICS = 0; + LIST_PEERS = 1; + PUBLISH = 2; + SUBSCRIBE = 3; + } + + // the proto2 version of this field is "required" which means it will have + // no default value.
the default for proto3 is "singular" which omits the + // value on the wire if it's the default so for proto3 we make it "optional" + // to ensure a value is always written onto the wire + optional Type type = 1; + + optional string topic = 2; + optional bytes data = 3; +} + +message PSMessage { + optional bytes from = 1; + optional bytes data = 2; + optional bytes seqno = 3; + repeated string topicIDs = 4; + optional bytes signature = 5; + optional bytes key = 6; +} + +message PSResponse { + repeated string topics = 1; + repeated bytes peerIDs = 2; +} + +message PeerstoreRequest { + enum Type { + UNSPECIFIED = 0; + GET_PROTOCOLS = 1; + GET_PEER_INFO = 2; + } + + // the proto2 version of this field is "required" which means it will have + // no default value. the default for proto3 is "singular" which omits the + // value on the wire if it's the default so for proto3 we make it "optional" + // to ensure a value is always written onto the wire + optional Type type = 1; + + optional bytes id = 2; + repeated string protos = 3; +} + +message PeerstoreResponse { + optional PeerInfo peer = 1; + repeated string protos = 2; +} diff --git a/packages/libp2p-daemon-protocol/src/index.ts b/packages/libp2p-daemon-protocol/src/index.ts new file mode 100644 index 0000000000..2b073f27f9 --- /dev/null +++ b/packages/libp2p-daemon-protocol/src/index.ts @@ -0,0 +1,1633 @@ +import { enumeration, encodeMessage, decodeMessage, message } from 'protons-runtime' +import type { Codec } from 'protons-runtime' +import type { Uint8ArrayList } from 'uint8arraylist' + +export interface Request { + type?: Request.Type + connect?: ConnectRequest + streamOpen?: StreamOpenRequest + streamHandler?: StreamHandlerRequest + dht?: DHTRequest + connManager?: ConnManagerRequest + disconnect?: DisconnectRequest + pubsub?: PSRequest + peerStore?: PeerstoreRequest +} + +export namespace Request { + export enum Type { + IDENTIFY = 'IDENTIFY', + CONNECT = 'CONNECT', + STREAM_OPEN = 'STREAM_OPEN', + STREAM_HANDLER = 'STREAM_HANDLER', + DHT = 'DHT', + LIST_PEERS = 'LIST_PEERS', + CONNMANAGER = 'CONNMANAGER', + DISCONNECT = 'DISCONNECT', + PUBSUB = 'PUBSUB', + PEERSTORE = 'PEERSTORE' + } + + enum __TypeValues { + IDENTIFY = 0, + CONNECT = 1, + STREAM_OPEN = 2, + STREAM_HANDLER = 3, + DHT = 4, + LIST_PEERS = 5, + CONNMANAGER = 6, + DISCONNECT = 7, + PUBSUB = 8, + PEERSTORE = 9 + } + + export namespace Type { + export const codec = (): Codec => { + return enumeration(__TypeValues) + } + } + + let _codec: Codec + + export const codec = (): Codec => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork() + } + + if (obj.type != null) { + w.uint32(8) + Request.Type.codec().encode(obj.type, w) + } + + if (obj.connect != null) { + w.uint32(18) + ConnectRequest.codec().encode(obj.connect, w) + } + + if (obj.streamOpen != null) { + w.uint32(26) + StreamOpenRequest.codec().encode(obj.streamOpen, w) + } + + if (obj.streamHandler != null) { + w.uint32(34) + StreamHandlerRequest.codec().encode(obj.streamHandler, w) + } + + if (obj.dht != null) { + w.uint32(42) + DHTRequest.codec().encode(obj.dht, w) + } + + if (obj.connManager != null) { + w.uint32(50) + ConnManagerRequest.codec().encode(obj.connManager, w) + } + + if (obj.disconnect != null) { + w.uint32(58) + DisconnectRequest.codec().encode(obj.disconnect, w) + } + + if (obj.pubsub != null) { + w.uint32(66) + PSRequest.codec().encode(obj.pubsub, w) + } + + if (obj.peerStore != null) { + w.uint32(74) +
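// protobuf field tag 74 = (field number 9 << 3) | wire type 2, i.e. the length-delimited peerStore sub-message +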
PeerstoreRequest.codec().encode(obj.peerStore, w) + } + + if (opts.lengthDelimited !== false) { + w.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.type = Request.Type.codec().decode(reader) + break + case 2: + obj.connect = ConnectRequest.codec().decode(reader, reader.uint32()) + break + case 3: + obj.streamOpen = StreamOpenRequest.codec().decode(reader, reader.uint32()) + break + case 4: + obj.streamHandler = StreamHandlerRequest.codec().decode(reader, reader.uint32()) + break + case 5: + obj.dht = DHTRequest.codec().decode(reader, reader.uint32()) + break + case 6: + obj.connManager = ConnManagerRequest.codec().decode(reader, reader.uint32()) + break + case 7: + obj.disconnect = DisconnectRequest.codec().decode(reader, reader.uint32()) + break + case 8: + obj.pubsub = PSRequest.codec().decode(reader, reader.uint32()) + break + case 9: + obj.peerStore = PeerstoreRequest.codec().decode(reader, reader.uint32()) + break + default: + reader.skipType(tag & 7) + break + } + } + + return obj + }) + } + + return _codec + } + + export const encode = (obj: Partial): Uint8Array => { + return encodeMessage(obj, Request.codec()) + } + + export const decode = (buf: Uint8Array | Uint8ArrayList): Request => { + return decodeMessage(buf, Request.codec()) + } +} + +export interface Response { + type?: Response.Type + error?: ErrorResponse + streamInfo?: StreamInfo + identify?: IdentifyResponse + dht?: DHTResponse + peers: PeerInfo[] + pubsub?: PSResponse + peerStore?: PeerstoreResponse +} + +export namespace Response { + export enum Type { + OK = 'OK', + ERROR = 'ERROR' + } + + enum __TypeValues { + OK = 0, + ERROR = 1 + } + + export namespace Type { + export const codec = (): Codec => { + return enumeration(__TypeValues) + } + } + + let _codec: Codec + + export const codec = (): Codec => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork() + } + + if (obj.type != null) { + w.uint32(8) + Response.Type.codec().encode(obj.type, w) + } + + if (obj.error != null) { + w.uint32(18) + ErrorResponse.codec().encode(obj.error, w) + } + + if (obj.streamInfo != null) { + w.uint32(26) + StreamInfo.codec().encode(obj.streamInfo, w) + } + + if (obj.identify != null) { + w.uint32(34) + IdentifyResponse.codec().encode(obj.identify, w) + } + + if (obj.dht != null) { + w.uint32(42) + DHTResponse.codec().encode(obj.dht, w) + } + + if (obj.peers != null) { + for (const value of obj.peers) { + w.uint32(50) + PeerInfo.codec().encode(value, w) + } + } + + if (obj.pubsub != null) { + w.uint32(58) + PSResponse.codec().encode(obj.pubsub, w) + } + + if (obj.peerStore != null) { + w.uint32(66) + PeerstoreResponse.codec().encode(obj.peerStore, w) + } + + if (opts.lengthDelimited !== false) { + w.ldelim() + } + }, (reader, length) => { + const obj: any = { + peers: [] + } + + const end = length == null ? 
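/* no nested length given: read to the end of the buffer, otherwise stop after `length` more bytes */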
reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.type = Response.Type.codec().decode(reader) + break + case 2: + obj.error = ErrorResponse.codec().decode(reader, reader.uint32()) + break + case 3: + obj.streamInfo = StreamInfo.codec().decode(reader, reader.uint32()) + break + case 4: + obj.identify = IdentifyResponse.codec().decode(reader, reader.uint32()) + break + case 5: + obj.dht = DHTResponse.codec().decode(reader, reader.uint32()) + break + case 6: + obj.peers.push(PeerInfo.codec().decode(reader, reader.uint32())) + break + case 7: + obj.pubsub = PSResponse.codec().decode(reader, reader.uint32()) + break + case 8: + obj.peerStore = PeerstoreResponse.codec().decode(reader, reader.uint32()) + break + default: + reader.skipType(tag & 7) + break + } + } + + return obj + }) + } + + return _codec + } + + export const encode = (obj: Partial): Uint8Array => { + return encodeMessage(obj, Response.codec()) + } + + export const decode = (buf: Uint8Array | Uint8ArrayList): Response => { + return decodeMessage(buf, Response.codec()) + } +} + +export interface IdentifyResponse { + id: Uint8Array + addrs: Uint8Array[] +} + +export namespace IdentifyResponse { + let _codec: Codec + + export const codec = (): Codec => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork() + } + + if ((obj.id != null && obj.id.byteLength > 0)) { + w.uint32(10) + w.bytes(obj.id) + } + + if (obj.addrs != null) { + for (const value of obj.addrs) { + w.uint32(18) + w.bytes(value) + } + } + + if (opts.lengthDelimited !== false) { + w.ldelim() + } + }, (reader, length) => { + const obj: any = { + id: new Uint8Array(0), + addrs: [] + } + + const end = length == null ? reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.id = reader.bytes() + break + case 2: + obj.addrs.push(reader.bytes()) + break + default: + reader.skipType(tag & 7) + break + } + } + + return obj + }) + } + + return _codec + } + + export const encode = (obj: Partial): Uint8Array => { + return encodeMessage(obj, IdentifyResponse.codec()) + } + + export const decode = (buf: Uint8Array | Uint8ArrayList): IdentifyResponse => { + return decodeMessage(buf, IdentifyResponse.codec()) + } +} + +export interface ConnectRequest { + peer: Uint8Array + addrs: Uint8Array[] + timeout?: bigint +} + +export namespace ConnectRequest { + let _codec: Codec + + export const codec = (): Codec => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork() + } + + if ((obj.peer != null && obj.peer.byteLength > 0)) { + w.uint32(10) + w.bytes(obj.peer) + } + + if (obj.addrs != null) { + for (const value of obj.addrs) { + w.uint32(18) + w.bytes(value) + } + } + + if (obj.timeout != null) { + w.uint32(24) + w.int64(obj.timeout) + } + + if (opts.lengthDelimited !== false) { + w.ldelim() + } + }, (reader, length) => { + const obj: any = { + peer: new Uint8Array(0), + addrs: [] + } + + const end = length == null ? 
reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.peer = reader.bytes() + break + case 2: + obj.addrs.push(reader.bytes()) + break + case 3: + obj.timeout = reader.int64() + break + default: + reader.skipType(tag & 7) + break + } + } + + return obj + }) + } + + return _codec + } + + export const encode = (obj: Partial): Uint8Array => { + return encodeMessage(obj, ConnectRequest.codec()) + } + + export const decode = (buf: Uint8Array | Uint8ArrayList): ConnectRequest => { + return decodeMessage(buf, ConnectRequest.codec()) + } +} + +export interface StreamOpenRequest { + peer: Uint8Array + proto: string[] + timeout?: bigint +} + +export namespace StreamOpenRequest { + let _codec: Codec + + export const codec = (): Codec => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork() + } + + if ((obj.peer != null && obj.peer.byteLength > 0)) { + w.uint32(10) + w.bytes(obj.peer) + } + + if (obj.proto != null) { + for (const value of obj.proto) { + w.uint32(18) + w.string(value) + } + } + + if (obj.timeout != null) { + w.uint32(24) + w.int64(obj.timeout) + } + + if (opts.lengthDelimited !== false) { + w.ldelim() + } + }, (reader, length) => { + const obj: any = { + peer: new Uint8Array(0), + proto: [] + } + + const end = length == null ? reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.peer = reader.bytes() + break + case 2: + obj.proto.push(reader.string()) + break + case 3: + obj.timeout = reader.int64() + break + default: + reader.skipType(tag & 7) + break + } + } + + return obj + }) + } + + return _codec + } + + export const encode = (obj: Partial): Uint8Array => { + return encodeMessage(obj, StreamOpenRequest.codec()) + } + + export const decode = (buf: Uint8Array | Uint8ArrayList): StreamOpenRequest => { + return decodeMessage(buf, StreamOpenRequest.codec()) + } +} + +export interface StreamHandlerRequest { + addr: Uint8Array + proto: string[] +} + +export namespace StreamHandlerRequest { + let _codec: Codec + + export const codec = (): Codec => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork() + } + + if ((obj.addr != null && obj.addr.byteLength > 0)) { + w.uint32(10) + w.bytes(obj.addr) + } + + if (obj.proto != null) { + for (const value of obj.proto) { + w.uint32(18) + w.string(value) + } + } + + if (opts.lengthDelimited !== false) { + w.ldelim() + } + }, (reader, length) => { + const obj: any = { + addr: new Uint8Array(0), + proto: [] + } + + const end = length == null ? 
reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.addr = reader.bytes() + break + case 2: + obj.proto.push(reader.string()) + break + default: + reader.skipType(tag & 7) + break + } + } + + return obj + }) + } + + return _codec + } + + export const encode = (obj: Partial): Uint8Array => { + return encodeMessage(obj, StreamHandlerRequest.codec()) + } + + export const decode = (buf: Uint8Array | Uint8ArrayList): StreamHandlerRequest => { + return decodeMessage(buf, StreamHandlerRequest.codec()) + } +} + +export interface ErrorResponse { + msg: string +} + +export namespace ErrorResponse { + let _codec: Codec + + export const codec = (): Codec => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork() + } + + if ((obj.msg != null && obj.msg !== '')) { + w.uint32(10) + w.string(obj.msg) + } + + if (opts.lengthDelimited !== false) { + w.ldelim() + } + }, (reader, length) => { + const obj: any = { + msg: '' + } + + const end = length == null ? reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.msg = reader.string() + break + default: + reader.skipType(tag & 7) + break + } + } + + return obj + }) + } + + return _codec + } + + export const encode = (obj: Partial): Uint8Array => { + return encodeMessage(obj, ErrorResponse.codec()) + } + + export const decode = (buf: Uint8Array | Uint8ArrayList): ErrorResponse => { + return decodeMessage(buf, ErrorResponse.codec()) + } +} + +export interface StreamInfo { + peer: Uint8Array + addr: Uint8Array + proto: string +} + +export namespace StreamInfo { + let _codec: Codec + + export const codec = (): Codec => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork() + } + + if ((obj.peer != null && obj.peer.byteLength > 0)) { + w.uint32(10) + w.bytes(obj.peer) + } + + if ((obj.addr != null && obj.addr.byteLength > 0)) { + w.uint32(18) + w.bytes(obj.addr) + } + + if ((obj.proto != null && obj.proto !== '')) { + w.uint32(26) + w.string(obj.proto) + } + + if (opts.lengthDelimited !== false) { + w.ldelim() + } + }, (reader, length) => { + const obj: any = { + peer: new Uint8Array(0), + addr: new Uint8Array(0), + proto: '' + } + + const end = length == null ? 
reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.peer = reader.bytes() + break + case 2: + obj.addr = reader.bytes() + break + case 3: + obj.proto = reader.string() + break + default: + reader.skipType(tag & 7) + break + } + } + + return obj + }) + } + + return _codec + } + + export const encode = (obj: Partial): Uint8Array => { + return encodeMessage(obj, StreamInfo.codec()) + } + + export const decode = (buf: Uint8Array | Uint8ArrayList): StreamInfo => { + return decodeMessage(buf, StreamInfo.codec()) + } +} + +export interface DHTRequest { + type?: DHTRequest.Type + peer?: Uint8Array + cid?: Uint8Array + key?: Uint8Array + value?: Uint8Array + count?: number + timeout?: bigint +} + +export namespace DHTRequest { + export enum Type { + FIND_PEER = 'FIND_PEER', + FIND_PEERS_CONNECTED_TO_PEER = 'FIND_PEERS_CONNECTED_TO_PEER', + FIND_PROVIDERS = 'FIND_PROVIDERS', + GET_CLOSEST_PEERS = 'GET_CLOSEST_PEERS', + GET_PUBLIC_KEY = 'GET_PUBLIC_KEY', + GET_VALUE = 'GET_VALUE', + SEARCH_VALUE = 'SEARCH_VALUE', + PUT_VALUE = 'PUT_VALUE', + PROVIDE = 'PROVIDE' + } + + enum __TypeValues { + FIND_PEER = 0, + FIND_PEERS_CONNECTED_TO_PEER = 1, + FIND_PROVIDERS = 2, + GET_CLOSEST_PEERS = 3, + GET_PUBLIC_KEY = 4, + GET_VALUE = 5, + SEARCH_VALUE = 6, + PUT_VALUE = 7, + PROVIDE = 8 + } + + export namespace Type { + export const codec = (): Codec => { + return enumeration(__TypeValues) + } + } + + let _codec: Codec + + export const codec = (): Codec => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork() + } + + if (obj.type != null) { + w.uint32(8) + DHTRequest.Type.codec().encode(obj.type, w) + } + + if (obj.peer != null) { + w.uint32(18) + w.bytes(obj.peer) + } + + if (obj.cid != null) { + w.uint32(26) + w.bytes(obj.cid) + } + + if (obj.key != null) { + w.uint32(34) + w.bytes(obj.key) + } + + if (obj.value != null) { + w.uint32(42) + w.bytes(obj.value) + } + + if (obj.count != null) { + w.uint32(48) + w.int32(obj.count) + } + + if (obj.timeout != null) { + w.uint32(56) + w.int64(obj.timeout) + } + + if (opts.lengthDelimited !== false) { + w.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? 
reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.type = DHTRequest.Type.codec().decode(reader) + break + case 2: + obj.peer = reader.bytes() + break + case 3: + obj.cid = reader.bytes() + break + case 4: + obj.key = reader.bytes() + break + case 5: + obj.value = reader.bytes() + break + case 6: + obj.count = reader.int32() + break + case 7: + obj.timeout = reader.int64() + break + default: + reader.skipType(tag & 7) + break + } + } + + return obj + }) + } + + return _codec + } + + export const encode = (obj: Partial): Uint8Array => { + return encodeMessage(obj, DHTRequest.codec()) + } + + export const decode = (buf: Uint8Array | Uint8ArrayList): DHTRequest => { + return decodeMessage(buf, DHTRequest.codec()) + } +} + +export interface DHTResponse { + type?: DHTResponse.Type + peer?: PeerInfo + value?: Uint8Array +} + +export namespace DHTResponse { + export enum Type { + BEGIN = 'BEGIN', + VALUE = 'VALUE', + END = 'END' + } + + enum __TypeValues { + BEGIN = 0, + VALUE = 1, + END = 2 + } + + export namespace Type { + export const codec = (): Codec => { + return enumeration(__TypeValues) + } + } + + let _codec: Codec + + export const codec = (): Codec => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork() + } + + if (obj.type != null) { + w.uint32(8) + DHTResponse.Type.codec().encode(obj.type, w) + } + + if (obj.peer != null) { + w.uint32(18) + PeerInfo.codec().encode(obj.peer, w) + } + + if (obj.value != null) { + w.uint32(26) + w.bytes(obj.value) + } + + if (opts.lengthDelimited !== false) { + w.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.type = DHTResponse.Type.codec().decode(reader) + break + case 2: + obj.peer = PeerInfo.codec().decode(reader, reader.uint32()) + break + case 3: + obj.value = reader.bytes() + break + default: + reader.skipType(tag & 7) + break + } + } + + return obj + }) + } + + return _codec + } + + export const encode = (obj: Partial): Uint8Array => { + return encodeMessage(obj, DHTResponse.codec()) + } + + export const decode = (buf: Uint8Array | Uint8ArrayList): DHTResponse => { + return decodeMessage(buf, DHTResponse.codec()) + } +} + +export interface PeerInfo { + id: Uint8Array + addrs: Uint8Array[] +} + +export namespace PeerInfo { + let _codec: Codec + + export const codec = (): Codec => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork() + } + + if ((obj.id != null && obj.id.byteLength > 0)) { + w.uint32(10) + w.bytes(obj.id) + } + + if (obj.addrs != null) { + for (const value of obj.addrs) { + w.uint32(18) + w.bytes(value) + } + } + + if (opts.lengthDelimited !== false) { + w.ldelim() + } + }, (reader, length) => { + const obj: any = { + id: new Uint8Array(0), + addrs: [] + } + + const end = length == null ? 
reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.id = reader.bytes() + break + case 2: + obj.addrs.push(reader.bytes()) + break + default: + reader.skipType(tag & 7) + break + } + } + + return obj + }) + } + + return _codec + } + + export const encode = (obj: Partial): Uint8Array => { + return encodeMessage(obj, PeerInfo.codec()) + } + + export const decode = (buf: Uint8Array | Uint8ArrayList): PeerInfo => { + return decodeMessage(buf, PeerInfo.codec()) + } +} + +export interface ConnManagerRequest { + type?: ConnManagerRequest.Type + peer?: Uint8Array + tag?: string + weight?: bigint +} + +export namespace ConnManagerRequest { + export enum Type { + TAG_PEER = 'TAG_PEER', + UNTAG_PEER = 'UNTAG_PEER', + TRIM = 'TRIM' + } + + enum __TypeValues { + TAG_PEER = 0, + UNTAG_PEER = 1, + TRIM = 2 + } + + export namespace Type { + export const codec = (): Codec => { + return enumeration(__TypeValues) + } + } + + let _codec: Codec + + export const codec = (): Codec => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork() + } + + if (obj.type != null) { + w.uint32(8) + ConnManagerRequest.Type.codec().encode(obj.type, w) + } + + if (obj.peer != null) { + w.uint32(18) + w.bytes(obj.peer) + } + + if (obj.tag != null) { + w.uint32(26) + w.string(obj.tag) + } + + if (obj.weight != null) { + w.uint32(32) + w.int64(obj.weight) + } + + if (opts.lengthDelimited !== false) { + w.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.type = ConnManagerRequest.Type.codec().decode(reader) + break + case 2: + obj.peer = reader.bytes() + break + case 3: + obj.tag = reader.string() + break + case 4: + obj.weight = reader.int64() + break + default: + reader.skipType(tag & 7) + break + } + } + + return obj + }) + } + + return _codec + } + + export const encode = (obj: Partial): Uint8Array => { + return encodeMessage(obj, ConnManagerRequest.codec()) + } + + export const decode = (buf: Uint8Array | Uint8ArrayList): ConnManagerRequest => { + return decodeMessage(buf, ConnManagerRequest.codec()) + } +} + +export interface DisconnectRequest { + peer: Uint8Array +} + +export namespace DisconnectRequest { + let _codec: Codec + + export const codec = (): Codec => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork() + } + + if ((obj.peer != null && obj.peer.byteLength > 0)) { + w.uint32(10) + w.bytes(obj.peer) + } + + if (opts.lengthDelimited !== false) { + w.ldelim() + } + }, (reader, length) => { + const obj: any = { + peer: new Uint8Array(0) + } + + const end = length == null ? 
reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.peer = reader.bytes() + break + default: + reader.skipType(tag & 7) + break + } + } + + return obj + }) + } + + return _codec + } + + export const encode = (obj: Partial): Uint8Array => { + return encodeMessage(obj, DisconnectRequest.codec()) + } + + export const decode = (buf: Uint8Array | Uint8ArrayList): DisconnectRequest => { + return decodeMessage(buf, DisconnectRequest.codec()) + } +} + +export interface PSRequest { + type?: PSRequest.Type + topic?: string + data?: Uint8Array +} + +export namespace PSRequest { + export enum Type { + GET_TOPICS = 'GET_TOPICS', + LIST_PEERS = 'LIST_PEERS', + PUBLISH = 'PUBLISH', + SUBSCRIBE = 'SUBSCRIBE' + } + + enum __TypeValues { + GET_TOPICS = 0, + LIST_PEERS = 1, + PUBLISH = 2, + SUBSCRIBE = 3 + } + + export namespace Type { + export const codec = (): Codec => { + return enumeration(__TypeValues) + } + } + + let _codec: Codec + + export const codec = (): Codec => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork() + } + + if (obj.type != null) { + w.uint32(8) + PSRequest.Type.codec().encode(obj.type, w) + } + + if (obj.topic != null) { + w.uint32(18) + w.string(obj.topic) + } + + if (obj.data != null) { + w.uint32(26) + w.bytes(obj.data) + } + + if (opts.lengthDelimited !== false) { + w.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.type = PSRequest.Type.codec().decode(reader) + break + case 2: + obj.topic = reader.string() + break + case 3: + obj.data = reader.bytes() + break + default: + reader.skipType(tag & 7) + break + } + } + + return obj + }) + } + + return _codec + } + + export const encode = (obj: Partial): Uint8Array => { + return encodeMessage(obj, PSRequest.codec()) + } + + export const decode = (buf: Uint8Array | Uint8ArrayList): PSRequest => { + return decodeMessage(buf, PSRequest.codec()) + } +} + +export interface PSMessage { + from?: Uint8Array + data?: Uint8Array + seqno?: Uint8Array + topicIDs: string[] + signature?: Uint8Array + key?: Uint8Array +} + +export namespace PSMessage { + let _codec: Codec + + export const codec = (): Codec => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork() + } + + if (obj.from != null) { + w.uint32(10) + w.bytes(obj.from) + } + + if (obj.data != null) { + w.uint32(18) + w.bytes(obj.data) + } + + if (obj.seqno != null) { + w.uint32(26) + w.bytes(obj.seqno) + } + + if (obj.topicIDs != null) { + for (const value of obj.topicIDs) { + w.uint32(34) + w.string(value) + } + } + + if (obj.signature != null) { + w.uint32(42) + w.bytes(obj.signature) + } + + if (obj.key != null) { + w.uint32(50) + w.bytes(obj.key) + } + + if (opts.lengthDelimited !== false) { + w.ldelim() + } + }, (reader, length) => { + const obj: any = { + topicIDs: [] + } + + const end = length == null ? 
reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.from = reader.bytes() + break + case 2: + obj.data = reader.bytes() + break + case 3: + obj.seqno = reader.bytes() + break + case 4: + obj.topicIDs.push(reader.string()) + break + case 5: + obj.signature = reader.bytes() + break + case 6: + obj.key = reader.bytes() + break + default: + reader.skipType(tag & 7) + break + } + } + + return obj + }) + } + + return _codec + } + + export const encode = (obj: Partial): Uint8Array => { + return encodeMessage(obj, PSMessage.codec()) + } + + export const decode = (buf: Uint8Array | Uint8ArrayList): PSMessage => { + return decodeMessage(buf, PSMessage.codec()) + } +} + +export interface PSResponse { + topics: string[] + peerIDs: Uint8Array[] +} + +export namespace PSResponse { + let _codec: Codec + + export const codec = (): Codec => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork() + } + + if (obj.topics != null) { + for (const value of obj.topics) { + w.uint32(10) + w.string(value) + } + } + + if (obj.peerIDs != null) { + for (const value of obj.peerIDs) { + w.uint32(18) + w.bytes(value) + } + } + + if (opts.lengthDelimited !== false) { + w.ldelim() + } + }, (reader, length) => { + const obj: any = { + topics: [], + peerIDs: [] + } + + const end = length == null ? reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.topics.push(reader.string()) + break + case 2: + obj.peerIDs.push(reader.bytes()) + break + default: + reader.skipType(tag & 7) + break + } + } + + return obj + }) + } + + return _codec + } + + export const encode = (obj: Partial): Uint8Array => { + return encodeMessage(obj, PSResponse.codec()) + } + + export const decode = (buf: Uint8Array | Uint8ArrayList): PSResponse => { + return decodeMessage(buf, PSResponse.codec()) + } +} + +export interface PeerstoreRequest { + type?: PeerstoreRequest.Type + id?: Uint8Array + protos: string[] +} + +export namespace PeerstoreRequest { + export enum Type { + UNSPECIFIED = 'UNSPECIFIED', + GET_PROTOCOLS = 'GET_PROTOCOLS', + GET_PEER_INFO = 'GET_PEER_INFO' + } + + enum __TypeValues { + UNSPECIFIED = 0, + GET_PROTOCOLS = 1, + GET_PEER_INFO = 2 + } + + export namespace Type { + export const codec = (): Codec => { + return enumeration(__TypeValues) + } + } + + let _codec: Codec + + export const codec = (): Codec => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork() + } + + if (obj.type != null) { + w.uint32(8) + PeerstoreRequest.Type.codec().encode(obj.type, w) + } + + if (obj.id != null) { + w.uint32(18) + w.bytes(obj.id) + } + + if (obj.protos != null) { + for (const value of obj.protos) { + w.uint32(26) + w.string(value) + } + } + + if (opts.lengthDelimited !== false) { + w.ldelim() + } + }, (reader, length) => { + const obj: any = { + protos: [] + } + + const end = length == null ? 
reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.type = PeerstoreRequest.Type.codec().decode(reader) + break + case 2: + obj.id = reader.bytes() + break + case 3: + obj.protos.push(reader.string()) + break + default: + reader.skipType(tag & 7) + break + } + } + + return obj + }) + } + + return _codec + } + + export const encode = (obj: Partial): Uint8Array => { + return encodeMessage(obj, PeerstoreRequest.codec()) + } + + export const decode = (buf: Uint8Array | Uint8ArrayList): PeerstoreRequest => { + return decodeMessage(buf, PeerstoreRequest.codec()) + } +} + +export interface PeerstoreResponse { + peer?: PeerInfo + protos: string[] +} + +export namespace PeerstoreResponse { + let _codec: Codec + + export const codec = (): Codec => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork() + } + + if (obj.peer != null) { + w.uint32(10) + PeerInfo.codec().encode(obj.peer, w) + } + + if (obj.protos != null) { + for (const value of obj.protos) { + w.uint32(18) + w.string(value) + } + } + + if (opts.lengthDelimited !== false) { + w.ldelim() + } + }, (reader, length) => { + const obj: any = { + protos: [] + } + + const end = length == null ? reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.peer = PeerInfo.codec().decode(reader, reader.uint32()) + break + case 2: + obj.protos.push(reader.string()) + break + default: + reader.skipType(tag & 7) + break + } + } + + return obj + }) + } + + return _codec + } + + export const encode = (obj: Partial): Uint8Array => { + return encodeMessage(obj, PeerstoreResponse.codec()) + } + + export const decode = (buf: Uint8Array | Uint8ArrayList): PeerstoreResponse => { + return decodeMessage(buf, PeerstoreResponse.codec()) + } +} diff --git a/packages/libp2p-daemon-protocol/src/stream-handler.ts b/packages/libp2p-daemon-protocol/src/stream-handler.ts new file mode 100644 index 0000000000..e87978c1e8 --- /dev/null +++ b/packages/libp2p-daemon-protocol/src/stream-handler.ts @@ -0,0 +1,58 @@ +import { logger } from '@libp2p/logger' +import { lpStream } from '@libp2p/utils' +import type { MultiaddrConnection } from '@libp2p/interface' +import type { LengthPrefixedStream } from '@libp2p/utils' +import type { Uint8ArrayList } from 'uint8arraylist' + +const log = logger('libp2p:daemon-protocol:stream-handler') + +export interface StreamHandlerOptions { + stream: MultiaddrConnection + maxLength?: number +} + +export class StreamHandler { + private readonly stream: MultiaddrConnection + private readonly lp: LengthPrefixedStream + + /** + * Create a stream handler for connection + */ + constructor (opts: StreamHandlerOptions) { + const { stream, maxLength } = opts + + this.stream = stream + this.lp = lpStream(this.stream, { maxDataLength: maxLength ?? 
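/* fall back to a 4096-byte limit per length-prefixed message when no maxLength is supplied */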
4096 }) + } + + /** + * Read and decode message + */ + async read (): Promise { + try { + return await this.lp.read() + } catch (err) { + log.error('read received no value', err) + } + } + + async write (msg: Uint8Array | Uint8ArrayList): Promise { + log('write message') + await this.lp.write(msg) + } + + /** + * Return the handshake rest stream and invalidate handler + */ + rest (): MultiaddrConnection { + return this.lp.unwrap() + } + + /** + * Close the stream + */ + async close (): Promise { + log('closing the stream') + await this.rest().closeWrite() + } +} diff --git a/packages/libp2p-daemon-protocol/src/upgrader.ts b/packages/libp2p-daemon-protocol/src/upgrader.ts new file mode 100644 index 0000000000..1f493d2da2 --- /dev/null +++ b/packages/libp2p-daemon-protocol/src/upgrader.ts @@ -0,0 +1,35 @@ +import { anySignal } from 'any-signal' +import type { ClearableSignal, Connection, ConnectionEncrypter, MultiaddrConnection, StreamMuxerFactory, Upgrader } from '@libp2p/interface' + +export interface OnConnection { + (conn: MultiaddrConnection): void +} + +export class PassThroughUpgrader implements Upgrader { + private readonly onConnection?: OnConnection + + constructor (handler?: OnConnection) { + this.onConnection = handler + } + + async upgradeInbound (maConn: MultiaddrConnection): Promise { + this.onConnection?.(maConn) + } + + async upgradeOutbound (maConn: MultiaddrConnection): Promise { + // @ts-expect-error should return a connection + return maConn + } + + createInboundAbortSignal (signal: AbortSignal): ClearableSignal { + return anySignal([signal]) + } + + getStreamMuxers (): Map { + return new Map() + } + + getConnectionEncrypters (): Map> { + return new Map() + } +} diff --git a/packages/libp2p-daemon-protocol/tsconfig.json b/packages/libp2p-daemon-protocol/tsconfig.json new file mode 100644 index 0000000000..f038990e08 --- /dev/null +++ b/packages/libp2p-daemon-protocol/tsconfig.json @@ -0,0 +1,12 @@ +{ + "extends": "aegir/src/config/tsconfig.aegir.json", + "compilerOptions": { + "outDir": "dist" + }, + "include": [ + "src" + ], + "exclude": [ + "src/index.js" + ] +} diff --git a/packages/libp2p-daemon-protocol/typedoc.json b/packages/libp2p-daemon-protocol/typedoc.json new file mode 100644 index 0000000000..5829ed90eb --- /dev/null +++ b/packages/libp2p-daemon-protocol/typedoc.json @@ -0,0 +1,8 @@ +{ + "readme": "none", + "entryPoints": [ + "./src/index.ts", + "./src/stream-handler.ts", + "./src/upgrader.ts" + ] +} diff --git a/packages/libp2p-daemon-server/.aegir.js b/packages/libp2p-daemon-server/.aegir.js new file mode 100644 index 0000000000..135a6a2211 --- /dev/null +++ b/packages/libp2p-daemon-server/.aegir.js @@ -0,0 +1,8 @@ + +export default { + build: { + config: { + platform: 'node' + } + } +} diff --git a/packages/libp2p-daemon-server/CHANGELOG.md b/packages/libp2p-daemon-server/CHANGELOG.md new file mode 100644 index 0000000000..e01e31e999 --- /dev/null +++ b/packages/libp2p-daemon-server/CHANGELOG.md @@ -0,0 +1,389 @@ +## [@libp2p/daemon-server-v8.0.6](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-8.0.5...@libp2p/daemon-server-8.0.6) (2025-08-13) + +### Dependencies + +* bump aegir from 46.0.5 to 47.0.21 ([#343](https://github.com/libp2p/js-libp2p-daemon/issues/343)) ([704c22f](https://github.com/libp2p/js-libp2p-daemon/commit/704c22f102362c6036642a73979d262e1214baa5)) + +## [@libp2p/daemon-server-v8.0.5](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-8.0.4...@libp2p/daemon-server-8.0.5) (2025-05-19) + +### 
Dependencies + +* bump aegir from 45.2.1 to 46.0.2 ([#297](https://github.com/libp2p/js-libp2p-daemon/issues/297)) ([09c1457](https://github.com/libp2p/js-libp2p-daemon/commit/09c1457ce93a45cab43869892cd9174617a34c29)) + +## [@libp2p/daemon-server-v8.0.4](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-8.0.3...@libp2p/daemon-server-8.0.4) (2025-02-24) + +### Bug Fixes + +* pass abort signal to dial ([#285](https://github.com/libp2p/js-libp2p-daemon/issues/285)) ([a739825](https://github.com/libp2p/js-libp2p-daemon/commit/a7398251d9c77f357ffcacb83fa7ebcd1039b114)) + +### Dependencies + +* bump it-length-prefixed from 9.1.1 to 10.0.1 ([#284](https://github.com/libp2p/js-libp2p-daemon/issues/284)) ([48dcb19](https://github.com/libp2p/js-libp2p-daemon/commit/48dcb19cc8db772509cc709298610484c186a142)) + +## [@libp2p/daemon-server-v8.0.3](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-8.0.2...@libp2p/daemon-server-8.0.3) (2024-11-08) + +### Bug Fixes + +* fix mismatched versions ([c6d619f](https://github.com/libp2p/js-libp2p-daemon/commit/c6d619f9af99a5a28089aeec2f047ca1fca1f2e1)) +* mismatched versions ([98500b8](https://github.com/libp2p/js-libp2p-daemon/commit/98500b8a150c1ebb19c1f51efdfa1836136f3726)) +* update project config ([4cf3a98](https://github.com/libp2p/js-libp2p-daemon/commit/4cf3a98dd76f8a41ef7f70d9e1696f2a06049f69)) + +## @libp2p/daemon-server [7.0.6](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-7.0.5...@libp2p/daemon-server-7.0.6) (2024-08-01) + + +### Bug Fixes + +* use "limited" instead of "transient" ([#272](https://github.com/libp2p/js-libp2p-daemon/issues/272)) ([d43c177](https://github.com/libp2p/js-libp2p-daemon/commit/d43c177a355bd02c719c7644519ebef54a81386f)) + +## @libp2p/daemon-server [7.0.5](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-7.0.4...@libp2p/daemon-server-7.0.5) (2024-04-29) + + +### Dependencies + +* bump @chainsafe/libp2p-gossipsub from 11.2.1 to 13.0.0 ([#265](https://github.com/libp2p/js-libp2p-daemon/issues/265)) ([bcd5041](https://github.com/libp2p/js-libp2p-daemon/commit/bcd504110f58f32977f31ec38989180187ab8bc2)) + +## @libp2p/daemon-server [7.0.4](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-7.0.3...@libp2p/daemon-server-7.0.4) (2024-02-07) + + +### Dependencies + +* bump @libp2p/kad-dht from 11.0.8 to 12.0.5 ([#261](https://github.com/libp2p/js-libp2p-daemon/issues/261)) ([1cbaa23](https://github.com/libp2p/js-libp2p-daemon/commit/1cbaa23c0071e8d599fcef0859de41ac04f2606d)) +* bump uint8arrays from 4.0.10 to 5.0.1 ([#263](https://github.com/libp2p/js-libp2p-daemon/issues/263)) ([b5eb311](https://github.com/libp2p/js-libp2p-daemon/commit/b5eb3114be41176f47fd49164322285aaa8549c1)) + +## @libp2p/daemon-server [7.0.3](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-v7.0.2...@libp2p/daemon-server-7.0.3) (2024-02-07) + + +### Dependencies + +* bump aegir from 41.3.5 to 42.2.3 ([#262](https://github.com/libp2p/js-libp2p-daemon/issues/262)) ([2bb9733](https://github.com/libp2p/js-libp2p-daemon/commit/2bb97338d76e4cc48490326083fb13bd9ae60a74)) + + + +### Dependencies + +* **@libp2p/daemon-protocol:** upgraded to 6.0.2 + +## [@libp2p/daemon-server-v7.0.2](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-v7.0.1...@libp2p/daemon-server-v7.0.2) (2024-02-07) + + +### Dependencies + +* bump multiformats from 12.1.3 to 13.0.1 
([#253](https://github.com/libp2p/js-libp2p-daemon/issues/253)) ([aebd43a](https://github.com/libp2p/js-libp2p-daemon/commit/aebd43ac1e7abae209ce4cc198989c8161a1b022)) +* bump uint8arrays from 4.0.10 to 5.0.1 ([#248](https://github.com/libp2p/js-libp2p-daemon/issues/248)) ([290bb2a](https://github.com/libp2p/js-libp2p-daemon/commit/290bb2ac7c3bf1cdb5174b60010888fbd91a2f17)) + +## [@libp2p/daemon-server-v7.0.1](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-v7.0.0...@libp2p/daemon-server-v7.0.1) (2023-12-04) + + +### Dependencies + +* bump @chainsafe/libp2p-gossipsub from 10.1.1 to 11.0.0 ([#244](https://github.com/libp2p/js-libp2p-daemon/issues/244)) ([6cc8c3c](https://github.com/libp2p/js-libp2p-daemon/commit/6cc8c3c96316fcb4dc32f24dc7d25414dec5f80d)) + +## [@libp2p/daemon-server-v7.0.0](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-v6.0.3...@libp2p/daemon-server-v7.0.0) (2023-11-30) + + +### ⚠ BREAKING CHANGES + +* updates js-libp2p to v1 + +### Trivial Changes + +* update sibling dependencies ([865cb8a](https://github.com/libp2p/js-libp2p-daemon/commit/865cb8a7bf165092f90455dcc895ffa7e97df432)) + + +### Dependencies + +* update libp2p to v1 ([#235](https://github.com/libp2p/js-libp2p-daemon/issues/235)) ([6f2917b](https://github.com/libp2p/js-libp2p-daemon/commit/6f2917b714756e3632ff6c522668f7c2166d4389)) + +## [@libp2p/daemon-server-v6.0.3](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-v6.0.2...@libp2p/daemon-server-v6.0.3) (2023-11-10) + + +### Bug Fixes + +* add logger field ([#234](https://github.com/libp2p/js-libp2p-daemon/issues/234)) ([6f4728c](https://github.com/libp2p/js-libp2p-daemon/commit/6f4728c447859db17aaee613060b67271922fc2a)) + +## [@libp2p/daemon-server-v6.0.2](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-v6.0.1...@libp2p/daemon-server-v6.0.2) (2023-11-02) + + +### Bug Fixes + +* add additional logging ([94d21cb](https://github.com/libp2p/js-libp2p-daemon/commit/94d21cbde3f22898d1f9e261c243283e9bae3dd6)) + + +### Dependencies + +* bump aegir from 40.0.13 to 41.1.6 ([#232](https://github.com/libp2p/js-libp2p-daemon/issues/232)) ([653c74b](https://github.com/libp2p/js-libp2p-daemon/commit/653c74b6272fd6d11d686bf7bb44b49b6757b633)) +* **dev:** bump sinon-ts from 1.0.2 to 2.0.0 ([#233](https://github.com/libp2p/js-libp2p-daemon/issues/233)) ([de13473](https://github.com/libp2p/js-libp2p-daemon/commit/de13473ffd981c0488c27402e16c134f49e4b526)) + +## [@libp2p/daemon-server-v6.0.1](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-v6.0.0...@libp2p/daemon-server-v6.0.1) (2023-08-04) + + +### Dependencies + +* bump @chainsafe/libp2p-gossipsub from 9.1.0 to 10.0.0 ([#214](https://github.com/libp2p/js-libp2p-daemon/issues/214)) ([0308811](https://github.com/libp2p/js-libp2p-daemon/commit/0308811a2ea29d20de3f6a43db32720f21fb9b3f)) + +## [@libp2p/daemon-server-v6.0.0](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-v5.0.2...@libp2p/daemon-server-v6.0.0) (2023-07-31) + + +### ⚠ BREAKING CHANGES + +* stream close methods are now asyc, requires libp2p@0.46.x or later + +### Features + +* close streams gracefully ([#213](https://github.com/libp2p/js-libp2p-daemon/issues/213)) ([92eebfa](https://github.com/libp2p/js-libp2p-daemon/commit/92eebfa12ba1fb42ae6c9e164fb0d69647e62074)) + + +### Dependencies + +* bump aegir from 38.1.8 to 39.0.1 ([#202](https://github.com/libp2p/js-libp2p-daemon/issues/202)) 
([3bf4027](https://github.com/libp2p/js-libp2p-daemon/commit/3bf402752a92c3ebb96435eaa7923ce22ef76ea0)) +* update sibling dependencies ([ba4dd19](https://github.com/libp2p/js-libp2p-daemon/commit/ba4dd190e0e4101291195d5ffdf6bd3f982ee457)) + +## [@libp2p/daemon-server-v5.0.2](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-v5.0.1...@libp2p/daemon-server-v5.0.2) (2023-04-27) + + +### Bug Fixes + +* use interface-libp2p to ensure the correct services are set ([#203](https://github.com/libp2p/js-libp2p-daemon/issues/203)) ([8602a70](https://github.com/libp2p/js-libp2p-daemon/commit/8602a704e45cfa768ad55974d025b2d4be6f42a9)) + +## [@libp2p/daemon-server-v5.0.1](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-v5.0.0...@libp2p/daemon-server-v5.0.1) (2023-04-24) + + +### Dependencies + +* bump @libp2p/interface-peer-store from 1.2.9 to 2.0.0 ([#201](https://github.com/libp2p/js-libp2p-daemon/issues/201)) ([9b146a8](https://github.com/libp2p/js-libp2p-daemon/commit/9b146a8c38c30a13401be6da5259cd9da6bdc25c)) + +## [@libp2p/daemon-server-v5.0.0](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-v4.1.4...@libp2p/daemon-server-v5.0.0) (2023-04-19) + + +### ⚠ BREAKING CHANGES + +* the type of the source/sink properties have changed + +### Dependencies + +* update it-stream-types to 2.x.x ([#196](https://github.com/libp2p/js-libp2p-daemon/issues/196)) ([a09f6d5](https://github.com/libp2p/js-libp2p-daemon/commit/a09f6d58942033b08b579735aaa1537b3a324776)) +* update sibling dependencies ([e0ec5ec](https://github.com/libp2p/js-libp2p-daemon/commit/e0ec5ecf5bfd7f801274d37d51c3dcce652de2ba)) + +## [@libp2p/daemon-server-v4.1.4](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-v4.1.3...@libp2p/daemon-server-v4.1.4) (2023-04-12) + + +### Dependencies + +* bump @libp2p/interface-connection from 3.1.1 to 4.0.0 ([#195](https://github.com/libp2p/js-libp2p-daemon/issues/195)) ([798ecc5](https://github.com/libp2p/js-libp2p-daemon/commit/798ecc594bc64c8e34aad13e1b9884011f0b1f29)) + +## [@libp2p/daemon-server-v4.1.3](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-v4.1.2...@libp2p/daemon-server-v4.1.3) (2023-04-03) + + +### Dependencies + +* update all it-* deps to the latest versions ([#193](https://github.com/libp2p/js-libp2p-daemon/issues/193)) ([cb0aa85](https://github.com/libp2p/js-libp2p-daemon/commit/cb0aa85bbbad651db088594622a9438a127d2a10)) + +## [@libp2p/daemon-server-v4.1.2](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-v4.1.1...@libp2p/daemon-server-v4.1.2) (2023-03-31) + + +### Dependencies + +* bump it-drain from 2.0.1 to 3.0.1 ([#190](https://github.com/libp2p/js-libp2p-daemon/issues/190)) ([306bdc4](https://github.com/libp2p/js-libp2p-daemon/commit/306bdc4fc139c3af429314d7b7d78d0a2238d6f4)) + +## [@libp2p/daemon-server-v4.1.1](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-v4.1.0...@libp2p/daemon-server-v4.1.1) (2023-03-17) + + +### Dependencies + +* bump @multiformats/multiaddr from 11.6.1 to 12.0.0 ([#189](https://github.com/libp2p/js-libp2p-daemon/issues/189)) ([aaf7e2e](https://github.com/libp2p/js-libp2p-daemon/commit/aaf7e2e37423cae78cd16d8e16e06db40fdcd1e3)) + +## [@libp2p/daemon-server-v4.1.0](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-v4.0.3...@libp2p/daemon-server-v4.1.0) (2023-02-23) + + +### Features + +* add get subscribers for pubsub topics 
([#184](https://github.com/libp2p/js-libp2p-daemon/issues/184)) ([c8be43e](https://github.com/libp2p/js-libp2p-daemon/commit/c8be43e5acd6a74cfdd01857343af6f6d8210d5d)) + +## [@libp2p/daemon-server-v4.0.3](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-v4.0.2...@libp2p/daemon-server-v4.0.3) (2023-02-22) + + +### Dependencies + +* bump aegir from 37.12.1 to 38.1.6 ([#183](https://github.com/libp2p/js-libp2p-daemon/issues/183)) ([6725a0a](https://github.com/libp2p/js-libp2p-daemon/commit/6725a0aeba9acb56a7530dece6c65a0f3eadfec5)) + +## [@libp2p/daemon-server-v4.0.2](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-v4.0.1...@libp2p/daemon-server-v4.0.2) (2023-02-22) + + +### Trivial Changes + +* remove lerna ([#171](https://github.com/libp2p/js-libp2p-daemon/issues/171)) ([367f912](https://github.com/libp2p/js-libp2p-daemon/commit/367f9122f2fe1c31c8de7a136cda18d024ff08d7)) + + +### Dependencies + +* **dev:** bump sinon from 14.0.2 to 15.0.1 ([#166](https://github.com/libp2p/js-libp2p-daemon/issues/166)) ([1702efb](https://github.com/libp2p/js-libp2p-daemon/commit/1702efb4248bea4cb9ec19c694c1caae1c0ff16d)) + +## [@libp2p/daemon-server-v4.0.1](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-v4.0.0...@libp2p/daemon-server-v4.0.1) (2023-01-07) + + +### Dependencies + +* bump @libp2p/tcp from 5.0.2 to 6.0.8 ([#165](https://github.com/libp2p/js-libp2p-daemon/issues/165)) ([fb676ab](https://github.com/libp2p/js-libp2p-daemon/commit/fb676ab66348b3c704d2385b4da0d7173bc4a04d)) + +## [@libp2p/daemon-server-v4.0.0](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-v3.0.5...@libp2p/daemon-server-v4.0.0) (2023-01-07) + + +### ⚠ BREAKING CHANGES + +* Update multiformats and related dependencies (#170) + +### Dependencies + +* Update multiformats and related dependencies ([#170](https://github.com/libp2p/js-libp2p-daemon/issues/170)) ([06744a7](https://github.com/libp2p/js-libp2p-daemon/commit/06744a77006dc77dcfb7bd860e4dc6f36a535603)) + +## [@libp2p/daemon-server-v3.0.5](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-v3.0.4...@libp2p/daemon-server-v3.0.5) (2022-10-17) + + +### Dependencies + +* bump it-drain from 1.0.5 to 2.0.0 ([#147](https://github.com/libp2p/js-libp2p-daemon/issues/147)) ([56663f8](https://github.com/libp2p/js-libp2p-daemon/commit/56663f83255a0720b4bf4c7e3805ee4ced8dc86d)) + +## [@libp2p/daemon-server-v3.0.4](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-v3.0.3...@libp2p/daemon-server-v3.0.4) (2022-10-14) + + +### Dependencies + +* **dev:** bump sinon-ts from 0.0.2 to 1.0.0 ([#144](https://github.com/libp2p/js-libp2p-daemon/issues/144)) ([cfc8755](https://github.com/libp2p/js-libp2p-daemon/commit/cfc8755aa1280ac4fc2aae67cf47d7b0b93f605d)) + +## [@libp2p/daemon-server-v3.0.3](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-v3.0.2...@libp2p/daemon-server-v3.0.3) (2022-10-13) + + +### Dependencies + +* update uint8arrays, protons and multiformats ([#143](https://github.com/libp2p/js-libp2p-daemon/issues/143)) ([661139c](https://github.com/libp2p/js-libp2p-daemon/commit/661139c674c9994724e32227d7d9ae2c5da1cea2)) + +## [@libp2p/daemon-server-v3.0.2](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-v3.0.1...@libp2p/daemon-server-v3.0.2) (2022-10-07) + + +### Dependencies + +* bump @libp2p/interface-transport from 1.0.4 to 2.0.0 ([#132](https://github.com/libp2p/js-libp2p-daemon/issues/132)) 
([1a7b2cc](https://github.com/libp2p/js-libp2p-daemon/commit/1a7b2cc653dfb51e92edb1f652452e3c793156c3)) +* bump @libp2p/tcp from 3.0.0 to 4.0.1 ([4e64dce](https://github.com/libp2p/js-libp2p-daemon/commit/4e64dce5e6d18dadaa54a20fff7b2da8bbca11ae)) + +## [@libp2p/daemon-server-v3.0.1](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-v3.0.0...@libp2p/daemon-server-v3.0.1) (2022-09-21) + + +### Dependencies + +* update @multiformats/multiaddr to 11.0.0 ([#128](https://github.com/libp2p/js-libp2p-daemon/issues/128)) ([885d901](https://github.com/libp2p/js-libp2p-daemon/commit/885d9013d82a62e6756b06350932df1242a13296)) + +## [@libp2p/daemon-server-v3.0.0](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-v2.0.4...@libp2p/daemon-server-v3.0.0) (2022-09-09) + + +### ⚠ BREAKING CHANGES + +* the stream type returned by `client.openStream` has changed + +### Bug Fixes + +* allow opening remote streams ([#126](https://github.com/libp2p/js-libp2p-daemon/issues/126)) ([361cc57](https://github.com/libp2p/js-libp2p-daemon/commit/361cc5750de505ab0381ae43609c67d5d4f659a7)) + + +### Dependencies + +* update sibling dependencies ([c3ebd58](https://github.com/libp2p/js-libp2p-daemon/commit/c3ebd588abc36ef45667e8e4e4c0e220303b7510)) + +## [@libp2p/daemon-server-v2.0.4](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-v2.0.3...@libp2p/daemon-server-v2.0.4) (2022-08-10) + + +### Bug Fixes + +* update all deps ([#124](https://github.com/libp2p/js-libp2p-daemon/issues/124)) ([5e46e1e](https://github.com/libp2p/js-libp2p-daemon/commit/5e46e1e26c23428046a6007ab158420d3d830145)) + + +### Documentation + +* readme update ([f569ffc](https://github.com/libp2p/js-libp2p-daemon/commit/f569ffc5c3956248e685d99904408fd3f4d868f4)) + +## [@libp2p/daemon-server-v2.0.3](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-v2.0.2...@libp2p/daemon-server-v2.0.3) (2022-07-31) + + +### Trivial Changes + +* update project config ([#111](https://github.com/libp2p/js-libp2p-daemon/issues/111)) ([345e663](https://github.com/libp2p/js-libp2p-daemon/commit/345e663e34278e780fc2f3a6b595294f925c4521)) + + +### Dependencies + +* update uint8arraylist and protons deps ([#115](https://github.com/libp2p/js-libp2p-daemon/issues/115)) ([34a8334](https://github.com/libp2p/js-libp2p-daemon/commit/34a83340ba855a9c08319ae1cd735dfa8b71c248)) + +## [@libp2p/daemon-server-v2.0.2](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-v2.0.1...@libp2p/daemon-server-v2.0.2) (2022-06-17) + + +### Trivial Changes + +* update deps ([#105](https://github.com/libp2p/js-libp2p-daemon/issues/105)) ([0bdab0e](https://github.com/libp2p/js-libp2p-daemon/commit/0bdab0ee254e32d6dca0e5fe239d4ef16db41b87)) + +## [@libp2p/daemon-server-v2.0.1](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-v2.0.0...@libp2p/daemon-server-v2.0.1) (2022-06-15) + + +### Trivial Changes + +* update deps ([#103](https://github.com/libp2p/js-libp2p-daemon/issues/103)) ([2bfaa37](https://github.com/libp2p/js-libp2p-daemon/commit/2bfaa37e2f056dcd5de5a3882b77f52553c595d4)) + +## [@libp2p/daemon-server-v2.0.0](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-v1.0.5...@libp2p/daemon-server-v2.0.0) (2022-06-15) + + +### ⚠ BREAKING CHANGES + +* uses new single-issue libp2p interface modules + +### Features + +* update to latest libp2p interfaces ([#102](https://github.com/libp2p/js-libp2p-daemon/issues/102)) 
([f5e9121](https://github.com/libp2p/js-libp2p-daemon/commit/f5e91210654ab3c411e316c1c657356c037a0f6a)) + +## [@libp2p/daemon-server-v1.0.5](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-v1.0.4...@libp2p/daemon-server-v1.0.5) (2022-05-25) + + +### Trivial Changes + +* update docs ([#91](https://github.com/libp2p/js-libp2p-daemon/issues/91)) ([5b072ff](https://github.com/libp2p/js-libp2p-daemon/commit/5b072ff89f30fd6cf55a3387bf0961c8ad78a22f)), closes [#83](https://github.com/libp2p/js-libp2p-daemon/issues/83) + +## [@libp2p/daemon-server-v1.0.4](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-v1.0.3...@libp2p/daemon-server-v1.0.4) (2022-05-23) + + +### Bug Fixes + +* update deps ([#90](https://github.com/libp2p/js-libp2p-daemon/issues/90)) ([b50eba3](https://github.com/libp2p/js-libp2p-daemon/commit/b50eba3770e47969dbc30cbcf87c41672cd9c175)) + +## [@libp2p/daemon-server-v1.0.3](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-v1.0.2...@libp2p/daemon-server-v1.0.3) (2022-05-10) + + +### Bug Fixes + +* encode enums correctly ([#86](https://github.com/libp2p/js-libp2p-daemon/issues/86)) ([6ce4633](https://github.com/libp2p/js-libp2p-daemon/commit/6ce4633f3db41ab66f9b8b1abbe84955dde3e9be)) + +## [@libp2p/daemon-server-v1.0.2](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-v1.0.1...@libp2p/daemon-server-v1.0.2) (2022-04-20) + + +### Bug Fixes + +* update interfaces and deps ([#84](https://github.com/libp2p/js-libp2p-daemon/issues/84)) ([25173d5](https://github.com/libp2p/js-libp2p-daemon/commit/25173d5b2edf0e9dd9132707d349cdc862caecdb)) + +## [@libp2p/daemon-server-v1.0.1](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-server-v1.0.0...@libp2p/daemon-server-v1.0.1) (2022-04-07) + + +### Bug Fixes + +* remove protobufjs and replace with protons ([#81](https://github.com/libp2p/js-libp2p-daemon/issues/81)) ([78dd02a](https://github.com/libp2p/js-libp2p-daemon/commit/78dd02a679e55f22c7e24c1ee2b6f92a4679a0b9)) + + +### Trivial Changes + +* update aegir to latest version ([#80](https://github.com/libp2p/js-libp2p-daemon/issues/80)) ([3a98959](https://github.com/libp2p/js-libp2p-daemon/commit/3a98959617d9c19bba9fb064defee3d51acfcc29)) + +## @libp2p/daemon-server-v1.0.0 (2022-03-28) + + +### ⚠ BREAKING CHANGES + +* This module is now ESM only + +### Features + +* convert to typescript ([#78](https://github.com/libp2p/js-libp2p-daemon/issues/78)) ([f18b2a4](https://github.com/libp2p/js-libp2p-daemon/commit/f18b2a45871a2704db51b03e8583eefdcd13554c)) diff --git a/packages/libp2p-daemon-server/CODE_OF_CONDUCT.md b/packages/libp2p-daemon-server/CODE_OF_CONDUCT.md new file mode 100644 index 0000000000..6b0fa54c54 --- /dev/null +++ b/packages/libp2p-daemon-server/CODE_OF_CONDUCT.md @@ -0,0 +1,3 @@ +# Contributor Code of Conduct + +This project follows the [`IPFS Community Code of Conduct`](https://github.com/ipfs/community/blob/master/code-of-conduct.md) diff --git a/packages/libp2p-daemon-server/LICENSE-APACHE b/packages/libp2p-daemon-server/LICENSE-APACHE new file mode 100644 index 0000000000..b09cd7856d --- /dev/null +++ b/packages/libp2p-daemon-server/LICENSE-APACHE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/packages/libp2p-daemon-server/LICENSE-MIT b/packages/libp2p-daemon-server/LICENSE-MIT
new file mode 100644
index 0000000000..72dc60d84b
--- /dev/null
+++ b/packages/libp2p-daemon-server/LICENSE-MIT
@@ -0,0 +1,19 @@
+The MIT License (MIT)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/packages/libp2p-daemon-server/README.md b/packages/libp2p-daemon-server/README.md
new file mode 100644
index 0000000000..a704eb7666
--- /dev/null
+++ b/packages/libp2p-daemon-server/README.md
@@ -0,0 +1,50 @@
+# @libp2p/daemon-server
+
+[![libp2p.io](https://img.shields.io/badge/project-libp2p-yellow.svg?style=flat-square)](http://libp2p.io/)
+[![Discuss](https://img.shields.io/discourse/https/discuss.libp2p.io/posts.svg?style=flat-square)](https://discuss.libp2p.io)
+[![codecov](https://img.shields.io/codecov/c/github/libp2p/js-libp2p-daemon.svg?style=flat-square)](https://codecov.io/gh/libp2p/js-libp2p-daemon)
+[![CI](https://img.shields.io/github/actions/workflow/status/libp2p/js-libp2p-daemon/js-test-and-release.yml?branch=main\&style=flat-square)](https://github.com/libp2p/js-libp2p-daemon/actions/workflows/js-test-and-release.yml?query=branch%3Amain)
+
+> API server for libp2p-daemon instances
+
+# Install
+
+```console
+$ npm i @libp2p/daemon-server
+```
+
+# Specs
+
+The specs for the daemon are currently housed in the go implementation. You can read them at [libp2p/go-libp2p-daemon](https://github.com/libp2p/go-libp2p-daemon/blob/master/specs/README.md)
+
+# Usage
+
+```js
+import { createServer } from '@libp2p/daemon-server'
+import { createLibp2p } from 'libp2p'
+import { multiaddr } from '@multiformats/multiaddr'
+
+const libp2p = await createLibp2p({
+  // ..config
+})
+
+const addr = multiaddr('/ip4/0.0.0.0/tcp/0')
+
+const server = createServer(addr, libp2p)
+await server.start()
+```
+
+# API Docs
+
+-
+
+# License
+
+Licensed under either of
+
+- Apache 2.0, ([LICENSE-APACHE](https://github.com/libp2p/js-libp2p-daemon/blob/main/packages/libp2p-daemon-server/LICENSE-APACHE) / )
+- MIT ([LICENSE-MIT](https://github.com/libp2p/js-libp2p-daemon/blob/main/packages/libp2p-daemon-server/LICENSE-MIT) / )
+
+# Contribution
+
+Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions.
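The server only wires up DHT and PubSub handling when the supplied libp2p node actually exposes those services (the `Server` constructor in `src/index.ts` checks `libp2p.services.dht` and `libp2p.services.pubsub`); requests for an unconfigured service are answered with an error response. Below is a minimal sketch of a node that satisfies the `Libp2p<{ dht: KadDHT, pubsub: GossipSub }>` shape expected by `createServer`, assuming the `kadDHT()` and `gossipsub()` service factories and leaving the transport, connection encrypter and stream muxer configuration elided:

```ts
import { gossipsub } from '@chainsafe/libp2p-gossipsub'
import { createServer } from '@libp2p/daemon-server'
import { kadDHT } from '@libp2p/kad-dht'
import { multiaddr } from '@multiformats/multiaddr'
import { createLibp2p } from 'libp2p'

const libp2p = await createLibp2p({
  // transports, connection encrypters and stream muxers elided -
  // any working libp2p configuration can be used here
  services: {
    dht: kadDHT(),
    pubsub: gossipsub()
  }
})

// createServer is synchronous - it only wraps the node and a TCP listener
const server = createServer(multiaddr('/ip4/127.0.0.1/tcp/0'), libp2p)

// start() starts the libp2p node and binds the daemon control socket
await server.start()

// getMultiaddr() reports the address that was actually bound, which is
// useful when listening on port 0 as above (it throws before start())
console.info('daemon listening on', server.getMultiaddr().toString())

// stop() closes the listener and stops the libp2p node again
await server.stop()
```

This is the same start/stop/getMultiaddr flow that `test/index.spec.ts` exercises against a stubbed libp2p node.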
diff --git a/packages/libp2p-daemon-server/package.json b/packages/libp2p-daemon-server/package.json new file mode 100644 index 0000000000..18b0a2c49f --- /dev/null +++ b/packages/libp2p-daemon-server/package.json @@ -0,0 +1,159 @@ +{ + "name": "@libp2p/daemon-server", + "version": "8.0.6", + "description": "API server for libp2p-daemon instances", + "license": "Apache-2.0 OR MIT", + "homepage": "https://github.com/libp2p/js-libp2p-daemon/tree/main/packages/libp2p-daemon-server#readme", + "repository": { + "type": "git", + "url": "git+https://github.com/libp2p/js-libp2p-daemon.git" + }, + "bugs": { + "url": "https://github.com/libp2p/js-libp2p-daemon/issues" + }, + "publishConfig": { + "access": "public", + "provenance": true + }, + "keywords": [ + "libp2p" + ], + "type": "module", + "types": "./dist/src/index.d.ts", + "files": [ + "src", + "dist", + "!dist/test", + "!**/*.tsbuildinfo" + ], + "exports": { + ".": { + "types": "./src/index.d.ts", + "import": "./dist/src/index.js" + } + }, + "release": { + "branches": [ + "main" + ], + "plugins": [ + [ + "@semantic-release/commit-analyzer", + { + "preset": "conventionalcommits", + "releaseRules": [ + { + "breaking": true, + "release": "major" + }, + { + "revert": true, + "release": "patch" + }, + { + "type": "feat", + "release": "minor" + }, + { + "type": "fix", + "release": "patch" + }, + { + "type": "docs", + "release": "patch" + }, + { + "type": "test", + "release": "patch" + }, + { + "type": "deps", + "release": "patch" + }, + { + "scope": "no-release", + "release": false + } + ] + } + ], + [ + "@semantic-release/release-notes-generator", + { + "preset": "conventionalcommits", + "presetConfig": { + "types": [ + { + "type": "feat", + "section": "Features" + }, + { + "type": "fix", + "section": "Bug Fixes" + }, + { + "type": "chore", + "section": "Trivial Changes" + }, + { + "type": "docs", + "section": "Documentation" + }, + { + "type": "deps", + "section": "Dependencies" + }, + { + "type": "test", + "section": "Tests" + } + ] + } + } + ], + "@semantic-release/changelog", + "@semantic-release/npm", + "@semantic-release/github", + [ + "@semantic-release/git", + { + "assets": [ + "CHANGELOG.md", + "package.json" + ] + } + ] + ] + }, + "scripts": { + "clean": "aegir clean", + "lint": "aegir lint", + "dep-check": "aegir dep-check", + "build": "aegir build", + "pretest": "npm run build", + "test": "aegir test -t node", + "test:node": "aegir test -t node", + "release": "aegir release" + }, + "dependencies": { + "@chainsafe/libp2p-gossipsub": "^14.1.1", + "@libp2p/crypto": "^5.1.7", + "@libp2p/daemon-protocol": "^7.0.0", + "@libp2p/interface": "^2.10.5", + "@libp2p/kad-dht": "^15.1.10", + "@libp2p/logger": "^5.1.21", + "@libp2p/peer-id": "^5.1.8", + "@libp2p/tcp": "^10.1.18", + "@libp2p/utils": "^6.7.1", + "@multiformats/multiaddr": "^12.5.1", + "it-drain": "^3.0.10", + "it-length-prefixed": "^10.0.1", + "it-pushable": "^3.2.3", + "multiformats": "^13.4.0", + "uint8arrays": "^5.1.0" + }, + "devDependencies": { + "aegir": "^47.0.21", + "sinon-ts": "^2.0.0" + } +} diff --git a/packages/libp2p-daemon-server/src/dht.ts b/packages/libp2p-daemon-server/src/dht.ts new file mode 100644 index 0000000000..57536dbc53 --- /dev/null +++ b/packages/libp2p-daemon-server/src/dht.ts @@ -0,0 +1,153 @@ +/* eslint max-depth: ["error", 6] */ + +import { + DHTResponse +} from '@libp2p/daemon-protocol' +import { logger } from '@libp2p/logger' +import drain from 'it-drain' +import { ErrorResponse, OkResponse } from './responses.js' +import type { PeerId } from 
'@libp2p/interface' +import type { KadDHT } from '@libp2p/kad-dht' +import type { CID } from 'multiformats/cid' + +const log = logger('libp2p:daemon-server:dht') + +export interface DHTOperationsInit { + dht: KadDHT +} + +export class DHTOperations { + private readonly dht: KadDHT + + constructor (init: DHTOperationsInit) { + const { dht } = init + + this.dht = dht + } + + async * provide (cid: CID): AsyncGenerator { + try { + await drain(this.dht.provide(cid)) + yield OkResponse() + } catch (err: any) { + log.error(err) + yield ErrorResponse(err) + } + } + + async * getClosestPeers (key: Uint8Array): AsyncGenerator { + yield OkResponse({ + dht: { + type: DHTResponse.Type.BEGIN + } + }) + + for await (const event of this.dht.getClosestPeers(key)) { + if (event.name === 'PEER_RESPONSE') { + yield * event.closer.map(peer => DHTResponse.encode({ + type: DHTResponse.Type.VALUE, + value: peer.id.toMultihash().bytes + })) + } + } + + yield DHTResponse.encode({ + type: DHTResponse.Type.END + }) + } + + async * getPublicKey (peerId: PeerId): AsyncGenerator { + yield ErrorResponse(new Error('FIX ME: not implemented')) + } + + async * getValue (key: Uint8Array): AsyncGenerator { + try { + for await (const event of this.dht.get(key)) { + if (event.name === 'VALUE') { + yield OkResponse({ + dht: { + type: DHTResponse.Type.VALUE, + value: event.value + } + }) + } + } + } catch (err: any) { + log.error(err) + yield ErrorResponse(err) + } + } + + async * putValue (key: Uint8Array, value: Uint8Array): AsyncGenerator { + try { + await drain(this.dht.put(key, value)) + + yield OkResponse() + } catch (err: any) { + log.error(err) + yield ErrorResponse(err) + } + } + + async * findPeer (peerId: PeerId): AsyncGenerator { + try { + for await (const event of this.dht.findPeer(peerId)) { + if (event.name === 'FINAL_PEER') { + yield OkResponse({ + dht: { + type: DHTResponse.Type.VALUE, + peer: { + id: event.peer.id.toMultihash().bytes, + addrs: event.peer.multiaddrs.map(m => m.bytes) + } + } + }) + } + } + + throw new Error('Peer not found') + } catch (err: any) { + log.error(err) + yield ErrorResponse(err) + } + } + + async * findProviders (cid: CID, count: number): AsyncGenerator { + yield OkResponse({ + dht: { + type: DHTResponse.Type.BEGIN + } + }) + + try { + const maxNumProviders = count + let found = 0 + + for await (const event of this.dht.findProviders(cid)) { + if (event.name === 'PEER_RESPONSE') { + for (const provider of event.providers) { + found++ + + yield DHTResponse.encode({ + type: DHTResponse.Type.VALUE, + peer: { + id: provider.id.toMultihash().bytes, + addrs: (provider.multiaddrs ?? 
[]).map(m => m.bytes) + } + }) + } + + if (maxNumProviders === found) { + break + } + } + } + } catch (err: any) { + yield ErrorResponse(err) + } + + yield DHTResponse.encode({ + type: DHTResponse.Type.END + }) + } +} diff --git a/packages/libp2p-daemon-server/src/index.ts b/packages/libp2p-daemon-server/src/index.ts new file mode 100644 index 0000000000..d8501324a4 --- /dev/null +++ b/packages/libp2p-daemon-server/src/index.ts @@ -0,0 +1,552 @@ +/* eslint max-depth: ["error", 6] */ + +import { + Request, + Response, + DHTRequest, + PeerstoreRequest, + PSRequest, + StreamInfo +} from '@libp2p/daemon-protocol' +import { PassThroughUpgrader } from '@libp2p/daemon-protocol/upgrader' +import { defaultLogger, logger } from '@libp2p/logger' +import { peerIdFromMultihash } from '@libp2p/peer-id' +import { tcp } from '@libp2p/tcp' +import { pbStream, lpStream, pipe } from '@libp2p/utils' +import { multiaddr, protocols } from '@multiformats/multiaddr' +import * as lp from 'it-length-prefixed' +import { CID } from 'multiformats/cid' +import * as Digest from 'multiformats/hashes/digest' +import { DHTOperations } from './dht.js' +import { PubSubOperations } from './pubsub.js' +import { ErrorResponse, OkResponse } from './responses.js' +import type { GossipSub } from '@chainsafe/libp2p-gossipsub' +import type { Libp2p, Connection, MultiaddrConnection, Stream, Listener, Transport } from '@libp2p/interface' +import type { KadDHT } from '@libp2p/kad-dht' +import type { Multiaddr } from '@multiformats/multiaddr' + +const LIMIT = 1 << 22 // 4MB +const log = logger('libp2p:daemon-server') + +export interface OpenStream { + streamInfo: StreamInfo + connection: Stream +} + +export interface DaemonInit { + multiaddr: Multiaddr + libp2pNode: Libp2p<{ dht: KadDHT, pubsub: GossipSub }> +} + +export interface Libp2pServer { + start(): Promise + stop(): Promise + getMultiaddr(): Multiaddr +} + +export class Server implements Libp2pServer { + private readonly multiaddr: Multiaddr + private readonly libp2p: Libp2p<{ dht: KadDHT, pubsub: GossipSub }> + private readonly tcp: Transport + private readonly listener: Listener + private readonly dhtOperations?: DHTOperations + private readonly pubsubOperations?: PubSubOperations + + constructor (init: DaemonInit) { + const { multiaddr, libp2pNode } = init + + this.multiaddr = multiaddr + this.libp2p = libp2pNode + this.tcp = tcp()({ + logger: defaultLogger() + }) + this.listener = this.tcp.createListener({ + upgrader: new PassThroughUpgrader(this.handleConnection.bind(this)) + }) + this._onExit = this._onExit.bind(this) + + if (libp2pNode.services.dht != null) { + this.dhtOperations = new DHTOperations({ dht: libp2pNode.services.dht }) + } + + if (libp2pNode.services.pubsub != null) { + this.pubsubOperations = new PubSubOperations({ pubsub: libp2pNode.services.pubsub }) + } + } + + /** + * Connects the daemons libp2p node to the peer provided + */ + async connect (request: Request): Promise { + if (request.connect?.addrs == null) { + throw new Error('Invalid request') + } + + const peer = request.connect.peer + const addrs = request.connect.addrs.map((a) => multiaddr(a)) + const peerId = peerIdFromMultihash(Digest.decode(peer)) + + log('connect - adding multiaddrs %a to peer %p', addrs, peerId) + await this.libp2p.peerStore.merge(peerId, { + multiaddrs: addrs + }) + + log('connect - dial %p', peerId) + return this.libp2p.dial(peerId) + } + + /** + * Opens a stream on one of the given protocols to the given peer + */ + async openStream (request: Request): Promise { + if 
(request.streamOpen?.proto == null) { + throw new Error('Invalid request') + } + + const { peer, proto } = request.streamOpen + const peerId = peerIdFromMultihash(Digest.decode(peer)) + + log('openStream - dial %p', peerId) + const connection = await this.libp2p.dial(peerId) + + log('openStream - open stream for protocol(s) %s', proto) + const stream = await connection.newStream(proto, { + runOnLimitedConnection: true + }) + + return { + streamInfo: { + peer: peerId.toMultihash().bytes, + addr: connection.remoteAddr.bytes, + proto: stream.protocol ?? '' + }, + connection: stream + } + } + + /** + * Sends inbound requests for the given protocol + * to the unix socket path provided. If an existing handler + * is registered at the path, it will be overridden. + */ + async registerStreamHandler (request: Request): Promise { + if (request.streamHandler?.proto == null) { + throw new Error('Invalid request') + } + + const protocols = request.streamHandler.proto + const addr = multiaddr(request.streamHandler.addr) + let conn: MultiaddrConnection + + log('registerStreamHandler - handle %s', protocols) + await this.libp2p.handle(protocols, (stream, connection) => { + Promise.resolve() + .then(async () => { + // Connect the client socket with the libp2p connection + // @ts-expect-error because we use a passthrough upgrader, + // this is actually a MultiaddrConnection and not a Connection + conn = await this.tcp.dial(addr, { + upgrader: new PassThroughUpgrader(), + signal: AbortSignal.timeout(10_000) + }) + + const message = StreamInfo.encode({ + peer: connection.remotePeer.toMultihash().bytes, + addr: connection.remoteAddr.bytes, + proto: stream.protocol ?? '' + }) + const encodedMessage = lp.encode.single(message) + + // Tell the client about the new connection + stream.send(encodedMessage) + + // And then begin piping the client and peer connection + // eslint-disable-next-line @typescript-eslint/await-thenable + await pipe( + stream, + conn, + stream + ) + }) + .catch(async err => { + log.error(err) + + if (conn != null) { + conn.abort(err) + } + }) + .finally(() => { + if (conn != null) { + conn.closeWrite() + .catch(err => { + log.error(err) + }) + } + }) + }, { + runOnLimitedConnection: true + }) + } + + /** + * Listens for process exit to handle cleanup + */ + _listen (): void { + // listen for graceful termination + process.on('SIGTERM', this._onExit) + process.on('SIGINT', this._onExit) + process.on('SIGHUP', this._onExit) + } + + _onExit (): void { + void this.stop({ exit: true }).catch(err => { + log.error(err) + }) + } + + /** + * Starts the daemon + */ + async start (): Promise { + this._listen() + await this.libp2p.start() + await this.listener.listen(this.multiaddr) + } + + getMultiaddr (): Multiaddr { + const addrs = this.listener.getAddrs() + + if (addrs.length > 0) { + return addrs[0] + } + + throw new Error('Not started') + } + + /** + * Stops the daemon + */ + async stop (options = { exit: false }): Promise { + await this.libp2p.stop() + await this.listener.close() + if (options.exit) { + log('server closed, exiting') + } + process.removeListener('SIGTERM', this._onExit) + process.removeListener('SIGINT', this._onExit) + process.removeListener('SIGHUP', this._onExit) + } + + async * handlePeerStoreRequest (request: PeerstoreRequest): AsyncGenerator { + try { + switch (request.type) { + case PeerstoreRequest.Type.GET_PROTOCOLS: + if (request.id == null) { + throw new Error('Invalid request') + } + + const peerId = peerIdFromMultihash(Digest.decode(request.id)) // 
eslint-disable-line no-case-declarations + const peer = await this.libp2p.peerStore.get(peerId) // eslint-disable-line no-case-declarations + const protos = peer.protocols // eslint-disable-line no-case-declarations + yield OkResponse({ peerStore: { protos } }) + return + case PeerstoreRequest.Type.GET_PEER_INFO: + throw new Error('ERR_NOT_IMPLEMENTED') + default: + throw new Error('ERR_INVALID_REQUEST_TYPE') + } + } catch (err: any) { + log.error(err) + yield ErrorResponse(err) + } + } + + /** + * Parses and responds to PSRequests + */ + async * handlePubsubRequest (request: PSRequest): AsyncGenerator { + try { + if (this.libp2p.services.pubsub == null || (this.pubsubOperations == null)) { + throw new Error('PubSub not configured') + } + + switch (request.type) { + case PSRequest.Type.GET_TOPICS: + yield * this.pubsubOperations.getTopics() + return + case PSRequest.Type.SUBSCRIBE: + if (request.topic == null) { + throw new Error('Invalid request') + } + + yield * this.pubsubOperations.subscribe(request.topic) + return + case PSRequest.Type.PUBLISH: + if (request.topic == null || request.data == null) { + throw new Error('Invalid request') + } + + yield * this.pubsubOperations.publish(request.topic, request.data) + return + case PSRequest.Type.LIST_PEERS: + if (request.topic == null) { + throw new Error('Invalid request') + } + + yield * this.pubsubOperations.listPeers(request.topic) + return + default: + throw new Error('ERR_INVALID_REQUEST_TYPE') + } + } catch (err: any) { + log.error(err) + yield ErrorResponse(err) + } + } + + /** + * Parses and responds to DHTRequests + */ + async * handleDHTRequest (request: DHTRequest): AsyncGenerator { + try { + if (this.libp2p.services.dht == null || (this.dhtOperations == null)) { + throw new Error('DHT not configured') + } + + switch (request.type) { + case DHTRequest.Type.FIND_PEER: + if (request.peer == null) { + throw new Error('Invalid request') + } + + yield * this.dhtOperations.findPeer(peerIdFromMultihash(Digest.decode(request.peer))) + return + case DHTRequest.Type.FIND_PROVIDERS: + if (request.cid == null) { + throw new Error('Invalid request') + } + + yield * this.dhtOperations.findProviders(CID.decode(request.cid), request.count ?? 
20) + return + case DHTRequest.Type.PROVIDE: + if (request.cid == null) { + throw new Error('Invalid request') + } + + yield * this.dhtOperations.provide(CID.decode(request.cid)) + return + case DHTRequest.Type.GET_CLOSEST_PEERS: + if (request.key == null) { + throw new Error('Invalid request') + } + + yield * this.dhtOperations.getClosestPeers(request.key) + return + case DHTRequest.Type.GET_PUBLIC_KEY: + if (request.peer == null) { + throw new Error('Invalid request') + } + + yield * this.dhtOperations.getPublicKey(peerIdFromMultihash(Digest.decode(request.peer))) + return + case DHTRequest.Type.GET_VALUE: + if (request.key == null) { + throw new Error('Invalid request') + } + + yield * this.dhtOperations.getValue(request.key) + return + case DHTRequest.Type.PUT_VALUE: + if (request.key == null || request.value == null) { + throw new Error('Invalid request') + } + + yield * this.dhtOperations.putValue(request.key, request.value) + return + default: + throw new Error('ERR_INVALID_REQUEST_TYPE') + } + } catch (err: any) { + log.error(err) + yield ErrorResponse(err) + } + } + + /** + * Handles requests for the given connection + */ + handleConnection (maConn: MultiaddrConnection): void { + void Promise.resolve().then(async () => { + const daemon = this + + let pb = pbStream(maConn, { + maxDataLength: LIMIT + }) + + const request = await pb.read(Request) + log('read', request) + + try { + switch (request.type) { + // Connect to another peer + case Request.Type.CONNECT: { + await daemon.connect(request) + await pb.write({ + type: Response.Type.OK + }, Response) + + break + } + // Get the daemon peer id and addresses + case Request.Type.IDENTIFY: { + await pb.write({ + type: Response.Type.OK, + identify: { + id: daemon.libp2p.peerId.toMultihash().bytes, + addrs: daemon.libp2p.getMultiaddrs().map(ma => ma.decapsulateCode(protocols('p2p').code)).map(m => m.bytes) + } + }, Response) + + break + } + // Get a list of our current peers + case Request.Type.LIST_PEERS: { + const peers = [] + const seen = new Set() + + for (const connection of daemon.libp2p.getConnections()) { + const peerId = connection.remotePeer.toString() + + if (seen.has(peerId)) { + continue + } + + seen.add(peerId) + + peers.push({ + id: connection.remotePeer.toMultihash().bytes, + addrs: [connection.remoteAddr.bytes] + }) + } + + await pb.write({ + type: Response.Type.OK, + peers + }, Response) + + break + } + case Request.Type.STREAM_OPEN: { + const response = await daemon.openStream(request) + + // write the response + await pb.write({ + type: Response.Type.OK, + streamInfo: response.streamInfo + }, Response) + + const stream = pb.unwrap() + + // then pipe the connection to the client + // eslint-disable-next-line @typescript-eslint/await-thenable + await pipe( + stream, + response.connection, + stream + ) + + // Exit the iterator, no more requests can come through + break + } + case Request.Type.STREAM_HANDLER: { + await daemon.registerStreamHandler(request) + + // write the response + await pb.write({ + type: Response.Type.OK + }, Response) + + break + } + case Request.Type.PEERSTORE: { + if (request.peerStore == null) { + throw new Error('ERR_INVALID_REQUEST') + } + + const stream = pb.unwrap() + const lp = lpStream(stream) + + for await (const buf of daemon.handlePeerStoreRequest(request.peerStore)) { + await lp.write(buf) + } + + break + } + case Request.Type.PUBSUB: { + if (request.pubsub == null) { + throw new Error('ERR_INVALID_REQUEST') + } + + const stream = pb.unwrap() + const lp = lpStream(stream) + + for await 
(const buf of daemon.handlePubsubRequest(request.pubsub)) { + await lp.write(buf) + } + + break + } + case Request.Type.DHT: { + if (request.dht == null) { + throw new Error('ERR_INVALID_REQUEST') + } + + const stream = pb.unwrap() + const lp = lpStream(stream) + + for await (const buf of daemon.handleDHTRequest(request.dht)) { + await lp.write(buf) + } + + break + } + // Not yet supported or doesn't exist + default: + throw new Error('ERR_INVALID_REQUEST_TYPE') + } + } catch (err: any) { + log.error(err) + + // recreate pb stream in case the original was unwrapped already + pb = pbStream(pb.unwrap(), { + maxDataLength: LIMIT + }) + + await pb.write({ + type: Response.Type.ERROR, + error: { + msg: err.message + }, + peers: [] + }, Response) + } finally { + await pb.unwrap().closeWrite() + } + }) + .catch(err => { + log.error('error handling incoming connection', err) + }) + } +} + +/** + * Creates a daemon from the provided Daemon Options + */ +export const createServer = (multiaddr: Multiaddr, libp2pNode: Libp2p<{ dht: KadDHT, pubsub: GossipSub }>): Libp2pServer => { + const daemon = new Server({ + multiaddr, + libp2pNode + }) + + return daemon +} diff --git a/packages/libp2p-daemon-server/src/pubsub.ts b/packages/libp2p-daemon-server/src/pubsub.ts new file mode 100644 index 0000000000..ba19fcca82 --- /dev/null +++ b/packages/libp2p-daemon-server/src/pubsub.ts @@ -0,0 +1,102 @@ +/* eslint max-depth: ["error", 6] */ + +import { publicKeyToProtobuf } from '@libp2p/crypto/keys' +import { + PSMessage +} from '@libp2p/daemon-protocol' +import { logger } from '@libp2p/logger' +import { pushable } from 'it-pushable' +import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' +import { ErrorResponse, OkResponse } from './responses.js' +import type { GossipSub } from '@chainsafe/libp2p-gossipsub' + +const log = logger('libp2p:daemon-server:pubsub') + +export interface PubSubOperationsInit { + pubsub: GossipSub +} + +export class PubSubOperations { + private readonly pubsub: GossipSub + + constructor (init: PubSubOperationsInit) { + const { pubsub } = init + + this.pubsub = pubsub + } + + async * getTopics (): AsyncGenerator { + try { + yield OkResponse({ + pubsub: { + topics: this.pubsub.getTopics(), + peerIDs: [] + } + }) + } catch (err: any) { + log.error(err) + yield ErrorResponse(err) + } + } + + async * subscribe (topic: string): AsyncGenerator { + try { + const onMessage = pushable() + this.pubsub.subscribe(topic) + + this.pubsub.addEventListener('message', (evt) => { + const msg = evt.detail + + if (msg.topic !== topic) { + return + } + + if (msg.type === 'signed') { + onMessage.push(PSMessage.encode({ + from: msg.from.toMultihash().bytes, + data: msg.data, + seqno: msg.sequenceNumber == null ? 
undefined : uint8ArrayFromString(msg.sequenceNumber.toString(16).padStart(16, '0'), 'base16'), + topicIDs: [msg.topic], + signature: msg.signature, + key: publicKeyToProtobuf(msg.key) + }).subarray()) + } else { + onMessage.push(PSMessage.encode({ + data: msg.data, + topicIDs: [msg.topic] + }).subarray()) + } + }) + + yield OkResponse() + yield * onMessage + } catch (err: any) { + log.error(err) + yield ErrorResponse(err) + } + } + + async * publish (topic: string, data: Uint8Array): AsyncGenerator { + try { + await this.pubsub.publish(topic, data) + yield OkResponse() + } catch (err: any) { + log.error(err) + yield ErrorResponse(err) + } + } + + async * listPeers (topic: string): AsyncGenerator { + try { + yield OkResponse({ + pubsub: { + topics: [topic], + peerIDs: this.pubsub.getSubscribers(topic).map(peer => peer.toMultihash().bytes) + } + }) + } catch (err: any) { + log.error(err) + yield ErrorResponse(err) + } + } +} diff --git a/packages/libp2p-daemon-server/src/responses.ts b/packages/libp2p-daemon-server/src/responses.ts new file mode 100644 index 0000000000..96b1b9fbb4 --- /dev/null +++ b/packages/libp2p-daemon-server/src/responses.ts @@ -0,0 +1,25 @@ +import { Response } from '@libp2p/daemon-protocol' + +/** + * Creates and encodes an OK response + */ +export function OkResponse (data?: Partial): Uint8Array { + return Response.encode({ + type: Response.Type.OK, + peers: [], + ...data + }).subarray() +} + +/** + * Creates and encodes an ErrorResponse + */ +export function ErrorResponse (err: Error): Uint8Array { + return Response.encode({ + type: Response.Type.ERROR, + error: { + msg: err.message + }, + peers: [] + }).subarray() +} diff --git a/packages/libp2p-daemon-server/test/index.spec.ts b/packages/libp2p-daemon-server/test/index.spec.ts new file mode 100644 index 0000000000..14db18b477 --- /dev/null +++ b/packages/libp2p-daemon-server/test/index.spec.ts @@ -0,0 +1,53 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 5] */ + +import { multiaddr } from '@multiformats/multiaddr' +import { expect } from 'aegir/chai' +import { stubInterface } from 'sinon-ts' +import { createServer } from '../src/index.js' +import type { GossipSub } from '@chainsafe/libp2p-gossipsub' +import type { Libp2p } from '@libp2p/interface' +import type { KadDHT } from '@libp2p/kad-dht' + +const ma = multiaddr('/ip4/0.0.0.0/tcp/0') + +describe('server', () => { + it('should start', async () => { + const libp2p = stubInterface>() + + const server = createServer(ma, libp2p) + + await server.start() + + expect(libp2p.start.called).to.be.true() + + await server.stop() + }) + + it('should stop', async () => { + const libp2p = stubInterface>() + + const server = createServer(ma, libp2p) + + await server.start() + await server.stop() + + expect(libp2p.stop.called).to.be.true() + }) + + it('should return multiaddrs', async () => { + const libp2p = stubInterface>() + + const server = createServer(ma, libp2p) + + expect(() => server.getMultiaddr()).to.throw(/Not started/) + + await server.start() + + expect(server.getMultiaddr()).to.be.ok() + + await server.stop() + + expect(() => server.getMultiaddr()).to.throw(/Not started/) + }) +}) diff --git a/packages/libp2p-daemon-server/tsconfig.json b/packages/libp2p-daemon-server/tsconfig.json new file mode 100644 index 0000000000..44ed7b45f6 --- /dev/null +++ b/packages/libp2p-daemon-server/tsconfig.json @@ -0,0 +1,15 @@ +{ + "extends": "aegir/src/config/tsconfig.aegir.json", + "compilerOptions": { + "outDir": "dist" + }, + "include": [ + "src", + 
"test" + ], + "references": [ + { + "path": "../libp2p-daemon-protocol" + } + ] +} diff --git a/packages/libp2p-daemon-server/typedoc.json b/packages/libp2p-daemon-server/typedoc.json new file mode 100644 index 0000000000..db0b0747ef --- /dev/null +++ b/packages/libp2p-daemon-server/typedoc.json @@ -0,0 +1,6 @@ +{ + "readme": "none", + "entryPoints": [ + "./src/index.ts" + ] +} diff --git a/packages/libp2p-daemon/.aegir.js b/packages/libp2p-daemon/.aegir.js new file mode 100644 index 0000000000..135a6a2211 --- /dev/null +++ b/packages/libp2p-daemon/.aegir.js @@ -0,0 +1,8 @@ + +export default { + build: { + config: { + platform: 'node' + } + } +} diff --git a/packages/libp2p-daemon/CHANGELOG.md b/packages/libp2p-daemon/CHANGELOG.md new file mode 100644 index 0000000000..fed2a15078 --- /dev/null +++ b/packages/libp2p-daemon/CHANGELOG.md @@ -0,0 +1,476 @@ +## [@libp2p/daemon-v5.0.4](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-5.0.3...@libp2p/daemon-5.0.4) (2025-08-13) + +### Bug Fixes + +* update all deps ([#348](https://github.com/libp2p/js-libp2p-daemon/issues/348)) ([44bf148](https://github.com/libp2p/js-libp2p-daemon/commit/44bf148f37b9f4091dc8ed26cf343e196607ebbc)) + +## [@libp2p/daemon-v5.0.3](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-5.0.2...@libp2p/daemon-5.0.3) (2025-08-13) + +### Dependencies + +* bump aegir from 46.0.5 to 47.0.21 ([#343](https://github.com/libp2p/js-libp2p-daemon/issues/343)) ([704c22f](https://github.com/libp2p/js-libp2p-daemon/commit/704c22f102362c6036642a73979d262e1214baa5)) + +## [@libp2p/daemon-v5.0.2](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-5.0.1...@libp2p/daemon-5.0.2) (2025-05-19) + +### Dependencies + +* bump aegir from 45.2.1 to 46.0.2 ([#297](https://github.com/libp2p/js-libp2p-daemon/issues/297)) ([09c1457](https://github.com/libp2p/js-libp2p-daemon/commit/09c1457ce93a45cab43869892cd9174617a34c29)) + +## [@libp2p/daemon-v5.0.1](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-5.0.0...@libp2p/daemon-5.0.1) (2024-11-08) + +### Bug Fixes + +* more mismatched versions ([327942a](https://github.com/libp2p/js-libp2p-daemon/commit/327942a44835533ebbc931ad33f03e8c1c9d4b28)) +* update project config ([4cf3a98](https://github.com/libp2p/js-libp2p-daemon/commit/4cf3a98dd76f8a41ef7f70d9e1696f2a06049f69)) + +### Dependencies + +* bump aegir from 44.1.4 to 45.0.1 ([#280](https://github.com/libp2p/js-libp2p-daemon/issues/280)) ([e322ae9](https://github.com/libp2p/js-libp2p-daemon/commit/e322ae99681bfbfdc9680f00694c381ba2ac37fa)) +* **dev:** bump sinon from 18.0.1 to 19.0.2 ([#276](https://github.com/libp2p/js-libp2p-daemon/issues/276)) ([278cb24](https://github.com/libp2p/js-libp2p-daemon/commit/278cb24ff8bc84cb83384ad7f4804eebccf6e445)) + +## @libp2p/daemon [4.0.2](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-4.0.1...@libp2p/daemon-4.0.2) (2024-08-01) + + +### Dependencies + +* **dev:** bump sinon from 17.0.2 to 18.0.0 ([#267](https://github.com/libp2p/js-libp2p-daemon/issues/267)) ([e9fb07a](https://github.com/libp2p/js-libp2p-daemon/commit/e9fb07a3b3350a6dcd8f4ff64fb63eba456dccaf)) + + + +### Dependencies + +* **@libp2p/daemon-server:** upgraded to 7.0.6 + +## @libp2p/daemon [4.0.1](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-v4.0.0...@libp2p/daemon-4.0.1) (2024-02-07) + + +### Dependencies + +* bump aegir from 41.3.5 to 42.2.3 ([#262](https://github.com/libp2p/js-libp2p-daemon/issues/262)) 
([2bb9733](https://github.com/libp2p/js-libp2p-daemon/commit/2bb97338d76e4cc48490326083fb13bd9ae60a74)) + + + +### Dependencies + +* **@libp2p/daemon-server:** upgraded to 7.0.3 + +## [@libp2p/daemon-v4.0.0](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-v3.0.1...@libp2p/daemon-v4.0.0) (2023-11-30) + + +### ⚠ BREAKING CHANGES + +* updates js-libp2p to v1 + +### Trivial Changes + +* update sibling dependencies ([3e711d1](https://github.com/libp2p/js-libp2p-daemon/commit/3e711d137983192db220d549c768cb4d82cf7f73)) + + +### Dependencies + +* update libp2p to v1 ([#235](https://github.com/libp2p/js-libp2p-daemon/issues/235)) ([6f2917b](https://github.com/libp2p/js-libp2p-daemon/commit/6f2917b714756e3632ff6c522668f7c2166d4389)) + +## [@libp2p/daemon-v3.0.1](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-v3.0.0...@libp2p/daemon-v3.0.1) (2023-11-02) + + +### Dependencies + +* bump aegir from 40.0.13 to 41.1.6 ([#232](https://github.com/libp2p/js-libp2p-daemon/issues/232)) ([653c74b](https://github.com/libp2p/js-libp2p-daemon/commit/653c74b6272fd6d11d686bf7bb44b49b6757b633)) +* **dev:** bump sinon from 15.2.0 to 17.0.1 ([#231](https://github.com/libp2p/js-libp2p-daemon/issues/231)) ([591b464](https://github.com/libp2p/js-libp2p-daemon/commit/591b464c3517948049dc169157333e774ca21f3c)) + +## [@libp2p/daemon-v3.0.0](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-v2.0.9...@libp2p/daemon-v3.0.0) (2023-07-31) + + +### ⚠ BREAKING CHANGES + +* stream close methods are now asyc, requires libp2p@0.46.x or later + +### Features + +* close streams gracefully ([#213](https://github.com/libp2p/js-libp2p-daemon/issues/213)) ([92eebfa](https://github.com/libp2p/js-libp2p-daemon/commit/92eebfa12ba1fb42ae6c9e164fb0d69647e62074)) + + +### Dependencies + +* bump aegir from 38.1.8 to 39.0.1 ([#202](https://github.com/libp2p/js-libp2p-daemon/issues/202)) ([3bf4027](https://github.com/libp2p/js-libp2p-daemon/commit/3bf402752a92c3ebb96435eaa7923ce22ef76ea0)) +* update sibling dependencies ([fdfca91](https://github.com/libp2p/js-libp2p-daemon/commit/fdfca91afb258620c282296055e385b410018a6a)) + +## [@libp2p/daemon-v2.0.9](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-v2.0.8...@libp2p/daemon-v2.0.9) (2023-04-19) + + +### Dependencies + +* update sibling dependencies ([db50405](https://github.com/libp2p/js-libp2p-daemon/commit/db50405ddec3a68ad265c3d3233595187bc4895d)) + +## [@libp2p/daemon-v2.0.8](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-v2.0.7...@libp2p/daemon-v2.0.8) (2023-03-17) + + +### Dependencies + +* bump @multiformats/multiaddr from 11.6.1 to 12.0.0 ([#189](https://github.com/libp2p/js-libp2p-daemon/issues/189)) ([aaf7e2e](https://github.com/libp2p/js-libp2p-daemon/commit/aaf7e2e37423cae78cd16d8e16e06db40fdcd1e3)) + +## [@libp2p/daemon-v2.0.7](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-v2.0.6...@libp2p/daemon-v2.0.7) (2023-02-22) + + +### Dependencies + +* bump aegir from 37.12.1 to 38.1.6 ([#183](https://github.com/libp2p/js-libp2p-daemon/issues/183)) ([6725a0a](https://github.com/libp2p/js-libp2p-daemon/commit/6725a0aeba9acb56a7530dece6c65a0f3eadfec5)) + +## [@libp2p/daemon-v2.0.6](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-v2.0.5...@libp2p/daemon-v2.0.6) (2023-02-22) + + +### Trivial Changes + +* remove lerna ([#171](https://github.com/libp2p/js-libp2p-daemon/issues/171)) 
([367f912](https://github.com/libp2p/js-libp2p-daemon/commit/367f9122f2fe1c31c8de7a136cda18d024ff08d7)) + + +### Dependencies + +* **dev:** bump sinon from 14.0.2 to 15.0.1 ([#166](https://github.com/libp2p/js-libp2p-daemon/issues/166)) ([1702efb](https://github.com/libp2p/js-libp2p-daemon/commit/1702efb4248bea4cb9ec19c694c1caae1c0ff16d)) + +## [@libp2p/daemon-v2.0.5](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-v2.0.4...@libp2p/daemon-v2.0.5) (2023-01-07) + + +### Dependencies + +* update sibling dependencies ([775bd83](https://github.com/libp2p/js-libp2p-daemon/commit/775bd83a63ae99c4b892f0169f76dbe39163e2d4)) + +## [@libp2p/daemon-v2.0.4](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-v2.0.3...@libp2p/daemon-v2.0.4) (2022-10-13) + + +### Dependencies + +* update uint8arrays, protons and multiformats ([#143](https://github.com/libp2p/js-libp2p-daemon/issues/143)) ([661139c](https://github.com/libp2p/js-libp2p-daemon/commit/661139c674c9994724e32227d7d9ae2c5da1cea2)) + +## [@libp2p/daemon-v2.0.3](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-v2.0.2...@libp2p/daemon-v2.0.3) (2022-09-21) + + +### Dependencies + +* update @multiformats/multiaddr to 11.0.0 ([#128](https://github.com/libp2p/js-libp2p-daemon/issues/128)) ([885d901](https://github.com/libp2p/js-libp2p-daemon/commit/885d9013d82a62e6756b06350932df1242a13296)) + +## [@libp2p/daemon-v2.0.2](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-v2.0.1...@libp2p/daemon-v2.0.2) (2022-09-09) + + +### Trivial Changes + +* update project config ([#111](https://github.com/libp2p/js-libp2p-daemon/issues/111)) ([345e663](https://github.com/libp2p/js-libp2p-daemon/commit/345e663e34278e780fc2f3a6b595294f925c4521)) + + +### Dependencies + +* update sibling dependencies ([56711c4](https://github.com/libp2p/js-libp2p-daemon/commit/56711c4f14b0cf2370b8612fe07d42ed2ac8363c)) + +## [@libp2p/daemon-v2.0.1](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-v2.0.0...@libp2p/daemon-v2.0.1) (2022-06-15) + + +### Trivial Changes + +* update deps ([#103](https://github.com/libp2p/js-libp2p-daemon/issues/103)) ([2bfaa37](https://github.com/libp2p/js-libp2p-daemon/commit/2bfaa37e2f056dcd5de5a3882b77f52553c595d4)) + +## [@libp2p/daemon-v2.0.0](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-v1.0.4...@libp2p/daemon-v2.0.0) (2022-06-15) + + +### ⚠ BREAKING CHANGES + +* uses new single-issue libp2p interface modules + +### Features + +* update to latest libp2p interfaces ([#102](https://github.com/libp2p/js-libp2p-daemon/issues/102)) ([f5e9121](https://github.com/libp2p/js-libp2p-daemon/commit/f5e91210654ab3c411e316c1c657356c037a0f6a)) + +## [@libp2p/daemon-v1.0.4](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-v1.0.3...@libp2p/daemon-v1.0.4) (2022-05-25) + + +### Trivial Changes + +* update docs ([#91](https://github.com/libp2p/js-libp2p-daemon/issues/91)) ([5b072ff](https://github.com/libp2p/js-libp2p-daemon/commit/5b072ff89f30fd6cf55a3387bf0961c8ad78a22f)), closes [#83](https://github.com/libp2p/js-libp2p-daemon/issues/83) + +## [@libp2p/daemon-v1.0.3](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-v1.0.2...@libp2p/daemon-v1.0.3) (2022-05-23) + + +### Bug Fixes + +* update deps ([#90](https://github.com/libp2p/js-libp2p-daemon/issues/90)) ([b50eba3](https://github.com/libp2p/js-libp2p-daemon/commit/b50eba3770e47969dbc30cbcf87c41672cd9c175)) + +## 
[@libp2p/daemon-v1.0.2](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-v1.0.1...@libp2p/daemon-v1.0.2) (2022-04-20) + + +### Bug Fixes + +* update interfaces and deps ([#84](https://github.com/libp2p/js-libp2p-daemon/issues/84)) ([25173d5](https://github.com/libp2p/js-libp2p-daemon/commit/25173d5b2edf0e9dd9132707d349cdc862caecdb)) + +## [@libp2p/daemon-v1.0.1](https://github.com/libp2p/js-libp2p-daemon/compare/@libp2p/daemon-v1.0.0...@libp2p/daemon-v1.0.1) (2022-04-07) + + +### Trivial Changes + +* update aegir to latest version ([#80](https://github.com/libp2p/js-libp2p-daemon/issues/80)) ([3a98959](https://github.com/libp2p/js-libp2p-daemon/commit/3a98959617d9c19bba9fb064defee3d51acfcc29)) + +## @libp2p/daemon-v1.0.0 (2022-03-28) + + +### ⚠ BREAKING CHANGES + +* This module is now ESM only + +### Features + +* convert to typescript ([#78](https://github.com/libp2p/js-libp2p-daemon/issues/78)) ([f18b2a4](https://github.com/libp2p/js-libp2p-daemon/commit/f18b2a45871a2704db51b03e8583eefdcd13554c)) + +## [0.10.2](https://github.com/libp2p/js-libp2p-daemon/compare/v0.10.1...v0.10.2) (2022-01-26) + + + +## [0.10.1](https://github.com/libp2p/js-libp2p-daemon/compare/v0.10.0...v0.10.1) (2022-01-17) + + + +# [0.10.0](https://github.com/libp2p/js-libp2p-daemon/compare/v0.9.1...v0.10.0) (2022-01-17) + + +### Features + +* async peerstore ([#62](https://github.com/libp2p/js-libp2p-daemon/issues/62)) ([22e3cb0](https://github.com/libp2p/js-libp2p-daemon/commit/22e3cb05f7815f1f45e65398e87514f8ad961b49)) + + +### BREAKING CHANGES + +* peerstore methods are now all async + + + +## [0.9.1](https://github.com/libp2p/js-libp2p-daemon/compare/v0.9.0...v0.9.1) (2021-12-29) + + +### Bug Fixes + +* default nat hole punching to false ([#53](https://github.com/libp2p/js-libp2p-daemon/issues/53)) ([4bef1a3](https://github.com/libp2p/js-libp2p-daemon/commit/4bef1a384261fe442668f47b3799029cfb1043d3)) + + + +# [0.9.0](https://github.com/libp2p/js-libp2p-daemon/compare/v0.8.1...v0.9.0) (2021-12-29) + + +### chore + +* update deps ([#50](https://github.com/libp2p/js-libp2p-daemon/issues/50)) ([4231932](https://github.com/libp2p/js-libp2p-daemon/commit/42319320725fe248ee61a021981a5a065193ac99)) + + +### BREAKING CHANGES + +* only node15+ is supported + + + +## [0.8.1](https://github.com/libp2p/js-libp2p-daemon/compare/v0.8.0...v0.8.1) (2021-11-18) + + + +# [0.8.0](https://github.com/libp2p/js-libp2p-daemon/compare/v0.7.0...v0.8.0) (2021-11-18) + + +### chore + +* update dht ([#49](https://github.com/libp2p/js-libp2p-daemon/issues/49)) ([b1f1aaa](https://github.com/libp2p/js-libp2p-daemon/commit/b1f1aaab3466ec7ac693dcb5a211cd119aaa4f95)) + + +### BREAKING CHANGES + +* The DHT is now enabled by default + + + +# [0.7.0](https://github.com/libp2p/js-libp2p-daemon/compare/v0.6.1...v0.7.0) (2021-07-30) + + + +## [0.6.1](https://github.com/libp2p/js-libp2p-daemon/compare/v0.6.0...v0.6.1) (2021-06-11) + + + +# [0.6.0](https://github.com/libp2p/js-libp2p-daemon/compare/v0.5.2...v0.6.0) (2021-05-03) + + +### chore + +* update libp2p 0.31 ([#46](https://github.com/libp2p/js-libp2p-daemon/issues/46)) ([6625eba](https://github.com/libp2p/js-libp2p-daemon/commit/6625ebaa6027cee7cd8d08de09035f0edc894c1a)) + + +### BREAKING CHANGES + +* secio removed and noise is now default crypto, multiaddr@9 and libp2p@31 + + + +## [0.5.2](https://github.com/libp2p/js-libp2p-daemon/compare/v0.2.3...v0.5.2) (2021-02-16) + + +### Bug Fixes + +* replace node buffers with uint8arrays 
([#41](https://github.com/libp2p/js-libp2p-daemon/issues/41)) ([cd009d5](https://github.com/libp2p/js-libp2p-daemon/commit/cd009d5e1f83724f907dd7f84239679633e8d197)) + + +### Features + +* add support for specifying noise ([#32](https://github.com/libp2p/js-libp2p-daemon/issues/32)) ([e5582cd](https://github.com/libp2p/js-libp2p-daemon/commit/e5582cdd00b7601cfe8ecc2b0d61a66bad71ab8a)) +* specify libp2p dependency through env ([#30](https://github.com/libp2p/js-libp2p-daemon/issues/30)) ([07b0695](https://github.com/libp2p/js-libp2p-daemon/commit/07b0695157da539774de75f2316748164fdbd72d)) + + + + +## [0.5.1](https://github.com/libp2p/js-libp2p-daemon/compare/v0.5.0...v0.5.1) (2020-08-26) + + + + +# [0.5.0](https://github.com/libp2p/js-libp2p-daemon/compare/v0.4.0...v0.5.0) (2020-08-23) + + +### Bug Fixes + +* replace node buffers with uint8arrays ([#41](https://github.com/libp2p/js-libp2p-daemon/issues/41)) ([cd009d5](https://github.com/libp2p/js-libp2p-daemon/commit/cd009d5)) + + +### BREAKING CHANGES + +* - All deps of this module now use uint8arrays in place of node buffers + +* chore: bump deps + +Co-authored-by: Jacob Heun + + + + +# [0.4.0](https://github.com/libp2p/js-libp2p-daemon/compare/v0.3.1...v0.4.0) (2020-06-05) + + + + +## [0.3.1](https://github.com/libp2p/js-libp2p-daemon/compare/v0.3.0...v0.3.1) (2020-04-22) + + +### Features + +* add support for specifying noise ([#32](https://github.com/libp2p/js-libp2p-daemon/issues/32)) ([e5582cd](https://github.com/libp2p/js-libp2p-daemon/commit/e5582cd)) + + + + +# [0.3.0](https://github.com/libp2p/js-libp2p-daemon/compare/v0.2.3...v0.3.0) (2020-01-31) + + +### Features + +* specify libp2p dependency through env ([#30](https://github.com/libp2p/js-libp2p-daemon/issues/30)) ([07b0695](https://github.com/libp2p/js-libp2p-daemon/commit/07b0695)) + + + + +## [0.2.3](https://github.com/libp2p/js-libp2p-daemon/compare/v0.2.2...v0.2.3) (2019-08-26) + + +### Bug Fixes + +* **tests:** fix secp256k1 test ([#26](https://github.com/libp2p/js-libp2p-daemon/issues/26)) ([fc46dbb](https://github.com/libp2p/js-libp2p-daemon/commit/fc46dbb)) + + +### Features + +* integrate gossipsub by default ([#19](https://github.com/libp2p/js-libp2p-daemon/issues/19)) ([2959fc8](https://github.com/libp2p/js-libp2p-daemon/commit/2959fc8)) + + + + +## [0.2.2](https://github.com/libp2p/js-libp2p-daemon/compare/v0.2.1...v0.2.2) (2019-07-10) + + +### Bug Fixes + +* **bin:** exit with status 1 on unhandled rejection ([#23](https://github.com/libp2p/js-libp2p-daemon/issues/23)) ([596005d](https://github.com/libp2p/js-libp2p-daemon/commit/596005d)) +* **main:** deal with unhandled rejections ([#20](https://github.com/libp2p/js-libp2p-daemon/issues/20)) ([49e685a](https://github.com/libp2p/js-libp2p-daemon/commit/49e685a)) +* **package.json:** fix main property ([#22](https://github.com/libp2p/js-libp2p-daemon/issues/22)) ([1d505b8](https://github.com/libp2p/js-libp2p-daemon/commit/1d505b8)) +* resolve loading of private key from file ([#21](https://github.com/libp2p/js-libp2p-daemon/issues/21)) ([3e70ace](https://github.com/libp2p/js-libp2p-daemon/commit/3e70ace)) + + + + +## [0.2.1](https://github.com/libp2p/js-libp2p-daemon/compare/v0.2.0...v0.2.1) (2019-04-29) + + +### Bug Fixes + +* peer info ([#17](https://github.com/libp2p/js-libp2p-daemon/issues/17)) ([69cf26b](https://github.com/libp2p/js-libp2p-daemon/commit/69cf26b)) + + +### Features + +* add support initial peerstore support ([#14](https://github.com/libp2p/js-libp2p-daemon/issues/14)) 
([36a520c](https://github.com/libp2p/js-libp2p-daemon/commit/36a520c)) + + + + +# [0.2.0](https://github.com/libp2p/js-libp2p-daemon/compare/v0.1.2...v0.2.0) (2019-03-20) + + +### Bug Fixes + +* decapsulate ipfs protocol on daemon startup ([#11](https://github.com/libp2p/js-libp2p-daemon/issues/11)) ([190df09](https://github.com/libp2p/js-libp2p-daemon/commit/190df09)) + + +### Features + +* add pubsub support ([#12](https://github.com/libp2p/js-libp2p-daemon/issues/12)) ([5d27b90](https://github.com/libp2p/js-libp2p-daemon/commit/5d27b90)) +* add support for unix multiaddr listen ([#10](https://github.com/libp2p/js-libp2p-daemon/issues/10)) ([9106d68](https://github.com/libp2p/js-libp2p-daemon/commit/9106d68)) + + +### BREAKING CHANGES + +* The --sock param/flag has been replaced by --listen, which now expects a multiaddr string. + +Example: `jsp2pd --sock=/tmp/p2p.sock` would now be `jsp2pd --listen=/unix/tmp/p2p.sock` + +* feat: add support for unix multiaddr listen +* feat: add support for hostAddrs flag +* feat: add support for websockets +* feat: add announceAddrs support +* test: split up tests into files +* feat: use multiaddr instead of path for everything +* feat: update stream handler to use multiaddr bytes +* chore: fix lint +* chore: update multiaddr dep +* test: fix test runners +* fix: add a default host address +* fix: catch decapsulate errors when no ipfs present +* chore: fix feedback + + + + +## [0.1.2](https://github.com/libp2p/js-libp2p-daemon/compare/v0.1.1...v0.1.2) (2019-02-14) + + +### Bug Fixes + +* remove ipfs from identify multiaddrs ([7cee6ea](https://github.com/libp2p/js-libp2p-daemon/commit/7cee6ea)) + + + + +## [0.1.1](https://github.com/libp2p/js-libp2p-daemon/compare/v0.1.0...v0.1.1) (2019-02-13) + + +### Bug Fixes + +* connect should use peer id in bytes ([021b006](https://github.com/libp2p/js-libp2p-daemon/commit/021b006)) + + + + +# 0.1.0 (2019-01-31) + + +### Features + +* initial implementation of the libp2p daemon spec ([#1](https://github.com/libp2p/js-libp2p-daemon/issues/1)) ([383a6bd](https://github.com/libp2p/js-libp2p-daemon/commit/383a6bd)) diff --git a/packages/libp2p-daemon/CODE_OF_CONDUCT.md b/packages/libp2p-daemon/CODE_OF_CONDUCT.md new file mode 100644 index 0000000000..6b0fa54c54 --- /dev/null +++ b/packages/libp2p-daemon/CODE_OF_CONDUCT.md @@ -0,0 +1,3 @@ +# Contributor Code of Conduct + +This project follows the [`IPFS Community Code of Conduct`](https://github.com/ipfs/community/blob/master/code-of-conduct.md) diff --git a/packages/libp2p-daemon/LICENSE-APACHE b/packages/libp2p-daemon/LICENSE-APACHE new file mode 100644 index 0000000000..b09cd7856d --- /dev/null +++ b/packages/libp2p-daemon/LICENSE-APACHE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/packages/libp2p-daemon/LICENSE-MIT b/packages/libp2p-daemon/LICENSE-MIT new file mode 100644 index 0000000000..72dc60d84b --- /dev/null +++ b/packages/libp2p-daemon/LICENSE-MIT @@ -0,0 +1,19 @@ +The MIT License (MIT) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/packages/libp2p-daemon/README.md b/packages/libp2p-daemon/README.md new file mode 100644 index 0000000000..908206c73c --- /dev/null +++ b/packages/libp2p-daemon/README.md @@ -0,0 +1,49 @@ +# @libp2p/daemon + +[![libp2p.io](https://img.shields.io/badge/project-libp2p-yellow.svg?style=flat-square)](http://libp2p.io/) +[![Discuss](https://img.shields.io/discourse/https/discuss.libp2p.io/posts.svg?style=flat-square)](https://discuss.libp2p.io) +[![codecov](https://img.shields.io/codecov/c/github/libp2p/js-libp2p-daemon.svg?style=flat-square)](https://codecov.io/gh/libp2p/js-libp2p-daemon) +[![CI](https://img.shields.io/github/actions/workflow/status/libp2p/js-libp2p-daemon/js-test-and-release.yml?branch=main\&style=flat-square)](https://github.com/libp2p/js-libp2p-daemon/actions/workflows/js-test-and-release.yml?query=branch%3Amain) + +> libp2p-daemon JavaScript implementation + +# Install + +```console +$ npm i @libp2p/daemon +``` + +# Specs + +The specs for the daemon are currently housed in the go implementation. You can read them at [libp2p/go-libp2p-daemon](https://github.com/libp2p/go-libp2p-daemon/blob/master/specs/README.md) + +# Usage + +```console +$ jsp2pd --help +``` + +For a full list of options, run `jsp2pd --help`. +Running `jsp2pd` with the default options will start the daemon and bind it to a local unix socket path. +Daemon clients will be able to communicate with the daemon over that unix socket. + +As an alternative, you can use this daemon with a different version of libp2p than the one specified in `package.json`. You just need to define its path through an environment variable as follows: + +```console +$ LIBP2P_JS=/path/to/js-libp2p/src/index.js jsp2pd +``` + +# API Docs + +- + +# License + +Licensed under either of + +- Apache 2.0, ([LICENSE-APACHE](https://github.com/libp2p/js-libp2p-daemon/blob/main/packages/libp2p-daemon/LICENSE-APACHE) / ) +- MIT ([LICENSE-MIT](https://github.com/libp2p/js-libp2p-daemon/blob/main/packages/libp2p-daemon/LICENSE-MIT) / ) + +# Contribution + +Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions.
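The README above only shows the default invocation; the full set of flags is declared in `packages/libp2p-daemon/src/index.ts` further down in this diff. As a rough sketch of how those flags combine (the host address below is illustrative, while the socket path and pubsub router are the documented defaults), a daemon with the DHT and pubsub enabled could be started like this:

```console
$ jsp2pd \
  --listen=/unix/tmp/p2pd.sock \
  --hostAddrs=/ip4/0.0.0.0/tcp/0 \
  --dht=true \
  --pubsub=true \
  --pubsubRouter=gossipsub
```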
diff --git a/packages/libp2p-daemon/package.json b/packages/libp2p-daemon/package.json new file mode 100644 index 0000000000..54461226a0 --- /dev/null +++ b/packages/libp2p-daemon/package.json @@ -0,0 +1,152 @@ +{ + "name": "@libp2p/daemon", + "version": "5.0.4", + "description": "libp2p-daemon JavaScript implementation", + "license": "Apache-2.0 OR MIT", + "homepage": "https://github.com/libp2p/js-libp2p-daemon/tree/main/packages/libp2p-daemon#readme", + "repository": { + "type": "git", + "url": "git+https://github.com/libp2p/js-libp2p-daemon.git" + }, + "bugs": { + "url": "https://github.com/libp2p/js-libp2p-daemon/issues" + }, + "publishConfig": { + "access": "public", + "provenance": true + }, + "keywords": [ + "libp2p" + ], + "bin": { + "jsp2pd": "dist/src/index.js" + }, + "type": "module", + "types": "./dist/src/index.d.ts", + "files": [ + "src", + "dist", + "!dist/test", + "!**/*.tsbuildinfo" + ], + "exports": { + ".": { + "types": "./src/index.d.ts", + "import": "./dist/src/index.js" + } + }, + "release": { + "branches": [ + "main" + ], + "plugins": [ + [ + "@semantic-release/commit-analyzer", + { + "preset": "conventionalcommits", + "releaseRules": [ + { + "breaking": true, + "release": "major" + }, + { + "revert": true, + "release": "patch" + }, + { + "type": "feat", + "release": "minor" + }, + { + "type": "fix", + "release": "patch" + }, + { + "type": "docs", + "release": "patch" + }, + { + "type": "test", + "release": "patch" + }, + { + "type": "deps", + "release": "patch" + }, + { + "scope": "no-release", + "release": false + } + ] + } + ], + [ + "@semantic-release/release-notes-generator", + { + "preset": "conventionalcommits", + "presetConfig": { + "types": [ + { + "type": "feat", + "section": "Features" + }, + { + "type": "fix", + "section": "Bug Fixes" + }, + { + "type": "chore", + "section": "Trivial Changes" + }, + { + "type": "docs", + "section": "Documentation" + }, + { + "type": "deps", + "section": "Dependencies" + }, + { + "type": "test", + "section": "Tests" + } + ] + } + } + ], + "@semantic-release/changelog", + "@semantic-release/npm", + "@semantic-release/github", + [ + "@semantic-release/git", + { + "assets": [ + "CHANGELOG.md", + "package.json" + ] + } + ] + ] + }, + "scripts": { + "clean": "aegir clean", + "lint": "aegir lint", + "dep-check": "aegir dep-check", + "build": "aegir build", + "test": "aegir test -t node", + "test:node": "aegir test -t node", + "release": "aegir release" + }, + "dependencies": { + "@libp2p/daemon-server": "^8.0.0", + "@multiformats/multiaddr": "^12.5.1", + "es-main": "^1.4.0", + "yargs": "^18.0.0", + "yargs-promise": "^1.1.0" + }, + "devDependencies": { + "@types/yargs": "^17.0.33", + "aegir": "^47.0.21", + "sinon": "^21.0.0" + } +} diff --git a/packages/libp2p-daemon/src/index.ts b/packages/libp2p-daemon/src/index.ts new file mode 100755 index 0000000000..0ae297bc66 --- /dev/null +++ b/packages/libp2p-daemon/src/index.ts @@ -0,0 +1,136 @@ +#! 
/usr/bin/env node +/* eslint no-console: ["error", { allow: ["log", "warn", "error"] }] */ + +import { multiaddr } from '@multiformats/multiaddr' +import esMain from 'es-main' +import yargs from 'yargs' +// @ts-expect-error no types +import YargsPromise from 'yargs-promise' +import type { Libp2pServer } from '@libp2p/daemon-server' +import type { Multiaddr } from '@multiformats/multiaddr' + +const args = process.argv.slice(2) +const parser = new YargsPromise(yargs) + +const log = console.log + +export default async function main (processArgs: string[]): Promise { + parser.yargs + .option('listen', { + desc: 'daemon control listen multiaddr', + type: 'string', + default: '/unix/tmp/p2pd.sock' + }) + .option('quiet', { + alias: 'q', + desc: 'be quiet', + type: 'boolean', + default: false + }) + .option('id', { + desc: 'peer identity; private key file', + type: 'string', + default: '' + }) + .option('hostAddrs', { + desc: 'Comma separated list of multiaddrs the host should listen on', + type: 'string', + default: '' + }) + .option('announceAddrs', { + desc: 'Comma separated list of multiaddrs the host should announce to the network', + type: 'string', + default: '' + }) + .option('bootstrap', { + alias: 'b', + desc: 'Connects to bootstrap peers and bootstraps the dht if enabled', + type: 'boolean', + default: false + }) + .option('bootstrapPeers', { + desc: 'Comma separated list of bootstrap peers; defaults to the IPFS DHT peers', + type: 'string', + default: '' + }) + .option('dht', { + desc: 'Enables the DHT in full node mode', + type: 'boolean', + default: false + }) + .option('dhtClient', { + desc: '(Not yet supported) Enables the DHT in client mode', + type: 'boolean', + default: false + }) + .option('nat', { + desc: 'Enables UPnP NAT hole punching', + type: 'boolean', + default: false + }) + .option('connMgr', { + desc: '(Not yet supported) Enables the Connection Manager', + type: 'boolean', + default: false + }) + .option('connMgrLo', { + desc: 'Number identifying the number of peers below which this node will not activate preemptive disconnections', + type: 'number' + }) + .option('connMgrHi', { + desc: 'Number identifying the maximum number of peers the current peer is willing to be connected to before is starts disconnecting', + type: 'number' + }) + .option('pubsub', { + desc: 'Enables pubsub', + type: 'boolean', + default: false + }) + .option('pubsubRouter', { + desc: 'Specifies the pubsub router implementation', + type: 'string', + default: 'gossipsub' + }) + .fail((msg: string, err: Error | undefined, yargs?: any) => { + if (err != null) { + throw err // preserve stack + } + + if (args.length > 0) { + log(msg) + } + + yargs.showHelp() + }) + + const { data, argv } = await parser.parse(processArgs) + + if (data != null) { + // Log help and exit + + log(data) + process.exit(0) + } + + const daemon = await createLibp2pServer(multiaddr(argv.listen), argv) + await daemon.start() + + if (argv.quiet !== true) { + log('daemon has started') + } +} + +export async function createLibp2pServer (listenAddr: Multiaddr, argv: any): Promise { + // const libp2p = await createLibp2p(argv) + // const daemon = await createServer(multiaddr(argv.listen), libp2p) + + throw new Error('Not implemented yet') +} + +if (esMain(import.meta)) { + main(process.argv) + .catch((err) => { + console.error(err) + process.exit(1) + }) +} diff --git a/packages/libp2p-daemon/test/cli.spec.ts b/packages/libp2p-daemon/test/cli.spec.ts new file mode 100644 index 0000000000..2712cb8ddd --- /dev/null +++ 
b/packages/libp2p-daemon/test/cli.spec.ts @@ -0,0 +1,85 @@ +/* eslint-env mocha */ + +import { expect } from 'aegir/chai' +import sinon from 'sinon' +import cli from '../src/index.js' + +describe.skip('cli', () => { + const daemon = { createDaemon: (options: any) => {} } + + afterEach(() => { + sinon.restore() + }) + + it('should create a daemon with default options', async () => { + sinon.stub(daemon, 'createDaemon').callsFake((options) => { + expect(options).to.include({ + b: false, + bootstrap: false, + 'bootstrap-peers': '', + bootstrapPeers: '', + hostAddrs: '', + announceAddrs: '', + 'conn-mgr': false, + connMgr: false, + dht: false, + 'dht-client': false, + dhtClient: false, + id: '', + q: false, + quiet: false, + listen: '/unix/tmp/p2pd.sock' + }) + return { + start: () => {}, + stop: () => {} + } + }) + + await cli([ + '/bin/node', + '/daemon/src/cli/bin.js' + ]) + }) + + it('should be able to specify options', async () => { + sinon.stub(daemon, 'createDaemon').callsFake((options) => { + expect(options).to.include({ + b: true, + bootstrap: true, + 'bootstrap-peers': '/p2p/Qm1,/p2p/Qm2', + bootstrapPeers: '/p2p/Qm1,/p2p/Qm2', + hostAddrs: '/ip4/0.0.0.0/tcp/0,/ip4/0.0.0.0/tcp/0/wss', + announceAddrs: '/ip4/0.0.0.0/tcp/8080', + 'conn-mgr': true, + connMgr: true, + dht: true, + 'dht-client': true, + dhtClient: true, + id: '/path/to/key', + q: true, + quiet: true, + listen: '/unix/tmp/d.sock' + }) + return { + start: () => {}, + stop: () => {} + } + }) + + await cli([ + '/bin/node', + '/daemon/src/cli/bin.js', + '--dht=true', + '--b=true', + '--bootstrapPeers=/p2p/Qm1,/p2p/Qm2', + '--hostAddrs=/ip4/0.0.0.0/tcp/0,/ip4/0.0.0.0/tcp/0/wss', + '--announceAddrs=/ip4/0.0.0.0/tcp/8080', + '--connMgr=true', + '--dhtClient=true', + '--quiet=true', + '--id=/path/to/key', + '--listen=/unix/tmp/d.sock' + ]) + }) +}) diff --git a/packages/libp2p-daemon/tsconfig.json b/packages/libp2p-daemon/tsconfig.json new file mode 100644 index 0000000000..6b7a2ea443 --- /dev/null +++ b/packages/libp2p-daemon/tsconfig.json @@ -0,0 +1,15 @@ +{ + "extends": "aegir/src/config/tsconfig.aegir.json", + "compilerOptions": { + "outDir": "dist" + }, + "include": [ + "src", + "test" + ], + "references": [ + { + "path": "../libp2p-daemon-server" + } + ] +} diff --git a/packages/libp2p-daemon/typedoc.json b/packages/libp2p-daemon/typedoc.json new file mode 100644 index 0000000000..db0b0747ef --- /dev/null +++ b/packages/libp2p-daemon/typedoc.json @@ -0,0 +1,6 @@ +{ + "readme": "none", + "entryPoints": [ + "./src/index.ts" + ] +} diff --git a/packages/libp2p/package.json b/packages/libp2p/package.json index 4e9cf817ca..63a92431dc 100644 --- a/packages/libp2p/package.json +++ b/packages/libp2p/package.json @@ -95,7 +95,6 @@ "any-signal": "^4.1.1", "datastore-core": "^10.0.2", "interface-datastore": "^8.3.1", - "it-byte-stream": "^2.0.2", "it-merge": "^3.0.11", "it-parallel": "^3.0.11", "main-event": "^1.0.1", @@ -103,7 +102,7 @@ "p-defer": "^4.0.1", "p-retry": "^6.2.1", "progress-events": "^1.0.1", - "race-event": "^1.3.0", + "race-event": "^1.6.0", "race-signal": "^1.1.3", "uint8arrays": "^5.1.0" }, @@ -114,13 +113,12 @@ "it-drain": "^3.0.9", "it-length-prefixed": "^10.0.1", "it-map": "^3.1.3", - "it-pair": "^2.0.6", - "it-pipe": "^3.0.1", "it-take": "^3.0.8", "p-event": "^6.0.1", "p-wait-for": "^5.0.2", "sinon": "^20.0.0", "sinon-ts": "^2.0.0", + "uint8arraylist": "^2.4.8", "wherearewe": "^2.0.1" }, "browser": { diff --git a/packages/libp2p/src/address-manager/dns-mappings.ts 
b/packages/libp2p/src/address-manager/dns-mappings.ts index 841af6b886..24f46f50df 100644 --- a/packages/libp2p/src/address-manager/dns-mappings.ts +++ b/packages/libp2p/src/address-manager/dns-mappings.ts @@ -1,5 +1,4 @@ -import { isPrivateIp } from '@libp2p/utils/private-ip' -import { trackedMap } from '@libp2p/utils/tracked-map' +import { isPrivateIp, trackedMap } from '@libp2p/utils' import { multiaddr, protocols } from '@multiformats/multiaddr' import type { AddressManagerComponents, AddressManagerInit } from './index.js' import type { Logger } from '@libp2p/interface' diff --git a/packages/libp2p/src/address-manager/index.ts b/packages/libp2p/src/address-manager/index.ts index 898e905b7b..d8c044761f 100644 --- a/packages/libp2p/src/address-manager/index.ts +++ b/packages/libp2p/src/address-manager/index.ts @@ -1,9 +1,7 @@ /* eslint-disable complexity */ import { isIPv4 } from '@chainsafe/is-ip' import { peerIdFromString } from '@libp2p/peer-id' -import { debounce } from '@libp2p/utils/debounce' -import { createScalableCuckooFilter } from '@libp2p/utils/filters' -import { isPrivateIp } from '@libp2p/utils/private-ip' +import { debounce, createScalableCuckooFilter, isPrivateIp } from '@libp2p/utils' import { multiaddr } from '@multiformats/multiaddr' import { QUIC_V1, TCP, WebSockets, WebSocketsSecure } from '@multiformats/multiaddr-matcher' import { DNSMappings } from './dns-mappings.js' @@ -12,7 +10,7 @@ import { ObservedAddresses } from './observed-addresses.js' import { TransportAddresses } from './transport-addresses.js' import type { ComponentLogger, Libp2pEvents, Logger, PeerId, PeerStore, Metrics } from '@libp2p/interface' import type { AddressManager as AddressManagerInterface, TransportManager, NodeAddress, ConfirmAddressOptions } from '@libp2p/interface-internal' -import type { Filter } from '@libp2p/utils/filters' +import type { Filter } from '@libp2p/utils' import type { Multiaddr } from '@multiformats/multiaddr' import type { TypedEventTarget } from 'main-event' diff --git a/packages/libp2p/src/address-manager/ip-mappings.ts b/packages/libp2p/src/address-manager/ip-mappings.ts index b228a4e36d..e880d2e6f6 100644 --- a/packages/libp2p/src/address-manager/ip-mappings.ts +++ b/packages/libp2p/src/address-manager/ip-mappings.ts @@ -1,5 +1,5 @@ import { isIPv4 } from '@chainsafe/is-ip' -import { trackedMap } from '@libp2p/utils/tracked-map' +import { trackedMap } from '@libp2p/utils' import { multiaddr, protocols } from '@multiformats/multiaddr' import type { AddressManagerComponents, AddressManagerInit } from './index.js' import type { Logger } from '@libp2p/interface' diff --git a/packages/libp2p/src/address-manager/observed-addresses.ts b/packages/libp2p/src/address-manager/observed-addresses.ts index 0bc30400c6..1e675b6f1f 100644 --- a/packages/libp2p/src/address-manager/observed-addresses.ts +++ b/packages/libp2p/src/address-manager/observed-addresses.ts @@ -1,6 +1,4 @@ -import { isLinkLocal } from '@libp2p/utils/multiaddr/is-link-local' -import { isPrivate } from '@libp2p/utils/multiaddr/is-private' -import { trackedMap } from '@libp2p/utils/tracked-map' +import { isLinkLocal, isPrivate, trackedMap } from '@libp2p/utils' import { multiaddr } from '@multiformats/multiaddr' import type { AddressManagerComponents, AddressManagerInit } from './index.js' import type { Logger } from '@libp2p/interface' diff --git a/packages/libp2p/src/address-manager/transport-addresses.ts b/packages/libp2p/src/address-manager/transport-addresses.ts index cb75fdbae9..97ab63468c 100644 --- 
a/packages/libp2p/src/address-manager/transport-addresses.ts +++ b/packages/libp2p/src/address-manager/transport-addresses.ts @@ -1,6 +1,4 @@ -import { isNetworkAddress } from '@libp2p/utils/multiaddr/is-network-address' -import { isPrivate } from '@libp2p/utils/multiaddr/is-private' -import { trackedMap } from '@libp2p/utils/tracked-map' +import { isNetworkAddress, isPrivate, trackedMap } from '@libp2p/utils' import type { AddressManagerComponents, AddressManagerInit } from './index.js' import type { Logger } from '@libp2p/interface' import type { NodeAddress } from '@libp2p/interface-internal' diff --git a/packages/libp2p/src/config.ts b/packages/libp2p/src/config.ts index 1017b7b5be..966d853d7a 100644 --- a/packages/libp2p/src/config.ts +++ b/packages/libp2p/src/config.ts @@ -1,5 +1,5 @@ import { FaultTolerance, InvalidParametersError } from '@libp2p/interface' -import { mergeOptions } from '@libp2p/utils/merge-options' +import { mergeOptions } from '@libp2p/utils' import { dnsaddrResolver } from './connection-manager/resolvers/dnsaddr.ts' import type { Libp2pInit } from './index.js' import type { ServiceMap } from '@libp2p/interface' diff --git a/packages/libp2p/src/config/connection-gater.browser.ts b/packages/libp2p/src/config/connection-gater.browser.ts index 42026ecf93..8a3eae4d34 100644 --- a/packages/libp2p/src/config/connection-gater.browser.ts +++ b/packages/libp2p/src/config/connection-gater.browser.ts @@ -1,4 +1,4 @@ -import { isPrivateIp } from '@libp2p/utils/private-ip' +import { isPrivateIp } from '@libp2p/utils' import { WebSockets } from '@multiformats/multiaddr-matcher' import type { ConnectionGater } from '@libp2p/interface' import type { Multiaddr } from '@multiformats/multiaddr' diff --git a/packages/libp2p/src/connection-manager/address-sorter.ts b/packages/libp2p/src/connection-manager/address-sorter.ts index 4dc16099a3..0ff7c83d0b 100644 --- a/packages/libp2p/src/connection-manager/address-sorter.ts +++ b/packages/libp2p/src/connection-manager/address-sorter.ts @@ -1,5 +1,4 @@ -import { isLoopback } from '@libp2p/utils/multiaddr/is-loopback' -import { isPrivate } from '@libp2p/utils/multiaddr/is-private' +import { isLoopback, isPrivate } from '@libp2p/utils' import { Circuit, WebSockets, WebSocketsSecure, WebRTC, WebRTCDirect, WebTransport, TCP } from '@multiformats/multiaddr-matcher' import type { Address } from '@libp2p/interface' diff --git a/packages/libp2p/src/connection-manager/connection-pruner.ts b/packages/libp2p/src/connection-manager/connection-pruner.ts index ce0716328e..6e0031777c 100644 --- a/packages/libp2p/src/connection-manager/connection-pruner.ts +++ b/packages/libp2p/src/connection-manager/connection-pruner.ts @@ -1,6 +1,5 @@ import { PeerMap } from '@libp2p/peer-collections' -import { safelyCloseConnectionIfUnused } from '@libp2p/utils/close' -import { multiaddrToIpNet } from './utils.js' +import { multiaddrToIpNet, safelyCloseConnectionIfUnused } from './utils.js' import type { IpNet } from '@chainsafe/netmask' import type { Libp2pEvents, Logger, ComponentLogger, PeerStore, Connection } from '@libp2p/interface' import type { ConnectionManager } from '@libp2p/interface-internal' diff --git a/packages/libp2p/src/connection-manager/dial-queue.ts b/packages/libp2p/src/connection-manager/dial-queue.ts index be314eb327..c75a0b428c 100644 --- a/packages/libp2p/src/connection-manager/dial-queue.ts +++ b/packages/libp2p/src/connection-manager/dial-queue.ts @@ -1,7 +1,7 @@ /* eslint-disable max-depth */ import { TimeoutError, DialError, AbortError } 
from '@libp2p/interface' import { PeerMap } from '@libp2p/peer-collections' -import { PriorityQueue } from '@libp2p/utils/priority-queue' +import { PriorityQueue } from '@libp2p/utils' import { multiaddr } from '@multiformats/multiaddr' import { Circuit } from '@multiformats/multiaddr-matcher' import { anySignal } from 'any-signal' @@ -23,7 +23,7 @@ import { resolveMultiaddr, dnsaddrResolver } from './resolvers/index.js' import { DEFAULT_DIAL_PRIORITY } from './index.js' import type { AddressSorter, ComponentLogger, Logger, Connection, ConnectionGater, Metrics, PeerId, Address, PeerStore, PeerRouting, IsDialableOptions, OpenConnectionProgressEvents, MultiaddrResolver } from '@libp2p/interface' import type { OpenConnectionOptions, TransportManager } from '@libp2p/interface-internal' -import type { PriorityQueueJobOptions } from '@libp2p/utils/priority-queue' +import type { PriorityQueueJobOptions } from '@libp2p/utils' import type { DNS } from '@multiformats/dns' import type { Multiaddr } from '@multiformats/multiaddr' import type { ProgressOptions } from 'progress-events' @@ -103,7 +103,7 @@ export class DialQueue { // a started job errored this.queue.addEventListener('failure', (event) => { if (event.detail?.error.name !== AbortError.name) { - this.log.error('error in dial queue - %e', event.detail) + this.log.error('error in dial queue - %e', event.detail.error) } }) } diff --git a/packages/libp2p/src/connection-manager/index.ts b/packages/libp2p/src/connection-manager/index.ts index 8545d74373..7eeaa22c17 100644 --- a/packages/libp2p/src/connection-manager/index.ts +++ b/packages/libp2p/src/connection-manager/index.ts @@ -1,6 +1,6 @@ import { ConnectionClosedError, InvalidMultiaddrError, InvalidParametersError, InvalidPeerIdError, NotStartedError, start, stop } from '@libp2p/interface' import { PeerMap } from '@libp2p/peer-collections' -import { RateLimiter } from '@libp2p/utils/rate-limiter' +import { RateLimiter } from '@libp2p/utils' import { multiaddr } from '@multiformats/multiaddr' import { CustomProgressEvent } from 'progress-events' import { getPeerAddress } from '../get-peer.js' @@ -13,7 +13,7 @@ import { multiaddrToIpNet } from './utils.js' import type { IpNet } from '@chainsafe/netmask' import type { PendingDial, AddressSorter, Libp2pEvents, AbortOptions, ComponentLogger, Logger, Connection, MultiaddrConnection, ConnectionGater, Metrics, PeerId, PeerStore, Startable, PendingDialStatus, PeerRouting, IsDialableOptions, MultiaddrResolver } from '@libp2p/interface' import type { ConnectionManager, OpenConnectionOptions, TransportManager } from '@libp2p/interface-internal' -import type { JobStatus } from '@libp2p/utils/queue' +import type { JobStatus } from '@libp2p/utils' import type { Multiaddr } from '@multiformats/multiaddr' import type { TypedEventTarget } from 'main-event' @@ -614,7 +614,7 @@ export class DefaultConnectionManager implements ConnectionManager, Startable { ) } - async acceptIncomingConnection (maConn: MultiaddrConnection): Promise { + acceptIncomingConnection (maConn: MultiaddrConnection): boolean { // check deny list const denyConnection = this.deny.some(ma => { return ma.contains(maConn.remoteAddr.nodeAddress().address) @@ -646,7 +646,7 @@ export class DefaultConnectionManager implements ConnectionManager, Startable { const host = maConn.remoteAddr.nodeAddress().address try { - await this.inboundConnectionRateLimiter.consume(host, 1) + this.inboundConnectionRateLimiter.consume(host, 1) } catch { this.log('connection from %a refused - 
inboundConnectionThreshold exceeded by host %s', maConn.remoteAddr, host) return false diff --git a/packages/libp2p/src/connection-manager/reconnect-queue.ts b/packages/libp2p/src/connection-manager/reconnect-queue.ts index a8ac1b6f8d..5158f6ee73 100644 --- a/packages/libp2p/src/connection-manager/reconnect-queue.ts +++ b/packages/libp2p/src/connection-manager/reconnect-queue.ts @@ -1,5 +1,5 @@ import { KEEP_ALIVE } from '@libp2p/interface' -import { PeerQueue } from '@libp2p/utils/peer-queue' +import { PeerQueue } from '@libp2p/utils' import pRetry from 'p-retry' import { MAX_PARALLEL_RECONNECTS } from './constants.js' import type { ComponentLogger, Libp2pEvents, Logger, Metrics, Peer, PeerId, PeerStore, Startable } from '@libp2p/interface' diff --git a/packages/libp2p/src/connection-manager/utils.ts b/packages/libp2p/src/connection-manager/utils.ts index 8b8aef4e55..09ce3f63e6 100644 --- a/packages/libp2p/src/connection-manager/utils.ts +++ b/packages/libp2p/src/connection-manager/utils.ts @@ -1,8 +1,61 @@ import { multiaddr } from '@multiformats/multiaddr' import { convertToIpNet } from '@multiformats/multiaddr/convert' import type { IpNet } from '@chainsafe/netmask' +import type { Connection, AbortOptions } from '@libp2p/interface' import type { Multiaddr } from '@multiformats/multiaddr' +/** + * These are speculative protocols that are run automatically on connection open + * so are usually not the reason the connection was opened. + * + * Consequently when requested it should be safe to close connections that only + * have these protocol streams open. + */ +const DEFAULT_CLOSABLE_PROTOCOLS = [ + // identify + '/ipfs/id/1.0.0', + + // identify-push + '/ipfs/id/push/1.0.0', + + // autonat + '/libp2p/autonat/1.0.0', + + // dcutr + '/libp2p/dcutr' +] + +export interface SafelyCloseConnectionOptions extends AbortOptions { + /** + * Only close the stream if it either has no protocol streams open or only + * ones in this list. + * + * @default ['/ipfs/id/1.0.0'] + */ + closableProtocols?: string[] +} + +/** + * Close the passed connection if it has no streams, or only closable protocol + * streams, falling back to aborting the connection if closing it cleanly fails. + */ +export async function safelyCloseConnectionIfUnused (connection?: Connection, options?: SafelyCloseConnectionOptions): Promise { + const streamProtocols = connection?.streams?.map(stream => stream.protocol) ?? [] + const closableProtocols = options?.closableProtocols ?? DEFAULT_CLOSABLE_PROTOCOLS + + // if the connection has protocols not in the closable protocols list, do not + // close the connection + if (streamProtocols.filter(proto => proto != null && !closableProtocols.includes(proto)).length > 0) { + return + } + + try { + await connection?.close(options) + } catch (err: any) { + connection?.abort(err) + } +} + /** * Converts a multiaddr string or object to an IpNet object. 
* If the multiaddr doesn't include /ipcidr, it will encapsulate with the appropriate CIDR: diff --git a/packages/libp2p/src/connection-monitor.ts b/packages/libp2p/src/connection-monitor.ts index b9dc1e7920..f4e7afcf60 100644 --- a/packages/libp2p/src/connection-monitor.ts +++ b/packages/libp2p/src/connection-monitor.ts @@ -1,11 +1,10 @@ import { randomBytes } from '@libp2p/crypto' import { serviceCapabilities } from '@libp2p/interface' -import { AdaptiveTimeout } from '@libp2p/utils/adaptive-timeout' -import { byteStream } from 'it-byte-stream' +import { AdaptiveTimeout, byteStream } from '@libp2p/utils' import { setMaxListeners } from 'main-event' import type { ComponentLogger, Logger, Metrics, Startable } from '@libp2p/interface' import type { ConnectionManager } from '@libp2p/interface-internal' -import type { AdaptiveTimeoutInit } from '@libp2p/utils/adaptive-timeout' +import type { AdaptiveTimeoutInit } from '@libp2p/utils' const DEFAULT_PING_INTERVAL_MS = 10000 const PROTOCOL_VERSION = '1.0.0' @@ -120,7 +119,7 @@ export class ConnectionMonitor implements Startable { conn.rtt = Date.now() - start - await bs.unwrap().close({ + await stream.closeWrite({ signal }) } catch (err: any) { diff --git a/packages/libp2p/src/connection.ts b/packages/libp2p/src/connection.ts index 4be51ff8a5..eab74cd41e 100644 --- a/packages/libp2p/src/connection.ts +++ b/packages/libp2p/src/connection.ts @@ -1,10 +1,11 @@ -import { connectionSymbol, LimitedConnectionError, ConnectionClosedError, ConnectionClosingError, TooManyOutboundProtocolStreamsError, TooManyInboundProtocolStreamsError } from '@libp2p/interface' +import { connectionSymbol, LimitedConnectionError, ConnectionClosedError, TooManyOutboundProtocolStreamsError, TooManyInboundProtocolStreamsError, StreamCloseEvent } from '@libp2p/interface' import * as mss from '@libp2p/multistream-select' -import { setMaxListeners } from 'main-event' +import { CODE_P2P } from '@multiformats/multiaddr' +import { setMaxListeners, TypedEventEmitter } from 'main-event' import { PROTOCOL_NEGOTIATION_TIMEOUT } from './connection-manager/constants.defaults.ts' import { MuxerUnavailableError } from './errors.ts' import { DEFAULT_MAX_INBOUND_STREAMS, DEFAULT_MAX_OUTBOUND_STREAMS } from './registrar.ts' -import type { AbortOptions, Logger, Direction, Connection as ConnectionInterface, Stream, ConnectionTimeline, ConnectionStatus, NewStreamOptions, PeerId, ConnectionLimits, StreamMuxerFactory, StreamMuxer, Metrics, PeerStore, MultiaddrConnection } from '@libp2p/interface' +import type { AbortOptions, Logger, MessageStreamDirection, Connection as ConnectionInterface, Stream, NewStreamOptions, PeerId, ConnectionLimits, StreamMuxer, Metrics, PeerStore, MultiaddrConnection, MessageStreamEvents, MultiaddrConnectionTimeline, ConnectionStatus, MessageStream } from '@libp2p/interface' import type { Registrar } from '@libp2p/interface-internal' import type { Multiaddr } from '@multiformats/multiaddr' @@ -19,10 +20,11 @@ export interface ConnectionComponents { export interface ConnectionInit { id: string maConn: MultiaddrConnection + stream: MessageStream remotePeer: PeerId - direction?: Direction - muxerFactory?: StreamMuxerFactory - encryption?: string + direction?: MessageStreamDirection + muxer?: StreamMuxer + cryptoProtocol?: string limits?: ConnectionLimits outboundStreamProtocolNegotiationTimeout?: number inboundStreamProtocolNegotiationTimeout?: number @@ -32,18 +34,17 @@ export interface ConnectionInit { * An implementation of the js-libp2p connection. 
* Any libp2p transport should use an upgrader to return this connection. */ -export class Connection implements ConnectionInterface { +export class Connection extends TypedEventEmitter implements ConnectionInterface { public readonly id: string public readonly remoteAddr: Multiaddr public readonly remotePeer: PeerId - public direction: Direction - public timeline: ConnectionTimeline + public direction: MessageStreamDirection + public timeline: MultiaddrConnectionTimeline public multiplexer?: string public encryption?: string public status: ConnectionStatus public limits?: ConnectionLimits public readonly log: Logger - public tags: string[] private readonly maConn: MultiaddrConnection private readonly muxer?: StreamMuxer @@ -52,6 +53,8 @@ export class Connection implements ConnectionInterface { private readonly inboundStreamProtocolNegotiationTimeout: number constructor (components: ConnectionComponents, init: ConnectionInit) { + super() + this.components = components this.id = init.id @@ -60,39 +63,28 @@ export class Connection implements ConnectionInterface { this.direction = init.direction ?? 'outbound' this.status = 'open' this.timeline = init.maConn.timeline - this.encryption = init.encryption + this.encryption = init.cryptoProtocol this.limits = init.limits this.maConn = init.maConn this.log = init.maConn.log this.outboundStreamProtocolNegotiationTimeout = init.outboundStreamProtocolNegotiationTimeout ?? PROTOCOL_NEGOTIATION_TIMEOUT this.inboundStreamProtocolNegotiationTimeout = init.inboundStreamProtocolNegotiationTimeout ?? PROTOCOL_NEGOTIATION_TIMEOUT - if (this.remoteAddr.getPeerId() == null) { + this.onIncomingStream = this.onIncomingStream.bind(this) + + if (this.remoteAddr.getComponents().find(component => component.code === CODE_P2P) == null) { this.remoteAddr = this.remoteAddr.encapsulate(`/p2p/${this.remotePeer}`) } - this.tags = [] - - if (init.muxerFactory != null) { - this.multiplexer = init.muxerFactory.protocol - - this.muxer = init.muxerFactory.createStreamMuxer({ - direction: this.direction, - log: this.log, - // Run anytime a remote stream is created - onIncomingStream: (stream) => { - this.onIncomingStream(stream) - } - }) - - // Pipe all data through the muxer - void Promise.all([ - this.muxer.sink(this.maConn.source), - this.maConn.sink(this.muxer.source) - ]).catch(err => { - this.log.error('error piping data through muxer - %e', err) - }) + if (init.muxer != null) { + this.multiplexer = init.muxer.protocol + this.muxer = init.muxer + this.muxer.addEventListener('stream', this.onIncomingStream) } + + this.maConn.addEventListener('close', (evt) => { + this.dispatchEvent(new StreamCloseEvent(evt.local, evt.error)) + }) } readonly [Symbol.toStringTag] = 'Connection' @@ -107,12 +99,8 @@ export class Connection implements ConnectionInterface { * Create a new stream over this connection */ newStream = async (protocols: string[], options: NewStreamOptions = {}): Promise => { - if (this.status === 'closing') { - throw new ConnectionClosingError('the connection is being closed') - } - - if (this.status === 'closed') { - throw new ConnectionClosedError('the connection is closed') + if (this.status !== 'open') { + throw new ConnectionClosedError(`The connection is "${this.status}" and not "open"`) } if (!Array.isArray(protocols)) { @@ -128,7 +116,14 @@ export class Connection implements ConnectionInterface { } this.log.trace('starting new stream for protocols %s', protocols) - const muxedStream = await this.muxer.newStream() + const muxedStream = await 
this.muxer.createStream({ + ...options, + + // most underlying transports only support negotiating a single protocol + // so only pass the early protocol if a single protocol has been requested + // otherwise fall back to mss + protocol: protocols.length === 1 ? protocols[0] : undefined + }) this.log.trace('started new stream %s for protocols %s', muxedStream.id, protocols) try { @@ -144,24 +139,21 @@ export class Connection implements ConnectionInterface { } } - muxedStream.log.trace('selecting protocol from protocols %s', protocols) + if (muxedStream.protocol === '') { + muxedStream.log.trace('selecting protocol from protocols %s', protocols) - const { - stream, - protocol - } = await mss.select(muxedStream, protocols, { - ...options, - log: muxedStream.log, - yieldBytes: true - }) + muxedStream.protocol = await mss.select(muxedStream, protocols, options) - muxedStream.log('selected protocol %s', protocol) + muxedStream.log('negotiated protocol %s', muxedStream.protocol) + } else { + muxedStream.log('pre-negotiated protocol %s', muxedStream.protocol) + } - const outgoingLimit = findOutgoingStreamLimit(protocol, this.components.registrar, options) - const streamCount = countStreams(protocol, 'outbound', this) + const outgoingLimit = findOutgoingStreamLimit(muxedStream.protocol, this.components.registrar, options) + const streamCount = countStreams(muxedStream.protocol, 'outbound', this) - if (streamCount >= outgoingLimit) { - const err = new TooManyOutboundProtocolStreamsError(`Too many outbound protocol streams for protocol "${protocol}" - ${streamCount}/${outgoingLimit}`) + if (streamCount > outgoingLimit) { + const err = new TooManyOutboundProtocolStreamsError(`Too many outbound protocol streams for protocol "${muxedStream.protocol}" - ${streamCount}/${outgoingLimit}`) muxedStream.abort(err) throw err @@ -170,129 +162,87 @@ export class Connection implements ConnectionInterface { // If a protocol stream has been successfully negotiated and is to be passed to the application, // the peer store should ensure that the peer is registered with that protocol await this.components.peerStore.merge(this.remotePeer, { - protocols: [protocol] + protocols: [muxedStream.protocol] }) - // after the handshake the returned stream can have early data so override - // the source/sink - muxedStream.source = stream.source - muxedStream.sink = stream.sink - muxedStream.protocol = protocol - - // allow closing the write end of a not-yet-negotiated stream - if (stream.closeWrite != null) { - muxedStream.closeWrite = stream.closeWrite - } - - // allow closing the read end of a not-yet-negotiated stream - if (stream.closeRead != null) { - muxedStream.closeRead = stream.closeRead - } - - // make sure we don't try to negotiate a stream we are closing - if (stream.close != null) { - muxedStream.close = stream.close - } - - this.components.metrics?.trackProtocolStream(muxedStream, this) - - muxedStream.direction = 'outbound' + this.components.metrics?.trackProtocolStream(muxedStream) return muxedStream } catch (err: any) { - this.log.error('could not create new outbound stream on connection %s %a for protocols %s - %e', this.direction === 'inbound' ? 'from' : 'to', this.remoteAddr, protocols, err) - - if (muxedStream.timeline.close == null) { + if (muxedStream.status === 'open') { muxedStream.abort(err) + } else { + this.log.error('could not create new outbound stream on connection %s %a for protocols %s - %e', this.direction === 'inbound' ? 
'from' : 'to', this.remoteAddr, protocols, err) } throw err } } - private onIncomingStream (muxedStream: Stream): void { + private async onIncomingStream (evt: CustomEvent): Promise { + this.log('new incoming stream %s', evt.detail.id) + const muxedStream = evt.detail + const signal = AbortSignal.timeout(this.inboundStreamProtocolNegotiationTimeout) setMaxListeners(Infinity, signal) - void Promise.resolve() - .then(async () => { - const protocols = this.components.registrar.getProtocols() - - const { stream, protocol } = await mss.handle(muxedStream, protocols, { - signal, - log: muxedStream.log, - yieldBytes: false - }) - - this.log('incoming %s stream opened', protocol) + this.log('start protocol negotiation %s', evt.detail.id) - const incomingLimit = findIncomingStreamLimit(protocol, this.components.registrar) - const streamCount = countStreams(protocol, 'inbound', this) - - if (streamCount === incomingLimit) { - const err = new TooManyInboundProtocolStreamsError(`Too many inbound protocol streams for protocol "${protocol}" - limit ${incomingLimit}`) - muxedStream.abort(err) - - throw err - } - - // after the handshake the returned stream can have early data so override - // the source/sink - muxedStream.source = stream.source - muxedStream.sink = stream.sink - muxedStream.protocol = protocol - - // allow closing the write end of a not-yet-negotiated stream - if (stream.closeWrite != null) { - muxedStream.closeWrite = stream.closeWrite - } - - // allow closing the read end of a not-yet-negotiated stream - if (stream.closeRead != null) { - muxedStream.closeRead = stream.closeRead - } + try { + if (muxedStream.protocol === '') { + const protocols = this.components.registrar.getProtocols() - // make sure we don't try to negotiate a stream we are closing - if (stream.close != null) { - muxedStream.close = stream.close - } + muxedStream.log.trace('selecting protocol from protocols %s', protocols) - // If a protocol stream has been successfully negotiated and is to be passed to the application, - // the peer store should ensure that the peer is registered with that protocol - await this.components.peerStore.merge(this.remotePeer, { - protocols: [protocol] - }, { + muxedStream.protocol = await mss.handle(muxedStream, protocols, { signal }) - this.components.metrics?.trackProtocolStream(muxedStream, this) + muxedStream.log('negotiated protocol %s', muxedStream.protocol) + } else { + muxedStream.log('pre-negotiated protocol %s', muxedStream.protocol) + } - const { handler, options } = this.components.registrar.getHandler(protocol) + const incomingLimit = findIncomingStreamLimit(muxedStream.protocol, this.components.registrar) + const streamCount = countStreams(muxedStream.protocol, 'inbound', this) - if (this.limits != null && options.runOnLimitedConnection !== true) { - throw new LimitedConnectionError('Cannot open protocol stream on limited connection') - } + if (streamCount > incomingLimit) { + throw new TooManyInboundProtocolStreamsError(`Too many inbound protocol streams for protocol "${muxedStream.protocol}" - limit ${incomingLimit}`) + } - await handler({ connection: this, stream: muxedStream }) + // If a protocol stream has been successfully negotiated and is to be passed to the application, + // the peer store should ensure that the peer is registered with that protocol + await this.components.peerStore.merge(this.remotePeer, { + protocols: [muxedStream.protocol] + }, { + signal }) - .catch(async err => { - this.log.error('error handling incoming stream id %s - %e', muxedStream.id, err) 
- muxedStream.abort(err) - }) + this.components.metrics?.trackProtocolStream(muxedStream) + + const { handler, options } = this.components.registrar.getHandler(muxedStream.protocol) + + if (this.limits != null && options.runOnLimitedConnection !== true) { + throw new LimitedConnectionError('Cannot open protocol stream on limited connection') + } + + await handler(muxedStream, this) + } catch (err: any) { + muxedStream.abort(err) + } } /** * Close the connection */ async close (options: AbortOptions = {}): Promise { - if (this.status === 'closed' || this.status === 'closing') { + if (this.status !== 'open') { return } this.log('closing connection to %a', this.remoteAddr) - this.status = 'closing' + this.status = 'closed' if (options.signal == null) { const signal = AbortSignal.timeout(CLOSE_TIMEOUT) @@ -307,11 +257,8 @@ export class Connection implements ConnectionInterface { try { this.log.trace('closing underlying transport') - // ensure remaining streams are closed gracefully - await this.muxer?.close(options) - // close the underlying transport - await this.maConn.close(options) + await this.maConn.closeWrite(options) this.log.trace('updating timeline with close time') @@ -324,22 +271,14 @@ export class Connection implements ConnectionInterface { } abort (err: Error): void { - if (this.status === 'closed') { + if (this.status !== 'open') { return } - this.log.error('aborting connection to %a due to error', this.remoteAddr, err) - - this.status = 'closing' - - // ensure remaining streams are aborted - this.muxer?.abort(err) + this.status = 'aborted' // abort the underlying transport this.maConn.abort(err) - - this.status = 'closed' - this.timeline.close = Date.now() } } @@ -347,11 +286,13 @@ export function createConnection (components: ConnectionComponents, init: Connec return new Connection(components, init) } -function findIncomingStreamLimit (protocol: string, registrar: Registrar): number | undefined { +function findIncomingStreamLimit (protocol: string, registrar: Registrar): number { try { const { options } = registrar.getHandler(protocol) - return options.maxInboundStreams + if (options.maxInboundStreams != null) { + return options.maxInboundStreams + } } catch (err: any) { if (err.name !== 'UnhandledProtocolError') { throw err diff --git a/packages/libp2p/src/peer-routing.ts b/packages/libp2p/src/peer-routing.ts index 3740e917eb..6cf435b86e 100644 --- a/packages/libp2p/src/peer-routing.ts +++ b/packages/libp2p/src/peer-routing.ts @@ -1,5 +1,5 @@ import { NotFoundError } from '@libp2p/interface' -import { createScalableCuckooFilter } from '@libp2p/utils/filters' +import { createScalableCuckooFilter } from '@libp2p/utils' import merge from 'it-merge' import parallel from 'it-parallel' import { toString as uint8ArrayToString } from 'uint8arrays/to-string' diff --git a/packages/libp2p/src/registrar.ts b/packages/libp2p/src/registrar.ts index c4f10cb111..61a86ea715 100644 --- a/packages/libp2p/src/registrar.ts +++ b/packages/libp2p/src/registrar.ts @@ -1,7 +1,6 @@ import { InvalidParametersError } from '@libp2p/interface' -import { mergeOptions } from '@libp2p/utils/merge-options' -import { trackedMap } from '@libp2p/utils/tracked-map' -import * as errorsJs from './errors.js' +import { mergeOptions, trackedMap } from '@libp2p/utils' +import { DuplicateProtocolHandlerError, UnhandledProtocolError } from './errors.js' import type { IdentifyResult, Libp2pEvents, Logger, PeerUpdate, PeerId, PeerStore, Topology, StreamHandler, StreamHandlerRecord, StreamHandlerOptions, AbortOptions, 
Metrics } from '@libp2p/interface' import type { Registrar as RegistrarInterface } from '@libp2p/interface-internal' import type { ComponentLogger } from '@libp2p/logger' @@ -68,7 +67,7 @@ export class Registrar implements RegistrarInterface { const handler = this.handlers.get(protocol) if (handler == null) { - throw new errorsJs.UnhandledProtocolError(`No handler registered for protocol ${protocol}`) + throw new UnhandledProtocolError(`No handler registered for protocol ${protocol}`) } return handler @@ -91,7 +90,7 @@ export class Registrar implements RegistrarInterface { */ async handle (protocol: string, handler: StreamHandler, opts?: StreamHandlerOptions): Promise { if (this.handlers.has(protocol) && opts?.force !== true) { - throw new errorsJs.DuplicateProtocolHandlerError(`Handler already registered for protocol ${protocol}`) + throw new DuplicateProtocolHandlerError(`Handler already registered for protocol ${protocol}`) } const options = mergeOptions.bind({ ignoreUndefined: true })({ diff --git a/packages/libp2p/src/transport-manager.ts b/packages/libp2p/src/transport-manager.ts index fa469cf68b..919aa9663e 100644 --- a/packages/libp2p/src/transport-manager.ts +++ b/packages/libp2p/src/transport-manager.ts @@ -1,5 +1,5 @@ import { FaultTolerance, InvalidParametersError, NotStartedError } from '@libp2p/interface' -import { trackedMap } from '@libp2p/utils/tracked-map' +import { trackedMap } from '@libp2p/utils' import { IP4, IP6 } from '@multiformats/multiaddr-matcher' import { CustomProgressEvent } from 'progress-events' import { TransportUnavailableError, UnsupportedListenAddressError, UnsupportedListenAddressesError } from './errors.js' diff --git a/packages/libp2p/src/upgrader.ts b/packages/libp2p/src/upgrader.ts index 7db1817524..ff0a503a0f 100644 --- a/packages/libp2p/src/upgrader.ts +++ b/packages/libp2p/src/upgrader.ts @@ -1,7 +1,7 @@ import { InvalidMultiaddrError, InvalidPeerIdError } from '@libp2p/interface' import * as mss from '@libp2p/multistream-select' import { peerIdFromString } from '@libp2p/peer-id' -import { trackedMap } from '@libp2p/utils/tracked-map' +import { trackedMap } from '@libp2p/utils' import { anySignal } from 'any-signal' import { setMaxListeners } from 'main-event' import { CustomProgressEvent } from 'progress-events' @@ -9,7 +9,7 @@ import { raceSignal } from 'race-signal' import { PROTOCOL_NEGOTIATION_TIMEOUT, INBOUND_UPGRADE_TIMEOUT } from './connection-manager/constants.js' import { createConnection } from './connection.js' import { ConnectionDeniedError, ConnectionInterceptedError, EncryptionFailedError, MuxerUnavailableError } from './errors.js' -import type { Libp2pEvents, AbortOptions, ComponentLogger, MultiaddrConnection, Connection, ConnectionProtector, ConnectionEncrypter, SecuredConnection, ConnectionGater, Metrics, PeerId, PeerStore, StreamMuxerFactory, Upgrader as UpgraderInterface, UpgraderOptions, ConnectionLimits, SecureConnectionOptions, CounterGroup, ClearableSignal } from '@libp2p/interface' +import type { Libp2pEvents, AbortOptions, ComponentLogger, MultiaddrConnection, Connection, ConnectionProtector, ConnectionEncrypter, ConnectionGater, Metrics, PeerId, PeerStore, StreamMuxerFactory, Upgrader as UpgraderInterface, UpgraderOptions, ConnectionLimits, CounterGroup, ClearableSignal, MessageStream, SecuredConnection, StreamMuxer, UpgraderWithoutEncryptionOptions } from '@libp2p/interface' import type { ConnectionManager, Registrar } from '@libp2p/interface-internal' import type { TypedEventTarget } from 'main-event' @@ -17,17 
+17,22 @@ interface CreateConnectionOptions { id: string cryptoProtocol: string direction: 'inbound' | 'outbound' + + /** + * The raw underlying connection + */ maConn: MultiaddrConnection - upgradedConn: MultiaddrConnection + + /** + * The encrypted, multiplexed connection + */ + stream: MessageStream + remotePeer: PeerId - muxerFactory?: StreamMuxerFactory + muxer?: StreamMuxer limits?: ConnectionLimits } -export interface CryptoResult extends SecuredConnection { - protocol: string -} - export interface UpgraderInit { connectionEncrypters: ConnectionEncrypter[] streamMuxers: StreamMuxerFactory[] @@ -68,6 +73,10 @@ export interface UpgraderComponents { logger: ComponentLogger } +interface EncryptedConnection extends SecuredConnection { + protocol: string +} + type ConnectionDeniedType = keyof Pick export class Upgrader implements UpgraderInterface { @@ -148,7 +157,9 @@ export class Upgrader implements UpgraderInterface { /** * Upgrades an inbound connection */ - async upgradeInbound (maConn: MultiaddrConnection, opts: UpgraderOptions): Promise { + async upgradeInbound (maConn: MultiaddrConnection, opts: UpgraderOptions): Promise + async upgradeInbound (maConn: MultiaddrConnection, opts: UpgraderWithoutEncryptionOptions): Promise + async upgradeInbound (maConn: MultiaddrConnection, opts: UpgraderOptions | UpgraderWithoutEncryptionOptions): Promise { let accepted = false // always apply upgrade timeout for incoming upgrades @@ -159,7 +170,7 @@ export class Upgrader implements UpgraderInterface { inbound: true }) - accepted = await raceSignal(this.components.connectionManager.acceptIncomingConnection(maConn), signal) + accepted = this.components.connectionManager.acceptIncomingConnection(maConn) if (!accepted) { throw new ConnectionDeniedError('Connection denied') @@ -192,7 +203,9 @@ export class Upgrader implements UpgraderInterface { /** * Upgrades an outbound connection */ - async upgradeOutbound (maConn: MultiaddrConnection, opts: UpgraderOptions): Promise { + async upgradeOutbound (maConn: MultiaddrConnection, opts: UpgraderOptions): Promise + async upgradeOutbound (maConn: MultiaddrConnection, opts: UpgraderWithoutEncryptionOptions): Promise + async upgradeOutbound (maConn: MultiaddrConnection, opts: UpgraderOptions | UpgraderWithoutEncryptionOptions): Promise { try { this.metrics.dials?.increment({ outbound: true @@ -226,11 +239,11 @@ export class Upgrader implements UpgraderInterface { } } - private async _performUpgrade (maConn: MultiaddrConnection, direction: 'inbound' | 'outbound', opts: UpgraderOptions): Promise { - let encryptedConn: MultiaddrConnection + private async _performUpgrade (maConn: MultiaddrConnection, direction: 'inbound' | 'outbound', opts: UpgraderOptions | UpgraderWithoutEncryptionOptions): Promise { + let stream: MessageStream = maConn let remotePeer: PeerId - let upgradedConn: MultiaddrConnection let muxerFactory: StreamMuxerFactory | undefined + let muxer: StreamMuxer | undefined let cryptoProtocol const id = `${(parseInt(String(Math.random() * 1e9))).toString(36)}${Date.now()}` @@ -241,50 +254,36 @@ export class Upgrader implements UpgraderInterface { maConn.log.trace('starting the %s connection upgrade', direction) // Protect - let protectedConn = maConn - if (opts?.skipProtection !== true) { const protector = this.components.connectionProtector if (protector != null) { maConn.log('protecting the %s connection', direction) - protectedConn = await protector.protect(maConn, opts) + stream = await protector.protect(stream, opts) } } try { // Encrypt the 
connection - encryptedConn = protectedConn - if (opts?.skipEncryption !== true) { + if (isEncryptionSkipped(opts)) { + if (opts.remotePeer == null) { + throw new InvalidMultiaddrError(`${direction} connection that skipped encryption must have a peer id`) + } + + cryptoProtocol = 'native' + remotePeer = opts.remotePeer + } else { opts?.onProgress?.(new CustomProgressEvent(`upgrader:encrypt-${direction}-connection`)); ({ - conn: encryptedConn, + connection: stream, remotePeer, protocol: cryptoProtocol, streamMuxer: muxerFactory } = await (direction === 'inbound' - ? this._encryptInbound(protectedConn, opts) - : this._encryptOutbound(protectedConn, opts) + ? this._encryptInbound(stream, opts) + : this._encryptOutbound(stream, opts) )) - - const maConn: MultiaddrConnection = { - ...protectedConn, - ...encryptedConn - } - - await this.shouldBlockConnection(direction === 'inbound' ? 'denyInboundEncryptedConnection' : 'denyOutboundEncryptedConnection', remotePeer, maConn) - } else { - const idStr = maConn.remoteAddr.getPeerId() - - if (idStr == null) { - throw new InvalidMultiaddrError(`${direction} connection that skipped encryption must have a peer id`) - } - - const remotePeerId = peerIdFromString(idStr) - - cryptoProtocol = 'native' - remotePeer = remotePeerId } // this can happen if we dial a multiaddr without a peer id, we only find @@ -295,44 +294,42 @@ export class Upgrader implements UpgraderInterface { throw err } - upgradedConn = encryptedConn + await this.shouldBlockConnection(direction === 'inbound' ? 'denyInboundEncryptedConnection' : 'denyOutboundEncryptedConnection', remotePeer, maConn) + if (opts?.muxerFactory != null) { muxerFactory = opts.muxerFactory } else if (muxerFactory == null && this.streamMuxers.size > 0) { opts?.onProgress?.(new CustomProgressEvent(`upgrader:multiplex-${direction}-connection`)) // Multiplex the connection - const multiplexed = await (direction === 'inbound' - ? this._multiplexInbound({ - ...protectedConn, - ...encryptedConn - }, this.streamMuxers, opts) - : this._multiplexOutbound({ - ...protectedConn, - ...encryptedConn - }, this.streamMuxers, opts)) - muxerFactory = multiplexed.muxerFactory - upgradedConn = multiplexed.stream + muxerFactory = await (direction === 'inbound' + ? this._multiplexInbound(stream, this.streamMuxers, opts) + : this._multiplexOutbound(stream, this.streamMuxers, opts)) } } catch (err: any) { - maConn.log.error('failed to upgrade inbound connection %s %a - %e', direction === 'inbound' ? 'from' : 'to', maConn.remoteAddr, err) + maConn.log.error('failed to upgrade %s connection %s %a - %e', direction, direction === 'inbound' ? 'from' : 'to', maConn.remoteAddr, err) throw err } await this.shouldBlockConnection(direction === 'inbound' ? 
'denyInboundUpgradedConnection' : 'denyOutboundUpgradedConnection', remotePeer, maConn) + // create the connection muxer if one is configured + if (muxerFactory != null) { + muxer = muxerFactory.createStreamMuxer(stream) + } + const conn = this._createConnection({ id, cryptoProtocol, direction, maConn, - upgradedConn, - muxerFactory, + stream, + muxer, remotePeer, limits: opts?.limits }) - conn.log('successfully upgraded %s connection', direction) + conn.log('successfully upgraded connection') return conn } @@ -341,91 +338,49 @@ export class Upgrader implements UpgraderInterface { * A convenience method for generating a new `Connection` */ _createConnection (opts: CreateConnectionOptions): Connection { - const { - id, - cryptoProtocol, - direction, - maConn, - upgradedConn, - remotePeer, - muxerFactory, - limits - } = opts - - let connection: Connection // eslint-disable-line prefer-const - - const _timeline = maConn.timeline - maConn.timeline = new Proxy(_timeline, { - set: (...args) => { - if (args[1] === 'close' && args[2] != null && _timeline.close == null) { - // Wait for close to finish before notifying of the closure - (async () => { - try { - if (connection.status === 'open') { - await connection.close() - } - } catch (err: any) { - connection.log.error('error closing connection after timeline close %e', err) - } finally { - this.events.safeDispatchEvent('connection:close', { - detail: connection - }) - } - })().catch(err => { - connection.log.error('error thrown while dispatching connection:close event %e', err) - }) - } - - return Reflect.set(...args) - } - }) - maConn.timeline.upgraded = Date.now() - // Create the connection - connection = createConnection(this.components, { - id, - maConn: upgradedConn, - remotePeer, - direction, - muxerFactory, - encryption: cryptoProtocol, - limits, + const connection = createConnection(this.components, { + ...opts, outboundStreamProtocolNegotiationTimeout: this.outboundStreamProtocolNegotiationTimeout, inboundStreamProtocolNegotiationTimeout: this.inboundStreamProtocolNegotiationTimeout }) + connection.addEventListener('close', () => { + this.events.safeDispatchEvent('connection:close', { + detail: connection + }) + }) + this.events.safeDispatchEvent('connection:open', { detail: connection }) + opts.maConn.timeline.upgraded = Date.now() + return connection } /** * Attempts to encrypt the incoming `connection` with the provided `cryptos` */ - async _encryptInbound (connection: MultiaddrConnection, options?: AbortOptions): Promise { + async _encryptInbound (connection: MessageStream, options?: AbortOptions): Promise { const protocols = Array.from(this.connectionEncrypters.keys()) try { - const { stream, protocol } = await mss.handle(connection, protocols, { - ...options, - log: connection.log - }) + const protocol = await mss.handle(connection, protocols, options) const encrypter = this.connectionEncrypters.get(protocol) if (encrypter == null) { throw new EncryptionFailedError(`no crypto module found for ${protocol}`) } - connection.log('encrypting inbound connection to %a using %s', connection.remoteAddr, protocol) + connection.log('encrypting inbound connection using %s', protocol) return { - ...await encrypter.secureInbound(stream, options), + ...await encrypter.secureInbound(connection, options), protocol } } catch (err: any) { - connection.log.error('encrypting inbound connection from %a failed', connection.remoteAddr, err) throw new EncryptionFailedError(err.message) } } @@ -434,31 +389,26 @@ export class Upgrader implements 
UpgraderInterface { * Attempts to encrypt the given `connection` with the provided connection encrypters. * The first `ConnectionEncrypter` module to succeed will be used */ - async _encryptOutbound (connection: MultiaddrConnection, options: SecureConnectionOptions): Promise { + async _encryptOutbound (connection: MessageStream, options?: AbortOptions): Promise { const protocols = Array.from(this.connectionEncrypters.keys()) try { connection.log.trace('selecting encrypter from %s', protocols) - const { stream, protocol } = await mss.select(connection, protocols, { - ...options, - log: connection.log, - yieldBytes: true - }) + const protocol = await mss.select(connection, protocols, options) const encrypter = this.connectionEncrypters.get(protocol) if (encrypter == null) { throw new EncryptionFailedError(`no crypto module found for ${protocol}`) } - connection.log('encrypting outbound connection to %a using %s', connection.remoteAddr, protocol) + connection.log('encrypting outbound connection using %s', protocol) return { - ...await encrypter.secureOutbound(stream, options), + ...await encrypter.secureOutbound(connection, options), protocol } } catch (err: any) { - connection.log.error('encrypting outbound connection to %a failed', connection.remoteAddr, err) throw new EncryptionFailedError(err.message) } } @@ -467,27 +417,23 @@ export class Upgrader implements UpgraderInterface { * Selects one of the given muxers via multistream-select. That * muxer will be used for all future streams on the connection. */ - async _multiplexOutbound (connection: MultiaddrConnection, muxers: Map, options: AbortOptions): Promise<{ stream: MultiaddrConnection, muxerFactory?: StreamMuxerFactory }> { + async _multiplexOutbound (maConn: MessageStream, muxers: Map, options: AbortOptions): Promise { const protocols = Array.from(muxers.keys()) - connection.log('outbound selecting muxer %s', protocols) - try { - connection.log.trace('selecting stream muxer from %s', protocols) - - const { - stream, - protocol - } = await mss.select(connection, protocols, { - ...options, - log: connection.log, - yieldBytes: true - }) + maConn.log('outbound selecting muxer %s', protocols) - connection.log('selected %s as muxer protocol', protocol) + try { + maConn.log.trace('selecting stream muxer from %s', protocols) + const protocol = await mss.select(maConn, protocols, options) const muxerFactory = muxers.get(protocol) - return { stream, muxerFactory } + if (muxerFactory == null) { + throw new MuxerUnavailableError(`No muxer configured for protocol "${protocol}"`) + } + + maConn.log('selected %s as muxer protocol', protocol) + return muxerFactory } catch (err: any) { - connection.log.error('error multiplexing outbound connection', err) + maConn.log.error('error multiplexing outbound connection', err) throw new MuxerUnavailableError(String(err)) } } @@ -496,20 +442,23 @@ export class Upgrader implements UpgraderInterface { * Registers support for one of the given muxers via multistream-select. The * selected muxer will be used for all future streams on the connection. 
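The `_encryptOutbound` and `_multiplexOutbound` hunks above illustrate the simplified multistream-select API this refactor moves to: `mss.select` and `mss.handle` now negotiate in place on the supplied `MessageStream` and resolve with the agreed protocol id, rather than returning a wrapped stream whose source and sink must replace the original. A minimal dialer-side sketch, assuming `stream` is any connected `MessageStream` and using placeholder protocol ids:

```ts
import * as mss from '@libp2p/multistream-select'
import type { MessageStream } from '@libp2p/interface'

async function selectMuxer (stream: MessageStream, signal: AbortSignal): Promise<string> {
  // propose protocols in preference order; a listener would instead call
  // mss.handle(stream, protocols, { signal }) and resolve with the chosen id
  const protocol = await mss.select(stream, ['/example-muxer/1.0.0', '/fallback-muxer/1.0.0'], { signal })

  stream.log('negotiated %s', protocol)

  return protocol
}
```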
*/ - async _multiplexInbound (connection: MultiaddrConnection, muxers: Map, options: AbortOptions): Promise<{ stream: MultiaddrConnection, muxerFactory?: StreamMuxerFactory }> { + async _multiplexInbound (maConn: MessageStream, muxers: Map, options: AbortOptions): Promise { const protocols = Array.from(muxers.keys()) - connection.log('inbound handling muxers %s', protocols) + maConn.log('inbound handling muxers %s', protocols) try { - const { stream, protocol } = await mss.handle(connection, protocols, { - ...options, - log: connection.log - }) + maConn.log.trace('selecting stream muxer from %s', protocols) + const protocol = await mss.handle(maConn, protocols, options) const muxerFactory = muxers.get(protocol) - return { stream, muxerFactory } + if (muxerFactory == null) { + throw new MuxerUnavailableError(`No muxer configured for protocol "${protocol}"`) + } + + maConn.log('selected %s as muxer protocol', protocol) + return muxerFactory } catch (err: any) { - connection.log.error('error multiplexing inbound connection', err) - throw new MuxerUnavailableError(String(err)) + maConn.log.error('error multiplexing inbound connection', err) + throw err } } @@ -521,3 +470,7 @@ export class Upgrader implements UpgraderInterface { return this.streamMuxers } } + +function isEncryptionSkipped (opts?: any): opts is UpgraderWithoutEncryptionOptions { + return opts.skipEncryption === true +} diff --git a/packages/libp2p/src/utils.ts b/packages/libp2p/src/utils.ts new file mode 100644 index 0000000000..598ed33779 --- /dev/null +++ b/packages/libp2p/src/utils.ts @@ -0,0 +1,31 @@ +import type { MessageStream } from '@libp2p/interface' + +export function redirect (channelA: MessageStream, channelB: MessageStream): void { + channelA.addEventListener('message', (evt) => { + const sendMore = channelB.send(evt.data) + + if (sendMore === false) { + channelA.pause() + + channelA.addEventListener('drain', () => { + channelA.resume() + }, { + once: true + }) + } + }) + + channelB.addEventListener('message', (evt) => { + const sendMore = channelA.send(evt.data) + + if (sendMore === false) { + channelB.pause() + + channelB.addEventListener('drain', () => { + channelB.resume() + }, { + once: true + }) + } + }) +} diff --git a/packages/libp2p/test/connection-manager/connection-gater.spec.ts b/packages/libp2p/test/connection-manager/connection-gater.spec.ts index 0a090afedc..15beb30da7 100644 --- a/packages/libp2p/test/connection-manager/connection-gater.spec.ts +++ b/packages/libp2p/test/connection-manager/connection-gater.spec.ts @@ -8,6 +8,7 @@ import { multiaddr } from '@multiformats/multiaddr' import { expect } from 'aegir/chai' import Sinon from 'sinon' import { stubInterface } from 'sinon-ts' +import { Uint8ArrayList } from 'uint8arraylist' import { DefaultConnectionManager } from '../../src/connection-manager/index.js' import { Upgrader } from '../../src/upgrader.js' import { createDefaultUpgraderComponents } from '../upgrading/utils.js' @@ -81,6 +82,7 @@ describe('connection-gater', () => { await expect(upgrader.upgradeInbound(maConn, { skipEncryption: true, + remotePeer, skipProtection: true, muxerFactory: stubInterface(), signal: AbortSignal.timeout(5_000) @@ -111,6 +113,7 @@ describe('connection-gater', () => { await expect(upgrader.upgradeOutbound(maConn, { skipEncryption: true, + remotePeer, skipProtection: true, muxerFactory: stubInterface(), signal: AbortSignal.timeout(5_000) @@ -133,9 +136,10 @@ describe('connection-gater', () => { }) upgrader._encryptInbound = async (maConn) => { return { - conn: 
maConn, + connection: maConn, remotePeer, - protocol: '/test-encrypter' + protocol: '/test-encrypter', + earlyData: new Uint8ArrayList() } } @@ -170,7 +174,7 @@ describe('connection-gater', () => { }) upgrader._encryptOutbound = async (maConn) => { return { - conn: maConn, + connection: maConn, remotePeer, protocol: '/test-encrypter' } @@ -212,6 +216,7 @@ describe('connection-gater', () => { await expect(upgrader.upgradeInbound(maConn, { skipEncryption: true, + remotePeer, skipProtection: true, muxerFactory: stubInterface(), signal: AbortSignal.timeout(5_000) @@ -242,6 +247,7 @@ describe('connection-gater', () => { await expect(upgrader.upgradeOutbound(maConn, { skipEncryption: true, + remotePeer, skipProtection: true, muxerFactory: stubInterface(), signal: AbortSignal.timeout(5_000) diff --git a/packages/libp2p/test/connection-manager/index.spec.ts b/packages/libp2p/test/connection-manager/index.spec.ts index 933495d888..b7983f276e 100644 --- a/packages/libp2p/test/connection-manager/index.spec.ts +++ b/packages/libp2p/test/connection-manager/index.spec.ts @@ -140,8 +140,8 @@ describe('Connection Manager', () => { remoteAddr }) - await expect(connectionManager.acceptIncomingConnection(maConn)) - .to.eventually.be.false() + expect(connectionManager.acceptIncomingConnection(maConn)) + .to.be.false() }) it('should allow connections from allowlist multiaddrs (IPv6)', async () => { @@ -169,8 +169,8 @@ describe('Connection Manager', () => { remoteAddr }) - await expect(connectionManager.acceptIncomingConnection(maConn)) - .to.eventually.be.true() + expect(connectionManager.acceptIncomingConnection(maConn)) + .to.be.true() }) it('should deny connections from denylist multiaddrs (IPv6)', async () => { @@ -187,8 +187,8 @@ describe('Connection Manager', () => { remoteAddr }) - await expect(connectionManager.acceptIncomingConnection(maConn)) - .to.eventually.be.false() + expect(connectionManager.acceptIncomingConnection(maConn)) + .to.be.false() }) it('should deny connections when maxConnections is exceeded', async () => { @@ -212,8 +212,8 @@ describe('Connection Manager', () => { remoteAddr: multiaddr('/ip4/83.13.55.32/tcp/59283') }) - await expect(connectionManager.acceptIncomingConnection(maConn)) - .to.eventually.be.false() + expect(connectionManager.acceptIncomingConnection(maConn)) + .to.be.false() }) it('should deny connections from peers that connect too frequently', async () => { @@ -234,12 +234,12 @@ describe('Connection Manager', () => { remoteAddr: multiaddr('/ip4/34.4.63.125/tcp/4001') }) - await expect(connectionManager.acceptIncomingConnection(maConn)) - .to.eventually.be.true() + expect(connectionManager.acceptIncomingConnection(maConn)) + .to.be.true() // connect again within a second - await expect(connectionManager.acceptIncomingConnection(maConn)) - .to.eventually.be.false() + expect(connectionManager.acceptIncomingConnection(maConn)) + .to.be.false() }) it('should allow connections from allowlist multiaddrs (IPv4)', async () => { @@ -267,8 +267,8 @@ describe('Connection Manager', () => { remoteAddr }) - await expect(connectionManager.acceptIncomingConnection(maConn)) - .to.eventually.be.true() + expect(connectionManager.acceptIncomingConnection(maConn)) + .to.be.true() }) it('should allow connections from allowlist subnet (IPv4)', async () => { @@ -286,8 +286,8 @@ describe('Connection Manager', () => { remoteAddr }) - await expect(connectionManager.acceptIncomingConnection(maConn)) - .to.eventually.be.true() + expect(connectionManager.acceptIncomingConnection(maConn)) + 
.to.be.true() }) it('should deny connections from denylist subnet (IPv4)', async () => { @@ -304,8 +304,8 @@ describe('Connection Manager', () => { remoteAddr }) - await expect(connectionManager.acceptIncomingConnection(maConn)) - .to.eventually.be.false() + expect(connectionManager.acceptIncomingConnection(maConn)) + .to.be.false() }) it('should allow connections from allowlist subnet (IPv6)', async () => { @@ -323,8 +323,8 @@ describe('Connection Manager', () => { remoteAddr }) - await expect(connectionManager.acceptIncomingConnection(maConn)) - .to.eventually.be.true() + expect(connectionManager.acceptIncomingConnection(maConn)) + .to.be.true() }) it('should deny connections from denylist subnet (IPv6)', async () => { @@ -341,8 +341,8 @@ describe('Connection Manager', () => { remoteAddr }) - await expect(connectionManager.acceptIncomingConnection(maConn)) - .to.eventually.be.false() + expect(connectionManager.acceptIncomingConnection(maConn)) + .to.be.false() }) it('should limit the number of inbound pending connections', async () => { @@ -362,8 +362,8 @@ describe('Connection Manager', () => { remoteAddr: multiaddr('/ip4/34.4.63.125/tcp/4001') }) - await expect(connectionManager.acceptIncomingConnection(maConn1)) - .to.eventually.be.true() + expect(connectionManager.acceptIncomingConnection(maConn1)) + .to.be.true() // start the upgrade const maConn2 = stubInterface({ @@ -371,15 +371,15 @@ describe('Connection Manager', () => { }) // should be false because we have not completed the upgrade of maConn1 - await expect(connectionManager.acceptIncomingConnection(maConn2)) - .to.eventually.be.false() + expect(connectionManager.acceptIncomingConnection(maConn2)) + .to.be.false() // finish the maConn1 pending upgrade connectionManager.afterUpgradeInbound() // should be true because we have now completed the upgrade of maConn1 - await expect(connectionManager.acceptIncomingConnection(maConn2)) - .to.eventually.be.true() + expect(connectionManager.acceptIncomingConnection(maConn2)) + .to.be.true() }) it('should allow dialing peers when an existing limited connection exists', async () => { diff --git a/packages/utils/test/close.spec.ts b/packages/libp2p/test/connection-manager/utils.spec.ts similarity index 77% rename from packages/utils/test/close.spec.ts rename to packages/libp2p/test/connection-manager/utils.spec.ts index 83d3adf494..6045cef158 100644 --- a/packages/utils/test/close.spec.ts +++ b/packages/libp2p/test/connection-manager/utils.spec.ts @@ -1,46 +1,9 @@ import { expect } from 'aegir/chai' import { stubInterface } from 'sinon-ts' -import { safelyCloseConnectionIfUnused, safelyCloseStream } from '../src/close.js' +import { safelyCloseConnectionIfUnused } from '../../src/connection-manager/utils.js' import type { Connection, Stream } from '@libp2p/interface' describe('closing', () => { - describe('streams', () => { - it('should close a stream', async () => { - const stream = stubInterface() - - await safelyCloseStream(stream) - - expect(stream.close.called).to.be.true() - }) - - it('should pass options to a stream when closing', async () => { - const options = {} - const stream = stubInterface() - - await safelyCloseStream(stream, options) - - expect(stream.close.calledWith(options)).to.be.true() - }) - - it('should abort a stream when closing fails', async () => { - const err = new Error('Urk!') - - const stream = stubInterface({ - close: () => { - throw err - } - }) - - await safelyCloseStream(stream) - - expect(stream.abort.calledWith(err)).to.be.true() - }) - - it('should not 
error when no stream is passed', async () => { - await safelyCloseStream() - }) - }) - describe('connections', () => { it('should close a connection', async () => { const connection = stubInterface({ diff --git a/packages/libp2p/test/connection-monitor/index.spec.ts b/packages/libp2p/test/connection-monitor/index.spec.ts index 1dfa8e4cf8..da56d88b19 100644 --- a/packages/libp2p/test/connection-monitor/index.spec.ts +++ b/packages/libp2p/test/connection-monitor/index.spec.ts @@ -2,9 +2,9 @@ import { ConnectionClosedError, UnsupportedProtocolError, start, stop } from '@libp2p/interface' import { defaultLogger } from '@libp2p/logger' +import { echoStream } from '@libp2p/utils' import { expect } from 'aegir/chai' import delay from 'delay' -import { pair } from 'it-pair' import { stubInterface } from 'sinon-ts' import { ConnectionMonitor } from '../../src/connection-monitor.js' import type { ComponentLogger, Stream, Connection } from '@libp2p/interface' @@ -39,9 +39,7 @@ describe('connection monitor', () => { await start(monitor) const connection = stubInterface() - const stream = stubInterface({ - ...pair() - }) + const stream = await echoStream() connection.newStream.withArgs('/ipfs/ping/1.0.0').resolves(stream) components.connectionManager.getConnections.returns([connection]) @@ -60,9 +58,7 @@ describe('connection monitor', () => { await start(monitor) const connection = stubInterface() - const stream = stubInterface({ - ...pair() - }) + const stream = await echoStream() connection.newStream.withArgs('/foobar/ping/1.0.0').resolves(stream) components.connectionManager.getConnections.returns([connection]) diff --git a/packages/libp2p/test/connection/compliance.spec.ts b/packages/libp2p/test/connection/compliance.spec.ts deleted file mode 100644 index 59b3634dda..0000000000 --- a/packages/libp2p/test/connection/compliance.spec.ts +++ /dev/null @@ -1,153 +0,0 @@ -import { expect } from 'aegir/chai' -import Sinon from 'sinon' -import { createConnection } from '../../src/connection.js' -import { defaultConnectionComponents, defaultConnectionInit, ECHO_PROTOCOL } from './utils.ts' -import type { ConnectionComponents, ConnectionInit } from '../../src/connection.js' -import type { Connection } from '@libp2p/interface' - -describe('connection - compliance', () => { - let components: ConnectionComponents - let init: ConnectionInit - let connection: Connection - let timelineProxy - const proxyHandler = { - set () { - // @ts-expect-error - TS fails to infer here - return Reflect.set(...arguments) - } - } - - beforeEach(async () => { - components = defaultConnectionComponents() - init = await defaultConnectionInit() - - connection = createConnection(components, init) - - timelineProxy = new Proxy({ - open: Date.now() - 10, - upgraded: Date.now() - }, proxyHandler) - - connection.timeline = timelineProxy - }) - - it('should have properties set', () => { - expect(connection.id).to.exist() - expect(connection.remotePeer).to.exist() - expect(connection.remoteAddr).to.exist() - expect(connection.status).to.equal('open') - expect(connection.timeline.open).to.exist() - expect(connection.timeline.close).to.not.exist() - expect(connection.direction).to.exist() - expect(connection.streams).to.eql([]) - expect(connection.tags).to.eql([]) - }) - - it('should get the metadata of an open connection', () => { - expect(connection.status).to.equal('open') - expect(connection.direction).to.exist() - expect(connection.timeline.open).to.exist() - expect(connection.timeline.close).to.not.exist() - }) - - it('should return an 
empty array of streams', () => { - const streams = connection.streams - - expect(streams).to.eql([]) - }) - - it('should be able to create a new stream', async () => { - expect(connection.streams).to.be.empty() - - const protocolToUse = '/echo/0.0.1' - const stream = await connection.newStream([protocolToUse]) - - expect(stream).to.have.property('protocol', protocolToUse) - - const connStreams = connection.streams - - expect(stream).to.exist() - expect(connStreams).to.exist() - expect(connStreams).to.have.lengthOf(1) - expect(connStreams[0]).to.equal(stream) - }) - - it('should be able to close the connection after being created', async () => { - expect(connection.timeline.close).to.not.exist() - await connection.close() - - expect(connection.timeline.close).to.exist() - expect(connection.status).to.equal('closed') - }) - - it('should be able to close the connection after opening a stream', async () => { - // Open stream - const protocol = ECHO_PROTOCOL - await connection.newStream([protocol]) - - // Close connection - expect(connection.timeline.close).to.not.exist() - await connection.close() - - expect(connection.timeline.close).to.exist() - expect(connection.status).to.equal('closed') - }) - - it('should properly track streams', async () => { - // Open stream - const protocol = ECHO_PROTOCOL - const stream = await connection.newStream([protocol]) - expect(stream).to.have.property('protocol', protocol) - - // Close stream - await stream.close() - - expect(connection.streams.filter(s => s.id === stream.id)).to.be.empty() - }) - - it('should track outbound streams', async () => { - // Open stream - const protocol = ECHO_PROTOCOL - const stream = await connection.newStream(protocol) - expect(stream).to.have.property('direction', 'outbound') - }) - - it('should support a proxy on the timeline', async () => { - Sinon.spy(proxyHandler, 'set') - expect(connection.timeline.close).to.not.exist() - - await connection.close() - // @ts-expect-error - fails to infer callCount - expect(proxyHandler.set.callCount).to.equal(1) - // @ts-expect-error - fails to infer getCall - const [obj, key, value] = proxyHandler.set.getCall(0).args - expect(obj).to.eql(connection.timeline) - expect(key).to.equal('close') - expect(value).to.be.a('number').that.equals(connection.timeline.close) - }) - - it('should fail to create a new stream if the connection is closing', async () => { - expect(connection.timeline.close).to.not.exist() - const p = connection.close() - - try { - const protocol = ECHO_PROTOCOL - await connection.newStream([protocol]) - } catch (err: any) { - expect(err).to.exist() - return - } finally { - await p - } - - throw new Error('should fail to create a new stream if the connection is closing') - }) - - it('should fail to create a new stream if the connection is closed', async () => { - expect(connection.timeline.close).to.not.exist() - await connection.close() - - await expect(connection.newStream(['/echo/0.0.1'])).to.eventually.be.rejected - .with.property('name', 'ConnectionClosedError') - }) -}) diff --git a/packages/libp2p/test/connection/index.spec.ts b/packages/libp2p/test/connection/index.spec.ts index 65326ac5c0..018f029f9a 100644 --- a/packages/libp2p/test/connection/index.spec.ts +++ b/packages/libp2p/test/connection/index.spec.ts @@ -1,17 +1,80 @@ +import { StreamCloseEvent, TypedEventEmitter } from '@libp2p/interface' +import { defaultLogger } from '@libp2p/logger' import { peerIdFromString } from '@libp2p/peer-id' +import { echoStream, streamPair, echo } from '@libp2p/utils' import { 
multiaddr } from '@multiformats/multiaddr' import { expect } from 'aegir/chai' +import delay from 'delay' +import { encode } from 'it-length-prefixed' +import Sinon from 'sinon' +import { stubInterface } from 'sinon-ts' +import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' import { createConnection } from '../../src/connection.js' -import { defaultConnectionComponents, defaultConnectionInit } from './utils.ts' +import { UnhandledProtocolError } from '../../src/errors.ts' import type { ConnectionComponents, ConnectionInit } from '../../src/connection.js' +import type { MultiaddrConnection, PeerStore, StreamMuxer, StreamMuxerEvents } from '@libp2p/interface' +import type { Registrar } from '@libp2p/interface-internal' +import type { StubbedInstance } from 'sinon-ts' + +const ECHO_PROTOCOL = '/echo/0.0.1' describe('connection', () => { let components: ConnectionComponents + let peerStore: StubbedInstance + let registrar: StubbedInstance let init: ConnectionInit + let muxer: StubbedInstance + let maConn: StubbedInstance beforeEach(async () => { - components = defaultConnectionComponents() - init = await defaultConnectionInit() + maConn = stubInterface({ + remoteAddr: multiaddr('/ip4/127.0.0.1/tcp/1234'), + timeline: { + open: Date.now() + }, + log: defaultLogger().forComponent('libp2p:maconn') + }) + peerStore = stubInterface() + registrar = stubInterface() + + registrar.getHandler.withArgs(ECHO_PROTOCOL).returns({ + handler (stream): void { + echo(stream) + }, + options: {} + }) + + components = { + peerStore, + registrar + } + muxer = stubInterface({ + streams: [] + }) + const emitter = new TypedEventEmitter() + muxer.addEventListener.callsFake(emitter.addEventListener.bind(emitter)) + muxer.dispatchEvent.callsFake(emitter.dispatchEvent.bind(emitter)) + muxer.safeDispatchEvent.callsFake(emitter.safeDispatchEvent.bind(emitter)) + + muxer.createStream.callsFake(async () => { + const stream = await echoStream() + muxer.streams.push(stream) + + stream.addEventListener('close', () => { + muxer.streams = muxer.streams.filter(s => s !== stream) + }) + + return stream + }) + + init = stubInterface({ + maConn, + stream: maConn, + muxer, + limits: undefined, + outboundStreamProtocolNegotiationTimeout: 5_000, + inboundStreamProtocolNegotiationTimeout: 5_000 + }) }) it('should not require local or remote addrs', async () => { @@ -21,21 +84,223 @@ describe('connection', () => { }) it('should append remote peer id to address if not already present', async () => { - const conn = createConnection(components, await defaultConnectionInit({ - remoteAddr: multiaddr('/ip4/123.123.123.123/tcp/1234') - })) + maConn.remoteAddr = multiaddr('/ip4/123.123.123.123/tcp/1234') + + const conn = createConnection(components, init) expect(conn.remoteAddr.getComponents().filter(component => component.name === 'p2p')).to.have.lengthOf(1) }) it('should not append remote peer id to address if present', async () => { const remotePeer = peerIdFromString('QmNnooDu7bfjPFoTZYxMNLWUQJyrVwtbZg5gBMjTezGAJN') + maConn.remoteAddr = multiaddr(`/ip4/123.123.123.123/tcp/1234/p2p/${remotePeer}`) - const conn = createConnection(components, await defaultConnectionInit({ - remotePeer, - remoteAddr: multiaddr(`/ip4/123.123.123.123/tcp/1234/p2p/${remotePeer}`) - })) + const conn = createConnection(components, init) expect(conn.remoteAddr.getComponents().filter(component => component.name === 'p2p')).to.have.lengthOf(1) }) + + it('should have properties set', () => { + const connection = createConnection(components, init) + 
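The registrar stub above, together with `await handler(muxedStream, this)` in the `connection.ts` hunk earlier in this diff, shows the updated `StreamHandler` shape: handlers receive the stream and connection as positional arguments rather than a `{ stream, connection }` object. A hedged sketch of an echo handler in that shape, reusing the `echo` helper this spec imports from `@libp2p/utils`:

```ts
import { echo } from '@libp2p/utils'
import type { Connection, Stream } from '@libp2p/interface'

// positional-argument handler shape implied by the connection.ts change;
// registration itself (registrar.handle / libp2p.handle) is unchanged
function echoHandler (stream: Stream, connection: Connection): void {
  connection.log('echoing inbound stream %s', stream.id)

  // echo() sends received data back to the remote end, as in the stub above
  echo(stream)
}
```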
+ expect(connection.id).to.exist() + expect(connection.remotePeer).to.exist() + expect(connection.remoteAddr).to.exist() + expect(connection.status).to.equal('open') + expect(connection.timeline.open).to.exist() + expect(connection.timeline.close).to.not.exist() + expect(connection.direction).to.exist() + expect(connection.streams).to.eql([]) + }) + + it('should get the metadata of an open connection', () => { + const connection = createConnection(components, init) + + expect(connection.status).to.equal('open') + expect(connection.direction).to.exist() + expect(connection.timeline.open).to.exist() + expect(connection.timeline.close).to.not.exist() + }) + + it('should return an empty array of streams', () => { + const connection = createConnection(components, init) + const streams = connection.streams + + expect(streams).to.eql([]) + }) + + it('should be able to create a new stream', async () => { + const connection = createConnection(components, init) + expect(connection.streams).to.be.empty() + + const stream = await connection.newStream([ECHO_PROTOCOL]) + expect(stream).to.have.property('protocol', ECHO_PROTOCOL) + + expect(connection.streams).to.include(stream) + }) + + it('should be able to close the connection after being created', async () => { + const connection = createConnection(components, init) + expect(connection.timeline.close).to.not.exist() + await connection.close() + + expect(connection.timeline.close).to.exist() + expect(connection.status).to.equal('closed') + }) + + it('should be able to close the connection after opening a stream', async () => { + muxer.createStream.resolves(echoStream()) + registrar.getHandler.withArgs(ECHO_PROTOCOL).returns({ + handler (stream): void { + echo(stream) + }, + options: {} + }) + + const connection = createConnection(components, init) + await connection.newStream([ECHO_PROTOCOL]) + + // Close connection + expect(connection.timeline.close).to.not.exist() + await connection.close() + + expect(connection.timeline.close).to.exist() + expect(connection.status).to.equal('closed') + }) + + it('should remove streams that close', async () => { + const connection = createConnection(components, init) + + // Open stream + const stream = await connection.newStream([ECHO_PROTOCOL]) + expect(connection.streams).to.include(stream) + + // Close stream + await stream.closeWrite() + stream.dispatchEvent(new StreamCloseEvent()) + + expect(connection.streams).to.not.include(stream) + }) + + it('should remove streams that error', async () => { + const connection = createConnection(components, init) + + // Open stream + const stream = await connection.newStream([ECHO_PROTOCOL]) + expect(connection.streams).to.include(stream) + + // Abort stream + stream.abort(new Error('Urk!')) + + expect(connection.streams).to.not.include(stream) + }) + + it('should fail to create a new stream if the connection is closing', async () => { + const connection = createConnection(components, init) + + expect(connection.timeline.close).to.not.exist() + const p = connection.close() + + try { + const protocol = ECHO_PROTOCOL + await connection.newStream([protocol]) + } catch (err: any) { + expect(err).to.exist() + return + } finally { + await p + } + + throw new Error('should fail to create a new stream if the connection is closing') + }) + + it('should fail to create a new stream if the connection is closed', async () => { + const connection = createConnection(components, init) + + expect(connection.timeline.close).to.not.exist() + await connection.close() + + await 
expect(connection.newStream(['/echo/0.0.1'])).to.eventually.be.rejected + .with.property('name', 'ConnectionClosedError') + }) + + it('should limit the number of incoming streams that can be opened using a protocol', async () => { + const protocol = '/test/protocol' + const maxInboundStreams = 2 + + registrar.getHandler.withArgs(protocol).returns({ + handler: Sinon.stub(), + options: { + maxInboundStreams + } + }) + registrar.getProtocols.returns([protocol]) + + const connection = createConnection(components, init) + expect(connection.streams).to.have.lengthOf(0) + + for (let i = 0; i < (maxInboundStreams + 1); i++) { + const [outboundStream, inboundStream] = await streamPair() + outboundStream.send(encode.single(uint8ArrayFromString('/multistream/1.0.0\n'))) + outboundStream.send(encode.single(uint8ArrayFromString(`${protocol}\n`))) + + muxer.streams.push(inboundStream) + muxer.safeDispatchEvent('stream', { + detail: inboundStream + }) + + await delay(50) + } + + await delay(100) + + expect(muxer.streams).to.have.lengthOf(3) + expect(muxer.streams[0]).to.have.property('status', 'open') + expect(muxer.streams[1]).to.have.property('status', 'open') + expect(muxer.streams[2]).to.have.property('status', 'aborted') + }) + + it('should limit the number of outgoing streams that can be opened using a protocol', async () => { + const protocol = '/test/protocol' + const maxOutboundStreams = 2 + + registrar.getHandler.withArgs(protocol).returns({ + handler: Sinon.stub(), + options: { + maxOutboundStreams + } + }) + registrar.getProtocols.returns([protocol]) + + const connection = createConnection(components, init) + expect(connection.streams).to.have.lengthOf(0) + + expect(connection.streams).to.have.lengthOf(0) + + await connection.newStream(protocol) + await connection.newStream(protocol) + + await expect(connection.newStream(protocol)).to.eventually.be.rejected + .with.property('name', 'TooManyOutboundProtocolStreamsError') + }) + + it('should allow overriding the number of outgoing streams that can be opened using a protocol without a handler', async () => { + const protocol = '/test/protocol' + + registrar.getHandler.withArgs(protocol).throws(new UnhandledProtocolError()) + + const connection = createConnection(components, init) + expect(connection.streams).to.have.lengthOf(0) + + const opts = { + maxOutboundStreams: 3 + } + + await connection.newStream(protocol, opts) + await connection.newStream(protocol, opts) + await connection.newStream(protocol, opts) + + await expect(connection.newStream(protocol, opts)).to.eventually.be.rejected + .with.property('name', 'TooManyOutboundProtocolStreamsError') + }) }) diff --git a/packages/libp2p/test/connection/utils.ts b/packages/libp2p/test/connection/utils.ts deleted file mode 100644 index 8c549c745d..0000000000 --- a/packages/libp2p/test/connection/utils.ts +++ /dev/null @@ -1,102 +0,0 @@ -import { generateKeyPair } from '@libp2p/crypto/keys' -import { defaultLogger } from '@libp2p/logger' -import { peerIdFromPrivateKey } from '@libp2p/peer-id' -import { multiaddr } from '@multiformats/multiaddr' -import { expect } from 'aegir/chai' -import drain from 'it-drain' -import { encode } from 'it-length-prefixed' -import map from 'it-map' -import { pipe } from 'it-pipe' -import { stubInterface } from 'sinon-ts' -import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' -import { Registrar } from '../../src/registrar.ts' -import type { ConnectionComponents, ConnectionInit } from '../../src/connection.js' -import type { PeerStore, PeerId, 
StreamMuxerFactory, MultiaddrConnection, StreamMuxer, Stream } from '@libp2p/interface' -import type { Multiaddr } from '@multiformats/multiaddr' - -export const ECHO_PROTOCOL = '/echo/0.0.1' - -const registrar = stubInterface({ - getProtocols: () => [ - ECHO_PROTOCOL - ], - getHandler: (proto) => { - expect(proto).to.equal(ECHO_PROTOCOL) - - return { - handler: ({ stream }) => { - void pipe(stream, stream) - }, - options: { - - } - } - } -}) - -export function defaultConnectionComponents (): ConnectionComponents { - return { - peerStore: stubInterface(), - registrar - } -} - -interface DefaultConnectionInit { - remotePeer?: PeerId - remoteAddr?: Multiaddr -} - -export async function defaultConnectionInit (init: DefaultConnectionInit = {}): Promise { - const remotePeer = init.remotePeer ?? peerIdFromPrivateKey(await generateKeyPair('Ed25519')) - const log = defaultLogger().forComponent('connection') - const streams: Stream[] = [] - - return { - id: `${(parseInt(String(Math.random() * 1e9))).toString(36)}${Date.now()}`, - remotePeer, - maConn: stubInterface({ - log, - remoteAddr: multiaddr('/ip4/123.123.123.123/tcp/1234'), - sink: async (source) => { - await drain(source) - }, - source: (async function * () {})() - }), - direction: 'outbound', - encryption: '/secio/1.0.0', - muxerFactory: stubInterface({ - createStreamMuxer: () => stubInterface({ - sink: async (source) => { - await drain(source) - }, - source: (async function * () {})(), - streams, - newStream: () => { - const stream = stubInterface({ - log, - sink: async (source) => { - await drain(source) - }, - source: map((async function * () { - yield '/multistream/1.0.0\n' - yield `${ECHO_PROTOCOL}\n` - yield 'hello' - })(), str => encode.single(uint8ArrayFromString(str))), - close: async () => { - for (let i = 0; i < streams.length; i++) { - if (streams[i] === stream) { - streams.splice(i, 1) - i-- - } - } - } - }) - - streams.push(stream) - - return stream - } - }) - }) - } -} diff --git a/packages/libp2p/test/upgrading/upgrader.spec.ts b/packages/libp2p/test/upgrading/upgrader.spec.ts index 98ba20e73c..c97ef82642 100644 --- a/packages/libp2p/test/upgrading/upgrader.spec.ts +++ b/packages/libp2p/test/upgrading/upgrader.spec.ts @@ -1,24 +1,20 @@ /* eslint-env mocha */ import { generateKeyPair } from '@libp2p/crypto/keys' -import { logger } from '@libp2p/logger' import { peerIdFromPrivateKey } from '@libp2p/peer-id' -import { multiaddr } from '@multiformats/multiaddr' +import { multiaddrConnectionPair, streamPair } from '@libp2p/utils' import { expect } from 'aegir/chai' import delay from 'delay' -import drain from 'it-drain' import { encode } from 'it-length-prefixed' -import map from 'it-map' +import * as lp from 'it-length-prefixed' import { pEvent } from 'p-event' -import Sinon from 'sinon' import { stubInterface } from 'sinon-ts' import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' import { Upgrader } from '../../src/upgrader.js' import { createDefaultUpgraderComponents } from './utils.js' import type { UpgraderComponents, UpgraderInit } from '../../src/upgrader.js' -import type { ConnectionEncrypter, StreamMuxerFactory, MultiaddrConnection, StreamMuxer, ConnectionProtector, PeerId, SecuredConnection, Stream, StreamMuxerInit, Connection } from '@libp2p/interface' -import type { ConnectionManager, Registrar } from '@libp2p/interface-internal' -import type { Multiaddr } from '@multiformats/multiaddr' +import type { ConnectionEncrypter, StreamMuxerFactory, StreamMuxer, ConnectionProtector, PeerId, 
SecuredConnection, Connection } from '@libp2p/interface' +import type { ConnectionManager } from '@libp2p/interface-internal' describe('upgrader', () => { let components: UpgraderComponents @@ -26,8 +22,12 @@ describe('upgrader', () => { const encrypterProtocol = '/test-encrypter' const muxerProtocol = '/test-muxer' let remotePeer: PeerId - let remoteAddr: Multiaddr - let maConn: MultiaddrConnection + + const handshake = [ + lp.encode.single(uint8ArrayFromString('/multistream/1.0.0\n')), + lp.encode.single(uint8ArrayFromString(`${encrypterProtocol}\n`)), + lp.encode.single(uint8ArrayFromString(`${muxerProtocol}\n`)) + ] class BoomCrypto implements ConnectionEncrypter { static protocol = encrypterProtocol @@ -38,7 +38,6 @@ describe('upgrader', () => { beforeEach(async () => { remotePeer = peerIdFromPrivateKey(await generateKeyPair('Ed25519')) - remoteAddr = multiaddr(`/ip4/123.123.123.123/tcp/1234/p2p/${remotePeer}`) components = await createDefaultUpgraderComponents() init = { @@ -46,11 +45,11 @@ describe('upgrader', () => { stubInterface({ protocol: encrypterProtocol, secureOutbound: async (connection) => ({ - conn: connection, + connection, remotePeer }), secureInbound: async (connection) => ({ - conn: connection, + connection, remotePeer }) }) @@ -59,29 +58,23 @@ describe('upgrader', () => { stubInterface({ protocol: muxerProtocol, createStreamMuxer: () => stubInterface({ - protocol: muxerProtocol, - sink: async (source) => drain(source), - source: (async function * () {})() + protocol: muxerProtocol }) }) ] } - - maConn = stubInterface({ - remoteAddr, - log: logger('test'), - sink: async (source) => drain(source), - source: map((async function * () { - yield '/multistream/1.0.0\n' - yield `${encrypterProtocol}\n` - yield `${muxerProtocol}\n` - })(), str => encode.single(uint8ArrayFromString(str))) - }) }) it('should upgrade outbound with valid muxers and crypto', async () => { const upgrader = new Upgrader(components, init) - const conn = await upgrader.upgradeOutbound(maConn, { + + const [outbound, inbound] = multiaddrConnectionPair() + + handshake.forEach(buf => { + inbound.send(buf) + }) + + const conn = await upgrader.upgradeOutbound(outbound, { signal: AbortSignal.timeout(5_000) }) expect(conn.encryption).to.equal(encrypterProtocol) @@ -94,7 +87,13 @@ describe('upgrader', () => { streamMuxers: [] }) - const connection = await upgrader.upgradeOutbound(maConn, { + const [outbound, inbound] = multiaddrConnectionPair() + + handshake.forEach(buf => { + inbound.send(buf) + }) + + const connection = await upgrader.upgradeOutbound(outbound, { signal: AbortSignal.timeout(5_000) }) @@ -111,9 +110,20 @@ describe('upgrader', () => { connectionProtector }, init) - await upgrader.upgradeInbound(maConn, { - signal: AbortSignal.timeout(5_000) - }) + const [outbound, inbound] = multiaddrConnectionPair() + + await Promise.all([ + upgrader.upgradeInbound(inbound, { + signal: AbortSignal.timeout(5_000) + }), + (async () => { + await delay(10) + + handshake.forEach(buf => { + outbound.send(buf) + }) + })() + ]) expect(connectionProtector.protect.callCount).to.equal(1) }) @@ -127,9 +137,20 @@ describe('upgrader', () => { connectionProtector }, init) - await upgrader.upgradeOutbound(maConn, { - signal: AbortSignal.timeout(5_000) - }) + const [outbound, inbound] = multiaddrConnectionPair() + + await Promise.all([ + upgrader.upgradeOutbound(outbound, { + signal: AbortSignal.timeout(5_000) + }), + (async () => { + await delay(10) + + handshake.forEach(buf => { + inbound.send(buf) + }) + })() + ]) 
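Because the reworked upgrader reads multistream-select frames straight off the `MessageStream`, these specs replace the old `it-pair` stubs with `multiaddrConnectionPair()` and have the far end of the pair feed the expected length-prefixed responses while the upgrade is in flight. A condensed sketch of that pattern, using the encrypter and muxer protocol ids defined in this spec and assuming `upgrader` is the instance constructed earlier in the test:

```ts
import { multiaddrConnectionPair } from '@libp2p/utils'
import * as lp from 'it-length-prefixed'
import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string'

const [outbound, inbound] = multiaddrConnectionPair()

// the simulated remote answers the dialer's multistream-select proposals
const handshake = [
  '/multistream/1.0.0\n',
  '/test-encrypter\n',
  '/test-muxer\n'
].map(str => lp.encode.single(uint8ArrayFromString(str)))

const [connection] = await Promise.all([
  upgrader.upgradeOutbound(outbound, { signal: AbortSignal.timeout(5_000) }),
  (async () => {
    handshake.forEach(buf => { inbound.send(buf) })
  })()
])
```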
expect(connectionProtector.protect.callCount).to.equal(1) }) @@ -142,9 +163,20 @@ describe('upgrader', () => { ] }) - await expect(upgrader.upgradeInbound(maConn, { - signal: AbortSignal.timeout(5_000) - })).to.eventually.be.rejected + const [outbound, inbound] = multiaddrConnectionPair() + + await expect(Promise.all([ + upgrader.upgradeInbound(inbound, { + signal: AbortSignal.timeout(5_000) + }), + (async () => { + await delay(10) + + handshake.forEach(buf => { + outbound.send(buf) + }) + })() + ])).to.eventually.be.rejected .with.property('name', 'EncryptionFailedError') }) @@ -156,9 +188,24 @@ describe('upgrader', () => { ] }) - await expect(upgrader.upgradeOutbound(maConn, { - signal: AbortSignal.timeout(5_000) - })).to.eventually.be.rejected + const [outbound, inbound] = multiaddrConnectionPair() + + handshake.forEach(buf => { + inbound.send(buf) + }) + + await expect(Promise.all([ + upgrader.upgradeOutbound(outbound, { + signal: AbortSignal.timeout(5_000) + }), + (async () => { + await delay(10) + + handshake.forEach(buf => { + inbound.send(buf) + }) + })() + ])).to.eventually.be.rejected .with.property('name', 'EncryptionFailedError') }) @@ -168,12 +215,15 @@ describe('upgrader', () => { inboundUpgradeTimeout: 100 }) - maConn.source = map(maConn.source, async (buf) => { - await delay(2000) - return buf + const [outbound, inbound] = multiaddrConnectionPair({ + delay: 2_000 + }) + + handshake.forEach(buf => { + outbound.send(buf) }) - await expect(upgrader.upgradeInbound(maConn, { + await expect(upgrader.upgradeInbound(inbound, { signal: AbortSignal.timeout(5_000) })).to.eventually.be.rejected .with.property('message').that.include('aborted') @@ -185,14 +235,22 @@ describe('upgrader', () => { inboundUpgradeTimeout: 10000 }) - maConn.source = map(maConn.source, async (buf) => { - await delay(2000) - return buf + const [outbound, inbound] = multiaddrConnectionPair({ + delay: 2_000 }) - await expect(upgrader.upgradeOutbound(maConn, { - signal: AbortSignal.timeout(100) - })).to.eventually.be.rejected + await expect(Promise.all([ + upgrader.upgradeOutbound(outbound, { + signal: AbortSignal.timeout(5_000) + }), + (async () => { + await delay(10) + + handshake.forEach(buf => { + inbound.send(buf) + }) + })() + ])).to.eventually.be.rejected .with.property('message').that.include('aborted') }) @@ -205,9 +263,20 @@ describe('upgrader', () => { const connectionPromise = pEvent<'connection:open', CustomEvent>(components.events, 'connection:open') - await upgrader.upgradeInbound(maConn, { - signal: AbortSignal.timeout(5_000) - }) + const [outbound, inbound] = multiaddrConnectionPair() + + await Promise.all([ + upgrader.upgradeOutbound(outbound, { + signal: AbortSignal.timeout(5_000) + }), + (async () => { + await delay(10) + + handshake.forEach(buf => { + inbound.send(buf) + }) + })() + ]) const event = await connectionPromise @@ -222,9 +291,21 @@ describe('upgrader', () => { ...init, inboundUpgradeTimeout: 100 }) - const conn = await upgrader.upgradeOutbound(maConn, { - signal: AbortSignal.timeout(5_000) - }) + + const [outbound, inbound] = multiaddrConnectionPair() + + const [conn] = await Promise.all([ + upgrader.upgradeOutbound(outbound, { + signal: AbortSignal.timeout(5_000) + }), + (async () => { + await delay(10) + + handshake.forEach(buf => { + inbound.send(buf) + }) + })() + ]) await delay(1000) @@ -241,9 +322,19 @@ describe('upgrader', () => { const connectionPromise = pEvent<'connection:open', CustomEvent>(components.events, 'connection:open') - await upgrader.upgradeInbound(maConn, { - 
signal: AbortSignal.timeout(100) - }) + const [outbound, inbound] = multiaddrConnectionPair() + await Promise.all([ + upgrader.upgradeInbound(inbound, { + signal: AbortSignal.timeout(5_000) + }), + (async () => { + await delay(10) + + handshake.forEach(buf => { + outbound.send(buf) + }) + })() + ]) const event = await connectionPromise @@ -258,9 +349,21 @@ describe('upgrader', () => { ...init, inboundUpgradeTimeout: 10000 }) - const conn = await upgrader.upgradeOutbound(maConn, { - signal: AbortSignal.timeout(100) - }) + + const [outbound, inbound] = multiaddrConnectionPair() + + const [conn] = await Promise.all([ + upgrader.upgradeOutbound(outbound, { + signal: AbortSignal.timeout(5_000) + }), + (async () => { + await delay(10) + + handshake.forEach(buf => { + inbound.send(buf) + }) + })() + ]) await delay(1000) @@ -269,29 +372,33 @@ describe('upgrader', () => { }) it('should abort protocol selection for slow outbound stream creation', async () => { + const [outboundStream, inboundStream] = await streamPair({ + delay: 2_000 + }) + const upgrader = new Upgrader(components, { ...init, streamMuxers: [ stubInterface({ protocol: muxerProtocol, createStreamMuxer: () => stubInterface({ - protocol: muxerProtocol, - sink: async (source) => drain(source), - source: (async function * () {})(), - newStream: () => stubInterface({ - id: 'stream-id', - log: logger('test-stream'), - sink: async (source) => drain(source), - source: (async function * (): any { - await delay(2000) - yield Uint8Array.from([0, 1, 2, 3, 4]) - })() - }) + createStream: () => { + return outboundStream + } }) }) ] }) - const conn = await upgrader.upgradeOutbound(maConn, { + + inboundStream.send(Uint8Array.from([0, 1, 2, 3, 4])) + + const [outbound, inbound] = multiaddrConnectionPair() + + handshake.forEach(buf => { + inbound.send(buf) + }) + + const conn = await upgrader.upgradeOutbound(outbound, { signal: AbortSignal.timeout(5_000) }) @@ -302,7 +409,9 @@ describe('upgrader', () => { }) it('should abort stream when protocol negotiation fails on outbound stream', async () => { - let stream: Stream | undefined + const [outboundStream, inboundStream] = await streamPair({ + delay: 2_000 + }) const upgrader = new Upgrader(components, { ...init, @@ -310,30 +419,24 @@ describe('upgrader', () => { stubInterface({ protocol: muxerProtocol, createStreamMuxer: () => stubInterface({ - protocol: muxerProtocol, - sink: async (source) => drain(source), - source: (async function * () { - await delay(2000) - yield Uint8Array.from([0, 1, 2, 3, 4]) - })(), - newStream: () => { - stream = stubInterface({ - id: 'stream-id', - log: logger('test-stream'), - sink: async (source) => drain(source), - source: map((async function * () { - yield '/multistream/1.0.0\n' - yield '/different/protocol\n' - })(), str => encode.single(uint8ArrayFromString(str))) - }) - - return stream + createStream: () => { + return outboundStream } }) }) ] }) - const conn = await upgrader.upgradeOutbound(maConn, { + + inboundStream.send(encode.single(uint8ArrayFromString('/multistream/1.0.0\n'))) + inboundStream.send(encode.single(uint8ArrayFromString('/different/protocol\n'))) + + const [outbound, inbound] = multiaddrConnectionPair() + + handshake.forEach(buf => { + inbound.send(buf) + }) + + const conn = await upgrader.upgradeOutbound(outbound, { signal: AbortSignal.timeout(5_000) }) @@ -343,7 +446,7 @@ describe('upgrader', () => { // wait for remote to close await delay(100) - expect(stream?.abort).to.have.property('called', true) + 
expect(outboundStream).to.have.property('status', 'aborted') }) it('should allow skipping outbound encryption and protection', async () => { @@ -360,15 +463,19 @@ describe('upgrader', () => { connectionEncrypter ] }) - await upgrader.upgradeOutbound(maConn, { + + const [outbound, inbound] = multiaddrConnectionPair() + + handshake.forEach(buf => { + inbound.send(buf) + }) + + await upgrader.upgradeOutbound(outbound, { skipEncryption: true, + remotePeer, skipProtection: true, muxerFactory: stubInterface({ - createStreamMuxer: () => stubInterface({ - protocol: muxerProtocol, - sink: async (source) => drain(source), - source: (async function * () {})() - }) + createStreamMuxer: () => stubInterface() }), signal: AbortSignal.timeout(5_000) }) @@ -391,15 +498,19 @@ describe('upgrader', () => { connectionEncrypter ] }) - await upgrader.upgradeInbound(maConn, { + + const [outbound, inbound] = multiaddrConnectionPair() + + handshake.forEach(buf => { + outbound.send(buf) + }) + + await upgrader.upgradeInbound(inbound, { skipEncryption: true, + remotePeer, skipProtection: true, muxerFactory: stubInterface({ - createStreamMuxer: () => stubInterface({ - protocol: muxerProtocol, - sink: async (source) => drain(source), - source: (async function * () {})() - }) + createStreamMuxer: () => stubInterface() }), signal: AbortSignal.timeout(5_000) }) @@ -410,227 +521,29 @@ describe('upgrader', () => { it('should not decrement inbound pending connection count if the connection is denied', async () => { const components = await createDefaultUpgraderComponents({ connectionManager: stubInterface({ - acceptIncomingConnection: async () => false + acceptIncomingConnection: () => false }) }) const upgrader = new Upgrader(components, init) - await expect(upgrader.upgradeInbound(maConn, { - signal: AbortSignal.timeout(5_000) - })).to.eventually.be.rejected - .with.property('name', 'ConnectionDeniedError') - - expect(components.connectionManager.afterUpgradeInbound).to.have.property('called', false) - }) - - it('should limit the number of incoming streams that can be opened using a protocol', async () => { - const protocol = '/test/protocol' - const maxInboundStreams = 2 - let streamMuxerInit: StreamMuxerInit | undefined - let streamMuxer: StreamMuxer | undefined - const components = await createDefaultUpgraderComponents({ - registrar: stubInterface({ - getHandler: () => ({ - options: { - maxInboundStreams - }, - handler: Sinon.stub() - }), - getProtocols: () => [protocol] - }) - }) - const upgrader = new Upgrader(components, { - ...init, - streamMuxers: [ - stubInterface({ - protocol: muxerProtocol, - createStreamMuxer: (init) => { - streamMuxerInit = init - streamMuxer = stubInterface({ - protocol: muxerProtocol, - sink: async (source) => drain(source), - source: (async function * () {})(), - streams: [] - }) - return streamMuxer - } - }) - ] - }) - - const connectionPromise = pEvent<'connection:open', CustomEvent>(components.events, 'connection:open') - - await upgrader.upgradeInbound(maConn, { - signal: AbortSignal.timeout(5_000) - }) - - const event = await connectionPromise - - expect(event.detail.streams).to.have.lengthOf(0) - - for (let i = 0; i < (maxInboundStreams + 1); i++) { - const incomingStream = stubInterface({ - id: `stream-id-${i}`, - log: logger('test-stream'), - direction: 'inbound', - sink: async (source) => drain(source), - source: map((async function * () { - yield '/multistream/1.0.0\n' - yield `${protocol}\n` - })(), str => encode.single(uint8ArrayFromString(str))), - close: async () => {} - }) - 
- streamMuxer?.streams.push(incomingStream) - streamMuxerInit?.onIncomingStream?.(incomingStream) - } - - await delay(100) - - expect(streamMuxer?.streams).to.have.lengthOf(3) - expect(streamMuxer?.streams[0]).to.have.nested.property('abort.called', false) - expect(streamMuxer?.streams[1]).to.have.nested.property('abort.called', false) - expect(streamMuxer?.streams[2]).to.have.nested.property('abort.called', true) - }) - - it('should limit the number of outgoing streams that can be opened using a protocol', async () => { - const protocol = '/test/protocol' - const maxOutboundStreams = 2 - let streamMuxer: StreamMuxer | undefined - const components = await createDefaultUpgraderComponents({ - registrar: stubInterface({ - getHandler: () => ({ - options: { - maxOutboundStreams - }, - handler: Sinon.stub() - }), - getProtocols: () => [protocol] - }) - }) - const upgrader = new Upgrader(components, { - ...init, - streamMuxers: [ - stubInterface({ - protocol: muxerProtocol, - createStreamMuxer: () => { - streamMuxer = stubInterface({ - protocol: muxerProtocol, - sink: async (source) => drain(source), - source: (async function * () {})(), - streams: [], - newStream: () => { - const outgoingStream = stubInterface({ - id: 'stream-id', - log: logger('test-stream'), - direction: 'outbound', - sink: async (source) => drain(source), - source: map((async function * () { - yield '/multistream/1.0.0\n' - yield `${protocol}\n` - })(), str => encode.single(uint8ArrayFromString(str))) - }) - - streamMuxer?.streams.push(outgoingStream) - return outgoingStream - } - }) - return streamMuxer - } - }) - ] - }) - - const connectionPromise = pEvent<'connection:open', CustomEvent>(components.events, 'connection:open') - - await upgrader.upgradeInbound(maConn, { - signal: AbortSignal.timeout(5_000) - }) - - const event = await connectionPromise - const conn = event.detail - - expect(conn.streams).to.have.lengthOf(0) - await conn.newStream(protocol) - await conn.newStream(protocol) + const [outbound, inbound] = multiaddrConnectionPair() - await expect(conn.newStream(protocol)).to.eventually.be.rejected - .with.property('name', 'TooManyOutboundProtocolStreamsError') - }) - - it('should allow overriding the number of outgoing streams that can be opened using a protocol without a handler', async () => { - const protocol = '/test/protocol' - let streamMuxer: StreamMuxer | undefined - const components = await createDefaultUpgraderComponents({ - registrar: stubInterface({ - getHandler: () => ({ - options: {}, - handler: Sinon.stub() - }), - getProtocols: () => [protocol] - }) - }) - const upgrader = new Upgrader(components, { - ...init, - streamMuxers: [ - stubInterface({ - protocol: muxerProtocol, - createStreamMuxer: () => { - streamMuxer = stubInterface({ - protocol: muxerProtocol, - sink: async (source) => drain(source), - source: (async function * () {})(), - streams: [], - newStream: () => { - const outgoingStream = stubInterface({ - id: 'stream-id', - log: logger('test-stream'), - direction: 'outbound', - sink: async (source) => drain(source), - source: map((async function * () { - yield '/multistream/1.0.0\n' - yield `${protocol}\n` - })(), str => encode.single(uint8ArrayFromString(str))) - }) - - streamMuxer?.streams.push(outgoingStream) - return outgoingStream - } - }) - return streamMuxer - } - }) - ] + handshake.forEach(buf => { + outbound.send(buf) }) - const connectionPromise = pEvent<'connection:open', CustomEvent>(components.events, 'connection:open') - - await upgrader.upgradeInbound(maConn, { + await 
expect(upgrader.upgradeInbound(inbound, { signal: AbortSignal.timeout(5_000) - }) - - const event = await connectionPromise - const conn = event.detail - - expect(conn.streams).to.have.lengthOf(0) - - const opts = { - maxOutboundStreams: 3 - } - - await conn.newStream(protocol, opts) - await conn.newStream(protocol, opts) - await conn.newStream(protocol, opts) + })).to.eventually.be.rejected + .with.property('name', 'ConnectionDeniedError') - await expect(conn.newStream(protocol, opts)).to.eventually.be.rejected - .with.property('name', 'TooManyOutboundProtocolStreamsError') + expect(components.connectionManager.afterUpgradeInbound).to.have.property('called', false) }) describe('early muxer selection', () => { let earlyMuxerProtocol: string let streamMuxerFactory: StreamMuxerFactory let upgrader: Upgrader - let maConn: MultiaddrConnection let encrypterProtocol: string beforeEach(async () => { @@ -639,9 +552,7 @@ describe('upgrader', () => { streamMuxerFactory = stubInterface({ protocol: earlyMuxerProtocol, createStreamMuxer: () => stubInterface({ - protocol: earlyMuxerProtocol, - sink: async (source) => drain(source), - source: (async function * () {})() + protocol: earlyMuxerProtocol }) }) @@ -650,12 +561,12 @@ describe('upgrader', () => { stubInterface({ protocol: encrypterProtocol, secureOutbound: async (connection) => ({ - conn: connection, + connection, remotePeer, streamMuxer: streamMuxerFactory }), secureInbound: async (connection) => ({ - conn: connection, + connection, remotePeer, streamMuxer: streamMuxerFactory }) @@ -665,31 +576,28 @@ describe('upgrader', () => { stubInterface({ protocol: '/late-muxer', createStreamMuxer: () => stubInterface({ - protocol: '/late-muxer', - sink: async (source) => drain(source), - source: (async function * () {})() + protocol: '/late-muxer' }) }) ] }) - - maConn = stubInterface({ - remoteAddr, - log: logger('test'), - sink: async (source) => drain(source), - source: map((async function * () { - yield '/multistream/1.0.0\n' - yield `${encrypterProtocol}\n` - })(), str => encode.single(uint8ArrayFromString(str))) - }) }) it('should allow early muxer selection on inbound connection', async () => { const connectionPromise = pEvent<'connection:open', CustomEvent>(components.events, 'connection:open') + const [outbound, inbound] = multiaddrConnectionPair() - await upgrader.upgradeInbound(maConn, { - signal: AbortSignal.timeout(5_000) - }) + await Promise.all([ + upgrader.upgradeInbound(inbound, { + signal: AbortSignal.timeout(5_000) + }), + (async () => { + await delay(10) + + outbound.send(encode.single(uint8ArrayFromString('/multistream/1.0.0\n'))) + outbound.send(encode.single(uint8ArrayFromString(`${encrypterProtocol}\n`))) + })() + ]) const event = await connectionPromise const conn = event.detail @@ -698,9 +606,19 @@ describe('upgrader', () => { }) it('should allow early muxer selection on outbound connection', async () => { - const conn = await upgrader.upgradeOutbound(maConn, { - signal: AbortSignal.timeout(5_000) - }) + const [outbound, inbound] = multiaddrConnectionPair() + + const [conn] = await Promise.all([ + upgrader.upgradeOutbound(outbound, { + signal: AbortSignal.timeout(5_000) + }), + (async () => { + await delay(10) + + inbound.send(encode.single(uint8ArrayFromString('/multistream/1.0.0\n'))) + inbound.send(encode.single(uint8ArrayFromString(`${encrypterProtocol}\n`))) + })() + ]) expect(conn.multiplexer).to.equal(earlyMuxerProtocol) }) diff --git a/packages/libp2p/test/upgrading/utils.ts b/packages/libp2p/test/upgrading/utils.ts 
index 9c57a8a58d..cac15f9f35 100644
--- a/packages/libp2p/test/upgrading/utils.ts
+++ b/packages/libp2p/test/upgrading/utils.ts
@@ -27,7 +27,7 @@ export async function createDefaultUpgraderComponents (options?: Partial({
-      acceptIncomingConnection: async () => true
+      acceptIncomingConnection: () => true
     }),
    connectionGater: stubInterface(),
    registrar: stubInterface(),
diff --git a/packages/metrics-opentelemetry/package.json b/packages/metrics-opentelemetry/package.json
index a82f6c1872..b0fe53e652 100644
--- a/packages/metrics-opentelemetry/package.json
+++ b/packages/metrics-opentelemetry/package.json
@@ -42,9 +42,7 @@
   "dependencies": {
     "@libp2p/interface": "^2.10.5",
     "@libp2p/utils": "^6.7.1",
-    "@opentelemetry/api": "^1.9.0",
-    "it-foreach": "^2.1.3",
-    "it-stream-types": "^2.0.2"
+    "@opentelemetry/api": "^1.9.0"
   },
   "devDependencies": {
     "@libp2p/logger": "^5.1.21",
diff --git a/packages/metrics-opentelemetry/src/index.ts b/packages/metrics-opentelemetry/src/index.ts
index 841b6c2356..8a9bbc2ded 100644
--- a/packages/metrics-opentelemetry/src/index.ts
+++ b/packages/metrics-opentelemetry/src/index.ts
@@ -34,11 +34,8 @@
  */
 import { InvalidParametersError, serviceCapabilities } from '@libp2p/interface'
-import { isAsyncGenerator } from '@libp2p/utils/is-async-generator'
-import { isGenerator } from '@libp2p/utils/is-generator'
-import { isPromise } from '@libp2p/utils/is-promise'
+import { isAsyncGenerator, isGenerator, isPromise } from '@libp2p/utils'
 import { trace, metrics, context, SpanStatusCode } from '@opentelemetry/api'
-import each from 'it-foreach'
 import { OpenTelemetryCounterGroup } from './counter-group.js'
 import { OpenTelemetryCounter } from './counter.js'
 import { OpenTelemetryHistogramGroup } from './histogram-group.js'
@@ -48,9 +45,8 @@ import { OpenTelemetryMetric } from './metric.js'
 import { OpenTelemetrySummaryGroup } from './summary-group.js'
 import { OpenTelemetrySummary } from './summary.js'
 import { collectSystemMetrics } from './system-metrics.js'
-import type { MultiaddrConnection, Stream, Connection, Metric, MetricGroup, Metrics, CalculatedMetricOptions, MetricOptions, Counter, CounterGroup, Histogram, HistogramOptions, HistogramGroup, Summary, SummaryOptions, SummaryGroup, CalculatedHistogramOptions, CalculatedSummaryOptions, NodeInfo, TraceFunctionOptions, TraceGeneratorFunctionOptions, TraceAttributes, ComponentLogger, Logger } from '@libp2p/interface'
+import type { MultiaddrConnection, Stream, Metric, MetricGroup, Metrics, CalculatedMetricOptions, MetricOptions, Counter, CounterGroup, Histogram, HistogramOptions, HistogramGroup, Summary, SummaryOptions, SummaryGroup, CalculatedHistogramOptions, CalculatedSummaryOptions, NodeInfo, TraceFunctionOptions, TraceGeneratorFunctionOptions, TraceAttributes, ComponentLogger, Logger, MessageStream } from '@libp2p/interface'
 import type { Span, Attributes, Meter, Observable } from '@opentelemetry/api'
-import type { Duplex } from 'it-stream-types'

 // see https://betterstack.com/community/guides/observability/opentelemetry-metrics-nodejs/#prerequisites

@@ -155,27 +151,24 @@ class OpenTelemetryMetrics implements Metrics {
    * Override the sink/source of the stream to count the bytes
    * in and out
    */
-  _track (stream: Duplex>, name: string): void {
-    const self = this
+  _track (stream: MessageStream, name: string): void {
+    stream.addEventListener('message', (evt) => {
+      this._incrementValue(`${name} received`, evt.data.byteLength)
+    })

-    const sink = stream.sink
-    stream.sink = async function trackedSink (source) {
-      await
sink(each(source, buf => { - self._incrementValue(`${name} sent`, buf.byteLength) - })) - } + const send = stream.send.bind(stream) + stream.send = (buf) => { + this._incrementValue(`${name} sent`, buf.byteLength) - const source = stream.source - stream.source = each(source, buf => { - self._incrementValue(`${name} received`, buf.byteLength) - }) + return send(buf) + } } trackMultiaddrConnection (maConn: MultiaddrConnection): void { this._track(maConn, 'global') } - trackProtocolStream (stream: Stream, connection: Connection): void { + trackProtocolStream (stream: Stream): void { if (stream.protocol == null) { // protocol not negotiated yet, should not happen as the upgrader // calls this handler after protocol negotiation diff --git a/packages/metrics-prometheus/package.json b/packages/metrics-prometheus/package.json index 3523b87240..33c8e690fe 100644 --- a/packages/metrics-prometheus/package.json +++ b/packages/metrics-prometheus/package.json @@ -42,21 +42,13 @@ }, "dependencies": { "@libp2p/interface": "^2.10.5", - "it-foreach": "^2.1.3", - "it-stream-types": "^2.0.2", - "prom-client": "^15.1.3", - "uint8arraylist": "^2.4.8" + "prom-client": "^15.1.3" }, "devDependencies": { - "@libp2p/crypto": "^5.1.7", - "@libp2p/interface-compliance-tests": "^6.4.16", "@libp2p/logger": "^5.1.21", - "@libp2p/peer-id": "^5.1.8", - "@multiformats/multiaddr": "^12.4.4", + "@libp2p/utils": "^6.7.1", "aegir": "^47.0.14", - "it-drain": "^3.0.9", - "it-pipe": "^3.0.1", - "p-defer": "^4.0.1" + "race-event": "^1.6.1" }, "sideEffects": false } diff --git a/packages/metrics-prometheus/src/index.ts b/packages/metrics-prometheus/src/index.ts index a7ba826fd0..94a7b8512d 100644 --- a/packages/metrics-prometheus/src/index.ts +++ b/packages/metrics-prometheus/src/index.ts @@ -69,7 +69,6 @@ import { statfs } from 'node:fs/promises' import { totalmem } from 'node:os' import { serviceCapabilities } from '@libp2p/interface' -import each from 'it-foreach' import { collectDefaultMetrics, register } from 'prom-client' import { PrometheusCounterGroup } from './counter-group.js' import { PrometheusCounter } from './counter.js' @@ -79,10 +78,8 @@ import { PrometheusMetricGroup } from './metric-group.js' import { PrometheusMetric } from './metric.js' import { PrometheusSummaryGroup } from './summary-group.js' import { PrometheusSummary } from './summary.js' -import type { ComponentLogger, Logger, MultiaddrConnection, Stream, Connection, CalculatedMetricOptions, Counter, CounterGroup, Metric, MetricGroup, MetricOptions, Metrics, CalculatedHistogramOptions, CalculatedSummaryOptions, HistogramOptions, Histogram, HistogramGroup, SummaryOptions, Summary, SummaryGroup } from '@libp2p/interface' -import type { Duplex } from 'it-stream-types' +import type { ComponentLogger, Logger, MultiaddrConnection, Stream, CalculatedMetricOptions, Counter, CounterGroup, Metric, MetricGroup, MetricOptions, Metrics, CalculatedHistogramOptions, CalculatedSummaryOptions, HistogramOptions, Histogram, HistogramGroup, SummaryOptions, Summary, SummaryGroup, MessageStream } from '@libp2p/interface' import type { DefaultMetricsCollectorConfiguration, Registry, RegistryContentType } from 'prom-client' -import type { Uint8ArrayList } from 'uint8arraylist' // export helper functions for creating buckets export { linearBuckets, exponentialBuckets } from 'prom-client' @@ -238,27 +235,24 @@ class PrometheusMetrics implements Metrics { * Override the sink/source of the stream to count the bytes * in and out */ - _track (stream: Duplex>, name: string): void { - 
const self = this - - const sink = stream.sink - stream.sink = async function trackedSink (source) { - await sink(each(source, buf => { - self._incrementValue(`${name} sent`, buf.byteLength) - })) - } - - const source = stream.source - stream.source = each(source, buf => { - self._incrementValue(`${name} received`, buf.byteLength) + _track (stream: MessageStream, name: string): void { + stream.addEventListener('message', (evt) => { + this._incrementValue(`${name} received`, evt.data.byteLength) }) + + const send = stream.send.bind(stream) + stream.send = (buf) => { + this._incrementValue(`${name} sent`, buf.byteLength) + + return send(buf) + } } trackMultiaddrConnection (maConn: MultiaddrConnection): void { this._track(maConn, 'global') } - trackProtocolStream (stream: Stream, connection: Connection): void { + trackProtocolStream (stream: Stream): void { if (stream.protocol == null) { // protocol not negotiated yet, should not happen as the upgrader // calls this handler after protocol negotiation diff --git a/packages/metrics-prometheus/test/streams.spec.ts b/packages/metrics-prometheus/test/streams.spec.ts index 038565e41c..3ebe002304 100644 --- a/packages/metrics-prometheus/test/streams.spec.ts +++ b/packages/metrics-prometheus/test/streams.spec.ts @@ -1,46 +1,13 @@ -import { generateKeyPair } from '@libp2p/crypto/keys' -import { connectionPair, mockRegistrar, mockMultiaddrConnPair } from '@libp2p/interface-compliance-tests/mocks' import { defaultLogger } from '@libp2p/logger' -import { peerIdFromPrivateKey } from '@libp2p/peer-id' -import { multiaddr } from '@multiformats/multiaddr' +import { multiaddrConnectionPair, streamPair } from '@libp2p/utils' import { expect } from 'aegir/chai' -import drain from 'it-drain' -import { pipe } from 'it-pipe' -import defer from 'p-defer' import client from 'prom-client' +import { raceEvent } from 'race-event' import { prometheusMetrics } from '../src/index.js' -import type { Connection } from '@libp2p/interface' describe('streams', () => { - let connectionA: Connection - let connectionB: Connection - - afterEach(async () => { - if (connectionA != null) { - await connectionA.close() - } - - if (connectionB != null) { - await connectionB.close() - } - }) - - it('should track bytes sent over connections', async () => { - const deferred = defer() - const remotePeer = peerIdFromPrivateKey(await generateKeyPair('Ed25519')) - - const { outbound, inbound } = mockMultiaddrConnPair({ - addrs: [ - multiaddr('/ip4/123.123.123.123/tcp/5923'), - multiaddr('/ip4/123.123.123.123/tcp/5924') - ], - remotePeer - }) - - // process all the bytes - void pipe(inbound, drain).then(() => { - deferred.resolve() - }) + it('should track bytes sent over outbound connections', async () => { + const [outbound, inbound] = multiaddrConnectionPair() const metrics = prometheusMetrics()({ logger: defaultLogger() @@ -51,127 +18,71 @@ describe('streams', () => { // send data to the remote over the tracked stream const data = Uint8Array.from([0, 1, 2, 3, 4]) - await outbound.sink(async function * () { - yield data - }()) - - // wait for all bytes to be received - await deferred.promise + outbound.send(data) + await outbound.closeWrite() + await raceEvent(inbound, 'close') const scrapedMetrics = await client.register.metrics() expect(scrapedMetrics).to.include(`libp2p_data_transfer_bytes_total{protocol="global sent"} ${data.length}`) }) - it('should track bytes received over connections', async () => { - const deferred = defer() - const remotePeer = peerIdFromPrivateKey(await 
generateKeyPair('Ed25519')) - - const { outbound, inbound } = mockMultiaddrConnPair({ - addrs: [ - multiaddr('/ip4/123.123.123.123/tcp/5923'), - multiaddr('/ip4/123.123.123.123/tcp/5924') - ], - remotePeer - }) + it('should track bytes received over outbound connections', async () => { + const [outbound, inbound] = multiaddrConnectionPair() const metrics = prometheusMetrics()({ logger: defaultLogger() }) // track incoming stream - metrics.trackMultiaddrConnection(inbound) + metrics.trackMultiaddrConnection(outbound) // send data to the remote over the tracked stream const data = Uint8Array.from([0, 1, 2, 3, 4]) - await outbound.sink(async function * () { - yield data - }()) - - // process all the bytes - void pipe(inbound, drain).then(() => { - deferred.resolve() - }) - - // wait for all bytes to be received - await deferred.promise + inbound.send(data) + await inbound.closeWrite() + await raceEvent(outbound, 'close') const scrapedMetrics = await client.register.metrics() expect(scrapedMetrics).to.include(`libp2p_data_transfer_bytes_total{protocol="global received"} ${data.length}`) }) - it('should track sent stream metrics', async () => { - const protocol = '/my-protocol-send/1.0.0' - const peerA = { - peerId: peerIdFromPrivateKey(await generateKeyPair('Ed25519')), - registrar: mockRegistrar() - } - const peerB = { - peerId: peerIdFromPrivateKey(await generateKeyPair('Ed25519')), - registrar: mockRegistrar() - } - await peerB.registrar.handle(protocol, ({ stream }) => { - void pipe(stream, drain) - }) - - ;[connectionA, connectionB] = connectionPair(peerA, peerB) - const aToB = await connectionA.newStream(protocol) + it('should track bytes sent over outbound streams', async () => { + const [outbound, inbound] = await streamPair() const metrics = prometheusMetrics()({ logger: defaultLogger() }) // track outgoing stream - metrics.trackProtocolStream(aToB, connectionA) + metrics.trackProtocolStream(outbound) // send data to the remote over the tracked stream const data = Uint8Array.from([0, 1, 2, 3, 4]) - await aToB.sink([ - data - ]) + outbound.send(data) + await outbound.closeWrite() + await raceEvent(inbound, 'close') const scrapedMetrics = await client.register.metrics() - expect(scrapedMetrics).to.include(`libp2p_data_transfer_bytes_total{protocol="${protocol} sent"} ${data.length}`) + expect(scrapedMetrics).to.include(`libp2p_data_transfer_bytes_total{protocol="${outbound.protocol} sent"} ${data.length}`) }) - it('should track sent received metrics', async () => { - const deferred = defer() - const protocol = '/my-protocol-receive/1.0.0' - const peerA = { - peerId: peerIdFromPrivateKey(await generateKeyPair('Ed25519')), - registrar: mockRegistrar() - } - await peerA.registrar.handle(protocol, ({ stream, connection }) => { - // track incoming stream - metrics.trackProtocolStream(stream, connectionA) - - // ignore data - void pipe(stream, drain).then(() => { - deferred.resolve() - }) - }) - const peerB = { - peerId: peerIdFromPrivateKey(await generateKeyPair('Ed25519')), - registrar: mockRegistrar() - } + it('should track bytes received over outbound streams', async () => { + const [outbound, inbound] = await streamPair() const metrics = prometheusMetrics()({ logger: defaultLogger() }) - ;[connectionA, connectionB] = connectionPair(peerA, peerB) - - const bToA = await connectionB.newStream(protocol) + // track incoming stream + metrics.trackProtocolStream(outbound) // send data from remote to local const data = Uint8Array.from([0, 1, 2, 3, 4]) - await bToA.sink([ - data - ]) - - // wait 
for data to have been transferred - await deferred.promise + inbound.send(data) + await inbound.closeWrite() + await raceEvent(outbound, 'close') const scrapedMetrics = await client.register.metrics() - expect(scrapedMetrics).to.include(`libp2p_data_transfer_bytes_total{protocol="${protocol} received"} ${data.length}`) + expect(scrapedMetrics).to.include(`libp2p_data_transfer_bytes_total{protocol="${inbound.protocol} received"} ${data.length}`) }) }) diff --git a/packages/metrics-simple/package.json b/packages/metrics-simple/package.json index 9962b0c745..eff812404c 100644 --- a/packages/metrics-simple/package.json +++ b/packages/metrics-simple/package.json @@ -44,8 +44,6 @@ "dependencies": { "@libp2p/interface": "^2.10.5", "@libp2p/logger": "^5.1.21", - "it-foreach": "^2.1.3", - "it-stream-types": "^2.0.2", "tdigest": "^0.1.2" }, "devDependencies": { diff --git a/packages/metrics-simple/src/index.ts b/packages/metrics-simple/src/index.ts index 1445c3e9c6..b2064e2721 100644 --- a/packages/metrics-simple/src/index.ts +++ b/packages/metrics-simple/src/index.ts @@ -25,10 +25,8 @@ import { serviceCapabilities } from '@libp2p/interface' import { logger } from '@libp2p/logger' -import each from 'it-foreach' import { TDigest } from 'tdigest' -import type { Startable, MultiaddrConnection, Stream, Connection, Metric, MetricGroup, StopTimer, Metrics, CalculatedMetricOptions, MetricOptions, Counter, CounterGroup, CalculateMetric, Histogram, HistogramOptions, HistogramGroup, Summary, SummaryOptions, SummaryGroup, CalculatedHistogramOptions, CalculatedSummaryOptions, ComponentLogger, Logger } from '@libp2p/interface' -import type { Duplex } from 'it-stream-types' +import type { Startable, MultiaddrConnection, Stream, Metric, MetricGroup, StopTimer, Metrics, CalculatedMetricOptions, MetricOptions, Counter, CounterGroup, CalculateMetric, Histogram, HistogramOptions, HistogramGroup, Summary, SummaryOptions, SummaryGroup, CalculatedHistogramOptions, CalculatedSummaryOptions, ComponentLogger, Logger, MessageStream } from '@libp2p/interface' const log = logger('libp2p:simple-metrics') @@ -417,27 +415,24 @@ class SimpleMetrics implements Metrics, Startable { * Override the sink/source of the stream to count the bytes * in and out */ - _track (stream: Duplex>, name: string): void { - const self = this + _track (stream: MessageStream, name: string): void { + stream.addEventListener('message', (evt) => { + this._incrementValue(`${name} received`, evt.data.byteLength) + }) - const sink = stream.sink - stream.sink = async function trackedSink (source) { - await sink(each(source, buf => { - self._incrementValue(`${name} sent`, buf.byteLength) - })) - } + const send = stream.send.bind(stream) + stream.send = (buf) => { + this._incrementValue(`${name} sent`, buf.byteLength) - const source = stream.source - stream.source = each(source, buf => { - self._incrementValue(`${name} received`, buf.byteLength) - }) + return send(buf) + } } trackMultiaddrConnection (maConn: MultiaddrConnection): void { this._track(maConn, 'global') } - trackProtocolStream (stream: Stream, connection: Connection): void { + trackProtocolStream (stream: Stream): void { if (stream.protocol == null) { // protocol not negotiated yet, should not happen as the upgrader // calls this handler after protocol negotiation diff --git a/packages/multistream-select/package.json b/packages/multistream-select/package.json index 8431888df7..8af384671f 100644 --- a/packages/multistream-select/package.json +++ b/packages/multistream-select/package.json @@ -52,23 
+52,18 @@ }, "dependencies": { "@libp2p/interface": "^2.10.5", + "@libp2p/utils": "^6.7.1", "it-length-prefixed": "^10.0.1", - "it-length-prefixed-stream": "^2.0.2", - "it-stream-types": "^2.0.2", - "p-defer": "^4.0.1", - "race-signal": "^1.1.3", - "uint8-varint": "^2.0.4", "uint8arraylist": "^2.4.8", "uint8arrays": "^5.1.0" }, "devDependencies": { - "@libp2p/logger": "^5.1.21", "aegir": "^47.0.14", "iso-random-stream": "^2.0.2", "it-all": "^3.0.8", "it-drain": "^3.0.9", - "it-pair": "^2.0.6", "it-pipe": "^3.0.1", + "p-event": "^6.0.1", "p-timeout": "^6.1.4" }, "sideEffects": false diff --git a/packages/multistream-select/src/handle.ts b/packages/multistream-select/src/handle.ts index c6494377e8..b82ee9c223 100644 --- a/packages/multistream-select/src/handle.ts +++ b/packages/multistream-select/src/handle.ts @@ -1,11 +1,11 @@ +import { lpStream } from '@libp2p/utils' import { encode } from 'it-length-prefixed' -import { lpStream } from 'it-length-prefixed-stream' import { Uint8ArrayList } from 'uint8arraylist' import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' import { MAX_PROTOCOL_LENGTH, PROTOCOL_ID } from './constants.js' -import * as multistream from './multistream.js' -import type { MultistreamSelectInit, ProtocolStream } from './index.js' -import type { Duplex } from 'it-stream-types' +import { readString } from './multistream.js' +import type { MultistreamSelectInit } from './index.js' +import type { MultiaddrConnection, MessageStream } from '@libp2p/interface' /** * Handle multistream protocol selections for the given list of protocols. @@ -53,34 +53,39 @@ import type { Duplex } from 'it-stream-types' * }) * ``` */ -export async function handle > (stream: Stream, protocols: string | string[], options: MultistreamSelectInit): Promise> { +export async function handle (stream: Stream, protocols: string | string[], options: MultistreamSelectInit = {}): Promise { protocols = Array.isArray(protocols) ? 
protocols : [protocols]
-  options.log.trace('handle: available protocols %s', protocols)
+
+  const log = stream.log.newScope('mss:handle')
+  log.trace('available protocols %s', protocols)

   const lp = lpStream(stream, {
     ...options,
     maxDataLength: MAX_PROTOCOL_LENGTH,
-    maxLengthLength: 2 // 2 bytes is enough to length-prefix MAX_PROTOCOL_LENGTH
+    maxLengthLength: 2, // 2 bytes is enough to length-prefix MAX_PROTOCOL_LENGTH
+    stopPropagation: true
   })

   while (true) {
-    options.log.trace('handle: reading incoming string')
-    const protocol = await multistream.readString(lp, options)
-    options.log.trace('handle: read "%s"', protocol)
+    log.trace('reading incoming string')
+    const protocol = await readString(lp, options)
+    log.trace('read "%s"', protocol)

     if (protocol === PROTOCOL_ID) {
-      options.log.trace('handle: respond with "%s" for "%s"', PROTOCOL_ID, protocol)
-      await multistream.write(lp, uint8ArrayFromString(`${PROTOCOL_ID}\n`), options)
-      options.log.trace('handle: responded with "%s" for "%s"', PROTOCOL_ID, protocol)
+      log.trace('respond with "%s" for "%s"', PROTOCOL_ID, protocol)
+      await lp.write(uint8ArrayFromString(`${PROTOCOL_ID}\n`), options)
+      log.trace('responded with "%s" for "%s"', PROTOCOL_ID, protocol)
       continue
     }

     if (protocols.includes(protocol)) {
-      options.log.trace('handle: respond with "%s" for "%s"', protocol, protocol)
-      await multistream.write(lp, uint8ArrayFromString(`${protocol}\n`), options)
-      options.log.trace('handle: responded with "%s" for "%s"', protocol, protocol)
+      log.trace('respond with "%s" for "%s"', protocol, protocol)
+      await lp.write(uint8ArrayFromString(`${protocol}\n`), options)
+      log.trace('responded with "%s" for "%s"', protocol, protocol)
+
+      lp.unwrap()

-      return { stream: lp.unwrap(), protocol }
+      return protocol
     }

     if (protocol === 'ls') {
@@ -90,14 +95,14 @@ export async function handle > (stream: Str
         uint8ArrayFromString('\n')
       )

-      options.log.trace('handle: respond with "%s" for %s', protocols, protocol)
-      await multistream.write(lp, protos, options)
-      options.log.trace('handle: responded with "%s" for %s', protocols, protocol)
+      log.trace('respond with "%s" for %s', protocols, protocol)
+      await lp.write(protos, options)
+      log.trace('responded with "%s" for %s', protocols, protocol)
       continue
     }

-    options.log.trace('handle: respond with "na" for "%s"', protocol)
-    await multistream.write(lp, uint8ArrayFromString('na\n'), options)
-    options.log('handle: responded with "na" for "%s"', protocol)
+    log.trace('respond with "na" for "%s"', protocol)
+    await lp.write(uint8ArrayFromString('na\n'), options)
+    log('responded with "na" for "%s"', protocol)
   }
 }
diff --git a/packages/multistream-select/src/index.ts b/packages/multistream-select/src/index.ts
index a4ffa25cc1..e4aa869d1c 100644
--- a/packages/multistream-select/src/index.ts
+++ b/packages/multistream-select/src/index.ts
@@ -21,17 +21,12 @@
  */
 import { PROTOCOL_ID } from './constants.js'
-import type { AbortOptions, LoggerOptions } from '@libp2p/interface'
-import type { LengthPrefixedStreamOpts } from 'it-length-prefixed-stream'
+import type { AbortOptions } from '@libp2p/interface'
+import type { LengthPrefixedStreamOpts } from '@libp2p/utils'

 export { PROTOCOL_ID }

-export interface ProtocolStream {
-  stream: Stream
-  protocol: string
-}
-
-export interface MultistreamSelectInit extends AbortOptions, LoggerOptions, Partial {
+export interface MultistreamSelectInit extends AbortOptions, Partial {
   /**
    * When false, and only a single protocol is being negotiated, use optimistic
    * select to send
both the protocol name and the first data buffer in the @@ -43,5 +38,4 @@ export interface MultistreamSelectInit extends AbortOptions, LoggerOptions, Part } export { select } from './select.js' -export type { SelectStream } from './select.js' export { handle } from './handle.js' diff --git a/packages/multistream-select/src/multistream.ts b/packages/multistream-select/src/multistream.ts index aa89a4f4cc..18729856e7 100644 --- a/packages/multistream-select/src/multistream.ts +++ b/packages/multistream-select/src/multistream.ts @@ -1,46 +1,21 @@ import { InvalidMessageError } from '@libp2p/interface' import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' import { toString as uint8ArrayToString } from 'uint8arrays/to-string' -import type { AbortOptions, LoggerOptions } from '@libp2p/interface' -import type { LengthPrefixedStream } from 'it-length-prefixed-stream' -import type { Duplex, Source } from 'it-stream-types' -import type { Uint8ArrayList } from 'uint8arraylist' +import type { AbortOptions } from '@libp2p/interface' +import type { LengthPrefixedStream } from '@libp2p/utils' const NewLine = uint8ArrayFromString('\n') /** - * `write` encodes and writes a single buffer - */ -export async function write (writer: LengthPrefixedStream, Source>>, buffer: Uint8Array | Uint8ArrayList, options?: AbortOptions): Promise { - await writer.write(buffer, options) -} - -/** - * `writeAll` behaves like `write`, except it encodes an array of items as a single write - */ -export async function writeAll (writer: LengthPrefixedStream, Source>>, buffers: Uint8Array[], options?: AbortOptions): Promise { - await writer.writeV(buffers, options) -} - -/** - * Read a length-prefixed buffer from the passed stream, stripping the final newline character + * Read a length-prefixed string from the passed stream, stripping the final newline character */ -export async function read (reader: LengthPrefixedStream, Source>>, options: AbortOptions & LoggerOptions): Promise { +export async function readString (reader: LengthPrefixedStream, options?: AbortOptions): Promise { const buf = await reader.read(options) + const arr = buf.subarray() - if (buf.byteLength === 0 || buf.get(buf.byteLength - 1) !== NewLine[0]) { - options.log.error('Invalid mss message - missing newline', buf) + if (arr.byteLength === 0 || arr[arr.length - 1] !== NewLine[0]) { throw new InvalidMessageError('Missing newline') } - return buf.sublist(0, -1) // Remove newline -} - -/** - * Read a length-prefixed string from the passed stream, stripping the final newline character - */ -export async function readString (reader: LengthPrefixedStream, Source>>, options: AbortOptions & LoggerOptions): Promise { - const buf = await read(reader, options) - - return uint8ArrayToString(buf.subarray()) + return uint8ArrayToString(arr).trimEnd() } diff --git a/packages/multistream-select/src/select.ts b/packages/multistream-select/src/select.ts index d877fd4a28..c118d4cfae 100644 --- a/packages/multistream-select/src/select.ts +++ b/packages/multistream-select/src/select.ts @@ -1,23 +1,11 @@ import { UnsupportedProtocolError } from '@libp2p/interface' -import { lpStream } from 'it-length-prefixed-stream' -import pDefer from 'p-defer' -import { raceSignal } from 'race-signal' -import * as varint from 'uint8-varint' -import { Uint8ArrayList } from 'uint8arraylist' +import { lpStream } from '@libp2p/utils' import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' import { MAX_PROTOCOL_LENGTH } from './constants.js' -import * as 
multistream from './multistream.js' +import { readString } from './multistream.js' import { PROTOCOL_ID } from './index.js' -import type { MultistreamSelectInit, ProtocolStream } from './index.js' -import type { AbortOptions } from '@libp2p/interface' -import type { Duplex } from 'it-stream-types' - -export interface SelectStream extends Duplex { - readStatus?: string - closeWrite?(options?: AbortOptions): Promise - closeRead?(options?: AbortOptions): Promise - close?(options?: AbortOptions): Promise -} +import type { MultistreamSelectInit } from './index.js' +import type { MessageStream } from '@libp2p/interface' /** * Negotiate a protocol to use from a list of protocols. @@ -62,16 +50,15 @@ export interface SelectStream extends Duplex { * // } * ``` */ -export async function select (stream: Stream, protocols: string | string[], options: MultistreamSelectInit): Promise> { +export async function select (stream: Stream, protocols: string | string[], options: MultistreamSelectInit = {}): Promise { protocols = Array.isArray(protocols) ? [...protocols] : [protocols] - if (protocols.length === 1 && options.negotiateFully === false) { - return optimisticSelect(stream, protocols[0], options) - } + const log = stream.log.newScope('mss:select') const lp = lpStream(stream, { ...options, - maxDataLength: MAX_PROTOCOL_LENGTH + maxDataLength: MAX_PROTOCOL_LENGTH, + stopPropagation: true }) const protocol = protocols.shift() @@ -79,287 +66,46 @@ export async function select (stream: Stream, prot throw new Error('At least one protocol must be specified') } - options.log.trace('select: write ["%s", "%s"]', PROTOCOL_ID, protocol) + log.trace('write ["%s", "%s"]', PROTOCOL_ID, protocol) const p1 = uint8ArrayFromString(`${PROTOCOL_ID}\n`) const p2 = uint8ArrayFromString(`${protocol}\n`) - await multistream.writeAll(lp, [p1, p2], options) + await lp.writeV([p1, p2], options) - options.log.trace('select: reading multistream-select header') - let response = await multistream.readString(lp, options) - options.log.trace('select: read "%s"', response) + log.trace('reading multistream-select header') + let response = await readString(lp, options) + log.trace('read "%s"', response) // Read the protocol response if we got the protocolId in return if (response === PROTOCOL_ID) { - options.log.trace('select: reading protocol response') - response = await multistream.readString(lp, options) - options.log.trace('select: read "%s"', response) + log.trace('reading protocol response') + response = await readString(lp, options) + log.trace('read "%s"', response) } // We're done if (response === protocol) { - return { stream: lp.unwrap(), protocol } + log.trace('selected "%s" as first offer', response) + lp.unwrap() + + return protocol } // We haven't gotten a valid ack, try the other protocols for (const protocol of protocols) { - options.log.trace('select: write "%s"', protocol) - await multistream.write(lp, uint8ArrayFromString(`${protocol}\n`), options) - options.log.trace('select: reading protocol response') - const response = await multistream.readString(lp, options) - options.log.trace('select: read "%s" for "%s"', response, protocol) - - if (response === protocol) { - return { stream: lp.unwrap(), protocol } - } - } - - throw new UnsupportedProtocolError('protocol selection failed') -} - -/** - * Optimistically negotiates a protocol. - * - * It *does not* block writes waiting for the other end to respond. Instead, it - * simply assumes the negotiation went successfully and starts writing data. 
- * - * Use when it is known that the receiver supports the desired protocol. - */ -function optimisticSelect (stream: Stream, protocol: string, options: MultistreamSelectInit): ProtocolStream { - const originalSink = stream.sink.bind(stream) - const originalSource = stream.source - - let negotiated = false - let negotiating = false - const doneNegotiating = pDefer() - - let sentProtocol = false - let sendingProtocol = false - const doneSendingProtocol = pDefer() - - let readProtocol = false - let readingProtocol = false - const doneReadingProtocol = pDefer() - - const lp = lpStream({ - sink: originalSink, - source: originalSource - }, { - ...options, - maxDataLength: MAX_PROTOCOL_LENGTH - }) - - stream.sink = async source => { - const { sink } = lp.unwrap() - - await sink(async function * () { - let sentData = false - - for await (const buf of source) { - // started reading before the source yielded, wait for protocol send - if (sendingProtocol) { - await doneSendingProtocol.promise - } - - // writing before reading, send the protocol and the first chunk of data - if (!sentProtocol) { - sendingProtocol = true - - options.log.trace('optimistic: write ["%s", "%s", data(%d)] in sink', PROTOCOL_ID, protocol, buf.byteLength) - - const protocolString = `${protocol}\n` - - // send protocols in first chunk of data written to transport - yield new Uint8ArrayList( - Uint8Array.from([19]), // length of PROTOCOL_ID plus newline - uint8ArrayFromString(`${PROTOCOL_ID}\n`), - varint.encode(protocolString.length), - uint8ArrayFromString(protocolString), - buf - ).subarray() - - options.log.trace('optimistic: wrote ["%s", "%s", data(%d)] in sink', PROTOCOL_ID, protocol, buf.byteLength) - - sentProtocol = true - sendingProtocol = false - doneSendingProtocol.resolve() - - // read the negotiation response but don't block more sending - negotiate() - .catch(err => { - options.log.error('could not finish optimistic protocol negotiation of %s', protocol, err) - }) - } else { - yield buf - } - - sentData = true - } - - // special case - the source passed to the sink has ended but we didn't - // negotiated the protocol yet so do it now - if (!sentData) { - await negotiate() - } - }()) - } - - async function negotiate (): Promise { - if (negotiating) { - options.log.trace('optimistic: already negotiating %s stream', protocol) - await doneNegotiating.promise - return - } - - negotiating = true + log.trace('write "%s"', protocol) + await lp.write(uint8ArrayFromString(`${protocol}\n`), options) - try { - // we haven't sent the protocol yet, send it now - if (!sentProtocol) { - options.log.trace('optimistic: doing send protocol for %s stream', protocol) - await doSendProtocol() - } + log.trace('reading protocol response') + const response = await readString(lp, options) + log.trace('read "%s" for "%s"', response, protocol) - // if we haven't read the protocol response yet, do it now - if (!readProtocol) { - options.log.trace('optimistic: doing read protocol for %s stream', protocol) - await doReadProtocol() - } - } finally { - negotiating = false - negotiated = true - doneNegotiating.resolve() - } - } - - async function doSendProtocol (): Promise { - if (sendingProtocol) { - await doneSendingProtocol.promise - return - } - - sendingProtocol = true - - try { - options.log.trace('optimistic: write ["%s", "%s", data] in source', PROTOCOL_ID, protocol) - await lp.writeV([ - uint8ArrayFromString(`${PROTOCOL_ID}\n`), - uint8ArrayFromString(`${protocol}\n`) - ]) - options.log.trace('optimistic: wrote ["%s", "%s", data] in 
source', PROTOCOL_ID, protocol) - } finally { - sentProtocol = true - sendingProtocol = false - doneSendingProtocol.resolve() - } - } - - async function doReadProtocol (): Promise { - if (readingProtocol) { - await doneReadingProtocol.promise - return - } - - readingProtocol = true - - try { - options.log.trace('optimistic: reading multistream select header') - let response = await multistream.readString(lp, options) - options.log.trace('optimistic: read multistream select header "%s"', response) - - if (response === PROTOCOL_ID) { - response = await multistream.readString(lp, options) - } - - options.log.trace('optimistic: read protocol "%s", expecting "%s"', response, protocol) - - if (response !== protocol) { - throw new UnsupportedProtocolError('protocol selection failed') - } - } finally { - readProtocol = true - readingProtocol = false - doneReadingProtocol.resolve() - } - } - - stream.source = (async function * () { - // make sure we've done protocol negotiation before we read stream data - await negotiate() - - options.log.trace('optimistic: reading data from "%s" stream', protocol) - yield * lp.unwrap().source - })() - - if (stream.closeRead != null) { - const originalCloseRead = stream.closeRead.bind(stream) - - stream.closeRead = async (opts) => { - // we need to read & write to negotiate the protocol so ensure we've done - // this before closing the readable end of the stream - if (!negotiated) { - await negotiate().catch(err => { - options.log.error('could not negotiate protocol before close read', err) - }) - } - - // protocol has been negotiated, ok to close the readable end - await originalCloseRead(opts) - } - } - - if (stream.closeWrite != null) { - const originalCloseWrite = stream.closeWrite.bind(stream) - - stream.closeWrite = async (opts) => { - // we need to read & write to negotiate the protocol so ensure we've done - // this before closing the writable end of the stream - if (!negotiated) { - await negotiate().catch(err => { - options.log.error('could not negotiate protocol before close write', err) - }) - } - - // protocol has been negotiated, ok to close the writable end - await originalCloseWrite(opts) - } - } - - if (stream.close != null) { - const originalClose = stream.close.bind(stream) - - stream.close = async (opts) => { - // if we are in the process of negotiation, let it finish before closing - // because we may have unsent early data - const tasks = [] - - if (sendingProtocol) { - tasks.push(doneSendingProtocol.promise) - } - - if (readingProtocol) { - tasks.push(doneReadingProtocol.promise) - } - - if (tasks.length > 0) { - // let the in-flight protocol negotiation finish gracefully - await raceSignal( - Promise.all(tasks), - opts?.signal - ) - } else { - // no protocol negotiation attempt has occurred so don't start one - negotiated = true - negotiating = false - doneNegotiating.resolve() - } + if (response === protocol) { + log.trace('selected "%s" after negotiation', response) + lp.unwrap() - // protocol has been negotiated, ok to close the writable end - await originalClose(opts) + return protocol } } - return { - stream, - protocol - } + throw new UnsupportedProtocolError('protocol selection failed') } diff --git a/packages/multistream-select/test/dialer.spec.ts b/packages/multistream-select/test/dialer.spec.ts index b0a86807b3..cba513df56 100644 --- a/packages/multistream-select/test/dialer.spec.ts +++ b/packages/multistream-select/test/dialer.spec.ts @@ -1,13 +1,11 @@ /* eslint-env mocha */ /* eslint max-nested-callbacks: ["error", 5] */ -import 
{ logger } from '@libp2p/logger' +import { streamPair } from '@libp2p/utils' import { expect } from 'aegir/chai' import randomBytes from 'iso-random-stream/src/random.js' import all from 'it-all' -import drain from 'it-drain' -import { duplexPair } from 'it-pair/duplex' -import { pipe } from 'it-pipe' +import { pEvent } from 'p-event' import pTimeout from 'p-timeout' import { Uint8ArrayList } from 'uint8arraylist' import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' @@ -17,72 +15,93 @@ describe('Dialer', () => { describe('dialer.select', () => { it('should select from single protocol on outgoing stream', async () => { const protocol = '/echo/1.0.0' - const [outgoingStream, incomingStream] = duplexPair() + const [outgoingStream, incomingStream] = await streamPair() - const handled = mss.handle(incomingStream, protocol, { - log: logger('mss:test-incoming') - }) + const handled = mss.handle(incomingStream, protocol) - const selection = await mss.select(outgoingStream, protocol, { - log: logger('mss:test-outgoing') - }) - expect(selection.protocol).to.equal(protocol) + const selection = await mss.select(outgoingStream, protocol) + expect(selection).to.equal(protocol) // Ensure stream is usable after selection - send data outgoing -> incoming const input = [randomBytes(10), randomBytes(64), randomBytes(3)] - void pipe(input, selection.stream, drain) + + for (const buf of input) { + if (!outgoingStream.send(buf)) { + await pEvent(outgoingStream, 'drain') + } + } + + await Promise.all([ + outgoingStream.closeWrite(), + incomingStream.closeWrite() + ]) + + const output = all(incomingStream) // wait for incoming end to have completed negotiation await handled - const output = await all(incomingStream.source) - expect(new Uint8ArrayList(...output).slice()).to.eql(new Uint8ArrayList(...input).slice()) + expect(new Uint8ArrayList(...(await output)).slice()).to.deep.equal(new Uint8ArrayList(...input).slice()) }) it('should select from single protocol on incoming stream', async () => { - const protocol = '/echo/1.0.0' - const [outgoingStream, incomingStream] = duplexPair() - const input = [randomBytes(10), randomBytes(64), randomBytes(3)] + const protocol = '/foo/1.0.0' + const [outgoingStream, incomingStream] = await streamPair() + const input = new Uint8ArrayList( + randomBytes(10), + randomBytes(64), + randomBytes(3) + ) + const receivedAllData = Promise.withResolvers() + + const [selection] = await Promise.all([ + mss.handle(incomingStream, protocol), + mss.select(outgoingStream, protocol, { + negotiateFully: false + }) + ]) + expect(selection).to.equal(protocol) - void mss.select(outgoingStream, protocol, { - log: logger('mss:test-outgoing'), - negotiateFully: false - }) + await outgoingStream.closeWrite() - // have to interact with the stream to start protocol negotiation - const outgoingSourceData = all(outgoingStream.source) + const output = new Uint8ArrayList() + outgoingStream.addEventListener('message', (evt) => { + output.append(evt.data) - const selection = await mss.handle(incomingStream, protocol, { - log: logger('mss:test-incoming') + if (output.byteLength === input.byteLength) { + receivedAllData.resolve() + } }) - expect(selection.protocol).to.equal(protocol) - - // Ensure stream is usable after selection - send data incoming -> outgoing - void pipe(input, selection.stream) + // ensure stream is usable after selection + // - send data incoming -> outgoing + for (const buf of input) { + if (!incomingStream.send(buf)) { + await pEvent(incomingStream, 'drain') + } 
+ } + + await Promise.all([ + pEvent(outgoingStream, 'close'), + incomingStream.closeWrite(), + receivedAllData.promise + ]) - const output = await outgoingSourceData - expect(new Uint8ArrayList(...output).slice()).to.eql(new Uint8ArrayList(...input).slice()) + expect(output.subarray()).to.equalBytes(input.subarray()) }) it('should fail to select twice', async () => { const protocol = '/echo/1.0.0' const protocol2 = '/echo/2.0.0' - const [outgoingStream, incomingStream] = duplexPair() + const [outgoingStream, incomingStream] = await streamPair() - void mss.handle(incomingStream, protocol, { - log: logger('mss:test-incoming') - }) + void mss.handle(incomingStream, protocol) - const selection = await mss.select(outgoingStream, protocol, { - log: logger('mss:test-outgoing') - }) - expect(selection.protocol).to.equal(protocol) + const selection = await mss.select(outgoingStream, protocol) + expect(selection).to.equal(protocol) // A second select will timeout - await pTimeout(mss.select(outgoingStream, [protocol, protocol2], { - log: logger('mss:test-outgoing') - }), { + await pTimeout(mss.select(outgoingStream, [protocol, protocol2]), { milliseconds: 1e3 }) .then(() => expect.fail('should have timed out'), (err) => { @@ -93,60 +112,64 @@ describe('Dialer', () => { it('should select from multiple protocols', async () => { const protocols = ['/echo/2.0.0', '/echo/1.0.0'] const selectedProtocol = protocols[protocols.length - 1] - const [outgoingStream, incomingStream] = duplexPair() + const [outgoingStream, incomingStream] = await streamPair() - void mss.handle(incomingStream, ['/nope/1.0.0', selectedProtocol], { - log: logger('mss:test-incoming') - }) + void mss.handle(incomingStream, ['/nope/1.0.0', selectedProtocol]) - const selection = await mss.select(outgoingStream, protocols, { - log: logger('mss:test-outgoing') - }) - expect(protocols).to.have.length(2) - expect(selection.protocol).to.equal(selectedProtocol) + const selection = await mss.select(outgoingStream, protocols) + expect(selection).to.equal(selectedProtocol) + + const output = all(incomingStream) // Ensure stream is usable after selection const input = [randomBytes(10), randomBytes(64), randomBytes(3)] - void pipe(input, selection.stream) - const output = await all(incomingStream.source) - expect(new Uint8ArrayList(...output).slice()).to.eql(new Uint8ArrayList(...input).slice()) + for (const buf of input) { + if (!outgoingStream.send(buf)) { + await pEvent(outgoingStream, 'drain') + } + } + + await Promise.all([ + outgoingStream.closeWrite(), + incomingStream.closeWrite() + ]) + + expect(new Uint8ArrayList(...(await output)).slice()).to.deep.equal(new Uint8ArrayList(...input).slice()) }) it('should throw if protocol selection fails', async () => { const protocol = ['/echo/2.0.0', '/echo/1.0.0'] - const [outgoingStream, incomingStream] = duplexPair() + const [outgoingStream, incomingStream] = await streamPair() - void mss.handle(incomingStream, ['/nope/1.0.0', '/still/nope/1.0.0'], { - log: logger('mss:test-incoming') - }) + void mss.handle(incomingStream, ['/nope/1.0.0', '/still/nope/1.0.0']) - await expect(mss.select(outgoingStream, protocol, { - log: logger('mss:test-outgoing') - })).to.eventually.be.rejected + await expect(mss.select(outgoingStream, protocol)).to.eventually.be.rejected .with.property('name', 'UnsupportedProtocolError') }) }) - describe('dialer optimistic select', () => { + describe.skip('dialer optimistic select', () => { it('should optimistically select a single protocol when negotiateFully is false', async 
() => { const protocol = '/echo/1.0.0' - const [outgoingStream, incomingStream] = duplexPair() + const [outgoingStream, incomingStream] = await streamPair() const selection = await mss.select(outgoingStream, [protocol], { - log: logger('mss:test-optimistic'), negotiateFully: false }) - expect(selection.protocol).to.equal(protocol) + expect(selection).to.equal(protocol) // Ensure stream is usable after selection const input = [randomBytes(10), randomBytes(64), randomBytes(3)] + const output = all(incomingStream) - const [, output] = await Promise.all([ - pipe(input, selection.stream), - all(incomingStream.source) - ]) + for (const buf of input) { + if (!outgoingStream.send(buf)) { + await pEvent(outgoingStream, 'drain') + } + } + await outgoingStream.closeWrite() - expect(new Uint8ArrayList(...output).subarray()).to.equalBytes(new Uint8ArrayList( + expect(new Uint8ArrayList(...(await output)).subarray()).to.equalBytes(new Uint8ArrayList( Uint8Array.from([19]), uint8ArrayFromString(`${mss.PROTOCOL_ID}\n`), Uint8Array.from([12]), @@ -157,23 +180,27 @@ describe('Dialer', () => { it('should not optimistically select a single protocol when negotiateFully is true', async () => { const protocols = ['/echo/1.0.0'] const selectedProtocol = protocols[protocols.length - 1] - const [outgoingStream, incomingStream] = duplexPair() + const [outgoingStream, incomingStream] = await streamPair() - void mss.handle(incomingStream, [selectedProtocol], { - log: logger('mss:test-incoming') - }) + void mss.handle(incomingStream, [selectedProtocol]) const selection = await mss.select(outgoingStream, protocols, { - log: logger('mss:test-un-optimistic'), negotiateFully: true }) - expect(selection.protocol).to.equal(selectedProtocol) + expect(selection).to.equal(selectedProtocol) // Ensure stream is usable after selection const input = [randomBytes(10), randomBytes(64), randomBytes(3)] - void pipe(input, selection.stream) - const output = await all(incomingStream.source) - expect(new Uint8ArrayList(...output).slice()).to.eql(new Uint8ArrayList(...input).slice()) + const output = all(incomingStream) + + for (const buf of input) { + if (!outgoingStream.send(buf)) { + await pEvent(outgoingStream, 'drain') + } + } + await outgoingStream.closeWrite() + + expect(new Uint8ArrayList(...(await output)).slice()).to.eql(new Uint8ArrayList(...input).slice()) }) }) }) diff --git a/packages/multistream-select/test/integration.spec.ts b/packages/multistream-select/test/integration.spec.ts index 008900a70c..549b9a4ec3 100644 --- a/packages/multistream-select/test/integration.spec.ts +++ b/packages/multistream-select/test/integration.spec.ts @@ -1,11 +1,11 @@ /* eslint-env mocha */ -import { logger } from '@libp2p/logger' +import { streamPair } from '@libp2p/utils' import { expect } from 'aegir/chai' import randomBytes from 'iso-random-stream/src/random.js' import all from 'it-all' -import { duplexPair } from 'it-pair/duplex' import { pipe } from 'it-pipe' +import { pEvent } from 'p-event' import { Uint8ArrayList } from 'uint8arraylist' import * as mss from '../src/index.js' @@ -13,234 +13,281 @@ describe('Dialer and Listener integration', () => { it('should handle and select', async () => { const protocols = ['/echo/2.0.0', '/echo/1.0.0'] const selectedProtocol = protocols[protocols.length - 1] - const pair = duplexPair() + const [outgoingStream, incomingStream] = await streamPair() const [dialerSelection, listenerSelection] = await Promise.all([ - mss.select(pair[0], protocols, { - log: logger('mss:test') - }), - mss.handle(pair[1], 
selectedProtocol, { - log: logger('mss:test') - }) + mss.select(outgoingStream, protocols), + mss.handle(incomingStream, selectedProtocol) ]) - expect(dialerSelection.protocol).to.equal(selectedProtocol) - expect(listenerSelection.protocol).to.equal(selectedProtocol) + expect(dialerSelection).to.equal(selectedProtocol) + expect(listenerSelection).to.equal(selectedProtocol) // Ensure stream is usable after selection const input = [new Uint8ArrayList(randomBytes(10), randomBytes(64), randomBytes(3))] const output = await Promise.all([ - pipe(input, dialerSelection.stream, async (source) => all(source)), - pipe(listenerSelection.stream, listenerSelection.stream) + (async function () { + for (const buf of input) { + if (!outgoingStream.send(buf)) { + await pEvent(outgoingStream, 'drain') + } + } + + await outgoingStream.closeWrite() + + return all(outgoingStream) + }()), + (async function () { + for await (const buf of incomingStream) { + if (!incomingStream.send(buf)) { + await pEvent(incomingStream, 'drain') + } + } + + await incomingStream.closeWrite() + }()) ]) - expect(new Uint8ArrayList(...output[0]).slice()).to.eql(new Uint8ArrayList(...input).slice()) + + expect(new Uint8ArrayList(...output[0]).slice()).to.deep.equal(new Uint8ArrayList(...input).slice()) }) it('should handle, ls and select', async () => { const protocols = ['/echo/2.0.0', '/echo/1.0.0'] const selectedProtocol = protocols[protocols.length - 1] - const pair = duplexPair() + const [outgoingStream, incomingStream] = await streamPair() const [listenerSelection, dialerSelection] = await Promise.all([ - mss.handle(pair[1], selectedProtocol, { - log: logger('mss:test') - }), - (async () => mss.select(pair[0], protocols, { - log: logger('mss:test') - }))() + mss.handle(incomingStream, selectedProtocol), + (async () => mss.select(outgoingStream, protocols))() ]) - expect(dialerSelection.protocol).to.equal(selectedProtocol) - expect(listenerSelection.protocol).to.equal(selectedProtocol) + expect(dialerSelection).to.equal(selectedProtocol) + expect(listenerSelection).to.equal(selectedProtocol) // Ensure stream is usable after selection const input = [new Uint8ArrayList(randomBytes(10), randomBytes(64), randomBytes(3))] const output = await Promise.all([ - pipe(input, dialerSelection.stream, async (source) => all(source)), - pipe(listenerSelection.stream, listenerSelection.stream) + (async function () { + for (const buf of input) { + outgoingStream.send(buf) + } + + await outgoingStream.closeWrite() + + return all(outgoingStream) + }()), + (async function () { + for await (const buf of incomingStream) { + incomingStream.send(buf) + } + + incomingStream.closeWrite() + }()) ]) - expect(new Uint8ArrayList(...output[0]).slice()).to.eql(new Uint8ArrayList(...input).slice()) + expect(new Uint8ArrayList(...output[0]).slice()).to.deep.equal(new Uint8ArrayList(...input).slice()) }) it('should handle and select with Uint8Array streams', async () => { const protocols = ['/echo/2.0.0', '/echo/1.0.0'] const selectedProtocol = protocols[protocols.length - 1] - const pair = duplexPair() + const [outgoingStream, incomingStream] = await streamPair() const [dialerSelection, listenerSelection] = await Promise.all([ - mss.select(pair[0], protocols, { - log: logger('mss:test') - }), - mss.handle(pair[1], selectedProtocol, { - log: logger('mss:test') - }) + mss.select(outgoingStream, protocols), + mss.handle(incomingStream, selectedProtocol) ]) - expect(dialerSelection.protocol).to.equal(selectedProtocol) - 
expect(listenerSelection.protocol).to.equal(selectedProtocol) + expect(dialerSelection).to.equal(selectedProtocol) + expect(listenerSelection).to.equal(selectedProtocol) // Ensure stream is usable after selection const input = [randomBytes(10), randomBytes(64), randomBytes(3)] const output = await Promise.all([ - pipe(input, dialerSelection.stream, async (source) => all(source)), - pipe(listenerSelection.stream, listenerSelection.stream) + (async function () { + for (const buf of input) { + outgoingStream.send(buf) + } + + outgoingStream.closeWrite() + + return all(outgoingStream) + }()), + (async function () { + for await (const buf of incomingStream) { + incomingStream.send(buf) + } + + incomingStream.closeWrite() + }()) ]) - expect(new Uint8ArrayList(...output[0]).slice()).to.eql(new Uint8ArrayList(...input).slice()) + expect(new Uint8ArrayList(...output[0]).slice()).to.deep.equal(new Uint8ArrayList(...input).slice()) }) - it('should handle and optimistically select', async () => { + it.skip('should handle and optimistically select', async () => { const protocol = '/echo/1.0.0' - const pair = duplexPair() + const [outgoingStream, incomingStream] = await streamPair() - const dialerSelection = await mss.select(pair[0], [protocol], { - log: logger('mss:test'), + const dialerSelection = await mss.select(outgoingStream, [protocol], { negotiateFully: false }) - expect(dialerSelection.protocol).to.equal(protocol) + expect(dialerSelection).to.equal(protocol) // Ensure stream is usable after selection const input = [randomBytes(10), randomBytes(64), randomBytes(3)] // Since the stream is lazy, we need to write to it before handling - const dialerOutPromise = pipe(input, dialerSelection.stream, async source => all(source)) + const dialerOutPromise = (async function () { + for (const buf of input) { + outgoingStream.send(buf) + } - const listenerSelection = await mss.handle(pair[1], protocol, { - log: logger('mss:test') - }) - expect(listenerSelection.protocol).to.equal(protocol) + outgoingStream.closeWrite() - await pipe(listenerSelection.stream, listenerSelection.stream) + return all(outgoingStream) + }()) + + const listenerSelection = await mss.handle(incomingStream, protocol) + expect(listenerSelection).to.equal(protocol) + + await (async function () { + for await (const buf of incomingStream) { + incomingStream.send(buf) + } + + incomingStream.closeWrite() + }()) const dialerOut = await dialerOutPromise - expect(new Uint8ArrayList(...dialerOut).slice()).to.eql(new Uint8ArrayList(...input).slice()) + expect(new Uint8ArrayList(...dialerOut).slice()).to.deep.equal(new Uint8ArrayList(...input).slice()) }) - it('should handle and optimistically select that fails', async () => { + it.skip('should handle and optimistically select that fails', async () => { const protocol = '/echo/1.0.0' const otherProtocol = '/echo/2.0.0' - const pair = duplexPair() + const [outgoingStream, incomingStream] = await streamPair() - const dialerSelection = await mss.select(pair[0], [protocol], { - log: logger('mss:test'), + const dialerSelection = await mss.select(outgoingStream, [protocol], { negotiateFully: false }) - expect(dialerSelection.protocol).to.equal(protocol) + expect(dialerSelection).to.equal(protocol) // the listener handles the incoming stream - void mss.handle(pair[1], otherProtocol, { - log: logger('mss:test') - }).catch(() => {}) + void mss.handle(incomingStream, otherProtocol).catch(() => {}) // should fail when we interact with the stream const input = [randomBytes(10), randomBytes(64), 
randomBytes(3)] - await expect(pipe(input, dialerSelection.stream, async source => all(source))) + await expect(async function () { + for (const buf of input) { + outgoingStream.send(buf) + } + + outgoingStream.closeWrite() + + return all(outgoingStream) + }()) .to.eventually.be.rejected.with.property('name', 'UnsupportedProtocolError') }) - it('should handle and optimistically select only by reading', async () => { + it.skip('should handle and optimistically select only by reading', async () => { const protocol = '/echo/1.0.0' - const pair = duplexPair() + const [outgoingStream, incomingStream] = await streamPair() - const dialerSelection = await mss.select(pair[0], [protocol], { - log: logger('mss:dialer'), + const dialerSelection = await mss.select(outgoingStream, [protocol], { negotiateFully: false }) - expect(dialerSelection.protocol).to.equal(protocol) + expect(dialerSelection).to.equal(protocol) // ensure stream is usable after selection const input = [randomBytes(10), randomBytes(64), randomBytes(3)] const [, dialerOut] = await Promise.all([ // the listener handles the incoming stream - mss.handle(pair[1], protocol, { - log: logger('mss:listener') - }).then(async result => { + mss.handle(incomingStream, protocol).then(async result => { // the listener writes to the incoming stream - await pipe(input, result.stream) + for (const buf of input) { + incomingStream.send(buf) + } }), // the dialer just reads from the stream - pipe(dialerSelection.stream, async source => all(source)) + pipe(outgoingStream, async source => all(source)) ]) - expect(new Uint8ArrayList(...dialerOut).slice()).to.eql(new Uint8ArrayList(...input).slice()) + expect(new Uint8ArrayList(...dialerOut).slice()).to.deep.equal(new Uint8ArrayList(...input).slice()) }) - it('should handle and optimistically select only by writing', async () => { + it.skip('should handle and optimistically select only by writing', async () => { const protocol = '/echo/1.0.0' - const pair = duplexPair() + const [outgoingStream, incomingStream] = await streamPair() - const dialerSelection = await mss.select(pair[0], [protocol], { - log: logger('mss:dialer'), + const dialerSelection = await mss.select(outgoingStream, [protocol], { negotiateFully: false }) - expect(dialerSelection.protocol).to.equal(protocol) + expect(dialerSelection).to.equal(protocol) // ensure stream is usable after selection const input = [randomBytes(10), randomBytes(64), randomBytes(3)] const [listenerOut] = await Promise.all([ // the listener handles the incoming stream - mss.handle(pair[1], protocol, { - log: logger('mss:listener') - }).then(async result => { + mss.handle(incomingStream, protocol).then(async result => { // the listener reads from the incoming stream - return pipe(result.stream, async source => all(source)) + return pipe(incomingStream, async source => all(source)) }), Promise.resolve().then(async () => { // the dialer just writes to the stream - await pair[0].sink(async function * () { - yield * input - }()) + for (const buf of input) { + outgoingStream.send(buf) + } }) ]) - expect(new Uint8ArrayList(...listenerOut).slice()).to.eql(new Uint8ArrayList(...input).slice()) + expect(new Uint8ArrayList(...listenerOut).slice()).to.deep.equal(new Uint8ArrayList(...input).slice()) }) - it('should handle and optimistically select only by reading that fails', async () => { + it.skip('should handle and optimistically select only by reading that fails', async () => { const protocol = '/echo/1.0.0' const otherProtocol = '/echo/2.0.0' - const pair = duplexPair() + 
const [outgoingStream, incomingStream] = await streamPair() // lazy succeeds - const dialerSelection = await mss.select(pair[0], [protocol], { - log: logger('mss:dialer'), + const dialerSelection = await mss.select(outgoingStream, [protocol], { negotiateFully: false }) - expect(dialerSelection.protocol).to.equal(protocol) + expect(dialerSelection).to.equal(protocol) // the listener handles the incoming stream - void mss.handle(pair[1], otherProtocol, { - log: logger('mss:listener') - }) + void mss.handle(incomingStream, otherProtocol) // should fail when we interact with the stream - await expect(pipe(dialerSelection.stream, async source => all(source))) + await expect(pipe(outgoingStream, async source => all(source))) .to.eventually.be.rejected.with.property('name', 'UnsupportedProtocolError') }) - it('should abort an unhandled optimistically select', async () => { + it.skip('should abort an unhandled optimistically select', async () => { const protocol = '/echo/1.0.0' - const pair = duplexPair() + const [outgoingStream, incomingStream] = await streamPair() - const dialerSelection = await mss.select(pair[0], [protocol], { - log: logger('mss:test'), + const dialerSelection = await mss.select(outgoingStream, [protocol], { negotiateFully: false }) - expect(dialerSelection.protocol).to.equal(protocol) + expect(dialerSelection).to.equal(protocol) // Ensure stream is usable after selection const input = [randomBytes(10), randomBytes(64), randomBytes(3)] // Since the stream is lazy, we need to write to it before handling - const dialerResultPromise = pipe(input, dialerSelection.stream, async source => all(source)) + const dialerResultPromise = (async function () { + for (const buf of input) { + outgoingStream.send(buf) + } + + return all(outgoingStream) + }()) // The error message from this varies depending on how much data got // written when the dialer receives the `na` response and closes the // stream, so we just assert that this rejects. 
- await expect(mss.handle(pair[1], '/unhandled/1.0.0', { - log: logger('mss:test') - })).to.eventually.be.rejected() + await expect(mss.handle(incomingStream, '/unhandled/1.0.0')).to.eventually.be.rejected() // Dialer should fail to negotiate the single protocol await expect(dialerResultPromise).to.eventually.be.rejected() diff --git a/packages/multistream-select/test/listener.spec.ts b/packages/multistream-select/test/listener.spec.ts index 102a2f1104..1d906ba2f4 100644 --- a/packages/multistream-select/test/listener.spec.ts +++ b/packages/multistream-select/test/listener.spec.ts @@ -1,11 +1,9 @@ /* eslint-env mocha */ -import { logger } from '@libp2p/logger' +import { lpStream, streamPair } from '@libp2p/utils' import { expect } from 'aegir/chai' import randomBytes from 'iso-random-stream/src/random.js' import drain from 'it-drain' -import { lpStream } from 'it-length-prefixed-stream' -import { duplexPair } from 'it-pair/duplex' import { Uint8ArrayList } from 'uint8arraylist' import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' import * as mss from '../src/index.js' @@ -16,9 +14,8 @@ describe('Listener', () => { const protocol = '/echo/1.0.0' const input = [randomBytes(10), randomBytes(64), randomBytes(3)] - const duplexes = duplexPair() - const outputStream = lpStream(duplexes[1]) - void drain(duplexes[1].source) + const [outgoingStream, incomingStream] = await streamPair() + const outputStream = lpStream(incomingStream) void outputStream.writeV([ uint8ArrayFromString(mss.PROTOCOL_ID + '\n'), @@ -26,24 +23,22 @@ describe('Listener', () => { ...input ]) - const selection = await mss.handle(duplexes[0], protocol, { - log: logger('mss:test') - }) - expect(selection.protocol).to.equal(protocol) + const selection = await mss.handle(outgoingStream, protocol) + expect(selection).to.equal(protocol) - const inputStream = lpStream(selection.stream) - await expect(inputStream.read()).to.eventually.eql(new Uint8ArrayList(input[0])) - await expect(inputStream.read()).to.eventually.eql(new Uint8ArrayList(input[1])) - await expect(inputStream.read()).to.eventually.eql(new Uint8ArrayList(input[2])) + const inputStream = lpStream(outgoingStream) + await expect(inputStream.read()).to.eventually.deep.equal(new Uint8ArrayList(input[0])) + await expect(inputStream.read()).to.eventually.deep.equal(new Uint8ArrayList(input[1])) + await expect(inputStream.read()).to.eventually.deep.equal(new Uint8ArrayList(input[2])) }) it('should reject unhandled protocols', async () => { const protocol = '/echo/1.0.0' const input = [randomBytes(10), randomBytes(64), randomBytes(3)] - const duplexes = duplexPair() - const outputStream = lpStream(duplexes[1]) - void drain(duplexes[1].source) + const [outgoingStream, incomingStream] = await streamPair() + const outputStream = lpStream(incomingStream) + void drain(incomingStream) void outputStream.writeV([ uint8ArrayFromString(mss.PROTOCOL_ID + '\n'), @@ -53,24 +48,21 @@ describe('Listener', () => { ...input ]) - const selection = await mss.handle(duplexes[0], protocol, { - log: logger('mss:test') - }) - expect(selection.protocol).to.equal(protocol) + const selection = await mss.handle(outgoingStream, protocol) + expect(selection).to.equal(protocol) - const inputStream = lpStream(selection.stream) - await expect(inputStream.read()).to.eventually.eql(new Uint8ArrayList(input[0])) - await expect(inputStream.read()).to.eventually.eql(new Uint8ArrayList(input[1])) - await expect(inputStream.read()).to.eventually.eql(new Uint8ArrayList(input[2])) + const 
inputStream = lpStream(outgoingStream) + await expect(inputStream.read()).to.eventually.deep.equal(new Uint8ArrayList(input[0])) + await expect(inputStream.read()).to.eventually.deep.equal(new Uint8ArrayList(input[1])) + await expect(inputStream.read()).to.eventually.deep.equal(new Uint8ArrayList(input[2])) }) it('should reject when unsupported protocols are ignored', async () => { const protocol = '/echo/1.0.0' const input = [randomBytes(10), randomBytes(64), randomBytes(3)] - const duplexes = duplexPair() - const outputStream = lpStream(duplexes[1]) - void drain(duplexes[1].source) + const [outgoingStream, incomingStream] = await streamPair() + const outputStream = lpStream(incomingStream) void outputStream.writeV([ uint8ArrayFromString(mss.PROTOCOL_ID + '\n'), @@ -78,18 +70,16 @@ describe('Listener', () => { ...input ]) - await expect(mss.handle(duplexes[0], protocol, { - log: logger('mss:test') - })).to.eventually.be.rejected() + await expect(mss.handle(outgoingStream, protocol)).to.eventually.be.rejected() }) it('should handle ls', async () => { const protocol = '/echo/1.0.0' const input = [randomBytes(10), randomBytes(64), randomBytes(3)] - const duplexes = duplexPair() - const outputStream = lpStream(duplexes[1]) - void drain(duplexes[1].source) + const [outgoingStream, incomingStream] = await streamPair() + const outputStream = lpStream(incomingStream) + void drain(incomingStream) void outputStream.writeV([ uint8ArrayFromString(mss.PROTOCOL_ID + '\n'), @@ -98,15 +88,13 @@ describe('Listener', () => { ...input ]) - const selection = await mss.handle(duplexes[0], protocol, { - log: logger('mss:test') - }) - expect(selection.protocol).to.equal(protocol) + const selection = await mss.handle(outgoingStream, protocol) + expect(selection).to.equal(protocol) - const inputStream = lpStream(selection.stream) - await expect(inputStream.read()).to.eventually.eql(new Uint8ArrayList(input[0])) - await expect(inputStream.read()).to.eventually.eql(new Uint8ArrayList(input[1])) - await expect(inputStream.read()).to.eventually.eql(new Uint8ArrayList(input[2])) + const inputStream = lpStream(outgoingStream) + await expect(inputStream.read()).to.eventually.deep.equal(new Uint8ArrayList(input[0])) + await expect(inputStream.read()).to.eventually.deep.equal(new Uint8ArrayList(input[1])) + await expect(inputStream.read()).to.eventually.deep.equal(new Uint8ArrayList(input[2])) }) }) }) diff --git a/packages/multistream-select/test/multistream.spec.ts b/packages/multistream-select/test/multistream.spec.ts index b4c0efb240..db163f5bfe 100644 --- a/packages/multistream-select/test/multistream.spec.ts +++ b/packages/multistream-select/test/multistream.spec.ts @@ -1,58 +1,37 @@ /* eslint-env mocha */ /* eslint max-nested-callbacks: ["error", 6] */ -import { logger } from '@libp2p/logger' +import { streamPair, lpStream } from '@libp2p/utils' import { expect } from 'aegir/chai' -import { lpStream } from 'it-length-prefixed-stream' -import { duplexPair } from 'it-pair/duplex' import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' -import * as Multistream from '../src/multistream.js' +import { readString } from '../src/multistream.js' describe('Multistream', () => { - describe('Multistream.write', () => { - it('should encode and write a multistream-select message', async () => { - const input = uint8ArrayFromString(`TEST${Date.now()}`) - const duplexes = duplexPair() - const inputStream = lpStream(duplexes[0]) - const outputStream = lpStream(duplexes[1]) - - void 
Multistream.write(inputStream, input) - - const output = await outputStream.read() - expect(output.subarray()).to.equalBytes(input) - }) - }) - describe('Multistream.read', () => { it('should decode a multistream-select message', async () => { const input = `TEST${Date.now()}` - const inputBuf = uint8ArrayFromString(input) - const duplexes = duplexPair() - const inputStream = lpStream(duplexes[0]) - const outputStream = lpStream(duplexes[1]) + const [outgoingStream, incomingStream] = await streamPair() + const inputStream = lpStream(outgoingStream) + const outputStream = lpStream(incomingStream) void inputStream.write(uint8ArrayFromString(`${input}\n`)) - const output = await Multistream.read(outputStream, { - log: logger('mss:test') - }) - expect(output.subarray()).to.equalBytes(inputBuf) + const output = await readString(outputStream) + expect(output).to.equal(input) }) it('should throw for non-newline delimited message', async () => { const input = `TEST${Date.now()}` const inputBuf = uint8ArrayFromString(input) - const duplexes = duplexPair() - const inputStream = lpStream(duplexes[0]) - const outputStream = lpStream(duplexes[1]) + const [outgoingStream, incomingStream] = await streamPair() + const inputStream = lpStream(outgoingStream) + const outputStream = lpStream(incomingStream) void inputStream.write(inputBuf) - await expect(Multistream.read(outputStream, { - log: logger('mss:test') - })).to.eventually.be.rejected() + await expect(readString(outputStream)).to.eventually.be.rejected() .with.property('name', 'InvalidMessageError') }) @@ -60,32 +39,28 @@ describe('Multistream', () => { const input = new Uint8Array(10000) input[input.length - 1] = '\n'.charCodeAt(0) - const duplexes = duplexPair() - const inputStream = lpStream(duplexes[0]) - const outputStream = lpStream(duplexes[1], { + const [outgoingStream, incomingStream] = await streamPair() + const inputStream = lpStream(outgoingStream) + const outputStream = lpStream(incomingStream, { maxDataLength: 9999 }) void inputStream.write(input) - await expect(Multistream.read(outputStream, { - log: logger('mss:test') - })).to.eventually.be.rejected() + await expect(readString(outputStream)).to.eventually.be.rejected() .with.property('name', 'InvalidDataLengthError') }) it('should throw for a 0-length message', async () => { const input = new Uint8Array(0) - const duplexes = duplexPair() - const inputStream = lpStream(duplexes[0]) - const outputStream = lpStream(duplexes[1]) + const [outgoingStream, incomingStream] = await streamPair() + const inputStream = lpStream(outgoingStream) + const outputStream = lpStream(incomingStream) void inputStream.write(input) - await expect(Multistream.read(outputStream, { - log: logger('mss:test') - })).to.eventually.be.rejected() + await expect(readString(outputStream)).to.eventually.be.rejected() .with.property('name', 'InvalidMessageError') }) @@ -96,15 +71,14 @@ describe('Multistream', () => { const controller = new AbortController() controller.abort() - const duplexes = duplexPair() - const inputStream = lpStream(duplexes[0]) - const outputStream = lpStream(duplexes[1]) + const [outgoingStream, incomingStream] = await streamPair() + const inputStream = lpStream(outgoingStream) + const outputStream = lpStream(incomingStream) void inputStream.write(inputBuf) - await expect(Multistream.read(outputStream, { - signal: controller.signal, - log: logger('mss:test') + await expect(readString(outputStream, { + signal: controller.signal })).to.eventually.be.rejected.with.property('name', 'AbortError') }) }) 
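The multistream.spec.ts changes above show the core of this migration in isolation: `duplexPair`/`it-length-prefixed-stream` pairs are replaced by `streamPair`/`lpStream` from `@libp2p/utils`, and the object-returning `Multistream.read` becomes a `readString` helper that resolves directly to the decoded string. Below is a minimal sketch of that pattern as the updated tests exercise it (assuming `streamPair` and `lpStream` are exported from `@libp2p/utils` as used in this diff, and that `readString` is the internal helper imported from `../src/multistream.js`; this is an illustrative sketch, not a definitive API reference):

```ts
import { streamPair, lpStream } from '@libp2p/utils'
import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string'
import { readString } from '../src/multistream.js'

// create an in-memory pair of connected message streams
const [outgoingStream, incomingStream] = await streamPair()

// wrap both ends in length-prefixed readers/writers
const writer = lpStream(outgoingStream)
const reader = lpStream(incomingStream)

// multistream-select messages are newline-terminated strings
void writer.write(uint8ArrayFromString('/echo/1.0.0\n'))

// readString strips the trailing newline and returns the string,
// rejecting with InvalidMessageError when the delimiter is missing
const protocol = await readString(reader)
// protocol === '/echo/1.0.0'
```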
diff --git a/packages/peer-collections/src/filter.ts b/packages/peer-collections/src/filter.ts index b17b21c3a9..0a37c5b994 100644 --- a/packages/peer-collections/src/filter.ts +++ b/packages/peer-collections/src/filter.ts @@ -1,6 +1,6 @@ -import { createScalableCuckooFilter } from '@libp2p/utils/filters' +import { createScalableCuckooFilter } from '@libp2p/utils' import type { PeerId } from '@libp2p/interface' -import type { Filter } from '@libp2p/utils/filters' +import type { Filter } from '@libp2p/utils' /** * Uses a Cuckoo filter to implement a mechanism for deduplicating PeerIds in a diff --git a/packages/peer-discovery-mdns/src/query.ts b/packages/peer-discovery-mdns/src/query.ts index edb780cbe3..6222884e45 100644 --- a/packages/peer-discovery-mdns/src/query.ts +++ b/packages/peer-discovery-mdns/src/query.ts @@ -1,5 +1,5 @@ import { peerIdFromString } from '@libp2p/peer-id' -import { isPrivate } from '@libp2p/utils/multiaddr/is-private' +import { isPrivate } from '@libp2p/utils' import { multiaddr, protocols } from '@multiformats/multiaddr' import type { LoggerOptions, PeerInfo } from '@libp2p/interface' import type { Multiaddr } from '@multiformats/multiaddr' diff --git a/packages/peer-record/package.json b/packages/peer-record/package.json index 8e38b15e6a..077e4d6c29 100644 --- a/packages/peer-record/package.json +++ b/packages/peer-record/package.json @@ -51,7 +51,6 @@ "@libp2p/crypto": "^5.1.7", "@libp2p/interface": "^2.10.5", "@libp2p/peer-id": "^5.1.8", - "@libp2p/utils": "^6.7.1", "@multiformats/multiaddr": "^12.4.4", "multiformats": "^13.3.6", "protons-runtime": "^5.5.0", diff --git a/packages/peer-record/src/peer-record/index.ts b/packages/peer-record/src/peer-record/index.ts index 9fda0214af..ef6688d98d 100644 --- a/packages/peer-record/src/peer-record/index.ts +++ b/packages/peer-record/src/peer-record/index.ts @@ -1,5 +1,4 @@ import { peerIdFromMultihash } from '@libp2p/peer-id' -import { arrayEquals } from '@libp2p/utils/array-equals' import { multiaddr } from '@multiformats/multiaddr' import * as Digest from 'multiformats/hashes/digest' import { @@ -7,6 +6,7 @@ import { ENVELOPE_PAYLOAD_TYPE_PEER_RECORD } from './consts.js' import { PeerRecord as Protobuf } from './peer-record.js' +import { arrayEquals } from './utils.ts' import type { PeerId } from '@libp2p/interface' import type { Multiaddr } from '@multiformats/multiaddr' import type { Uint8ArrayList } from 'uint8arraylist' diff --git a/packages/peer-record/src/peer-record/utils.ts b/packages/peer-record/src/peer-record/utils.ts new file mode 100644 index 0000000000..91fb4f333e --- /dev/null +++ b/packages/peer-record/src/peer-record/utils.ts @@ -0,0 +1,15 @@ +/** + * Verify if two arrays of non primitive types with the "equals" function are equal. + * Compatible with multiaddr, peer-id and others. 
+ */ +export function arrayEquals (a: any[], b: any[]): boolean { + const sort = (a: any, b: any): number => a.toString().localeCompare(b.toString()) + + if (a.length !== b.length) { + return false + } + + b.sort(sort) + + return a.sort(sort).every((item, index) => b[index].equals(item)) +} diff --git a/packages/utils/test/array-equals.spec.ts b/packages/peer-record/test/peer-record/utils.spec.ts similarity index 96% rename from packages/utils/test/array-equals.spec.ts rename to packages/peer-record/test/peer-record/utils.spec.ts index 7b50b76071..c375db4622 100644 --- a/packages/utils/test/array-equals.spec.ts +++ b/packages/peer-record/test/peer-record/utils.spec.ts @@ -2,7 +2,7 @@ import { multiaddr } from '@multiformats/multiaddr' import { expect } from 'aegir/chai' -import { arrayEquals } from '../src/array-equals.js' +import { arrayEquals } from '../../src/peer-record/utils.js' describe('non primitive array equals', () => { it('returns true if two arrays of multiaddrs are equal', () => { diff --git a/packages/pnet/package.json b/packages/pnet/package.json index 34ebe65e27..e5fc35b983 100644 --- a/packages/pnet/package.json +++ b/packages/pnet/package.json @@ -45,23 +45,15 @@ "dependencies": { "@libp2p/crypto": "^5.1.7", "@libp2p/interface": "^2.10.5", - "it-byte-stream": "^2.0.2", - "it-map": "^3.1.3", - "it-pair": "^2.0.6", - "it-pipe": "^3.0.1", - "it-stream-types": "^2.0.2", + "@libp2p/utils": "^6.7.1", "uint8arraylist": "^2.4.8", "uint8arrays": "^5.1.0", "xsalsa20": "^1.2.0" }, "devDependencies": { - "@libp2p/interface-compliance-tests": "^6.4.16", - "@libp2p/logger": "^5.1.21", - "@libp2p/peer-id": "^5.1.8", - "@multiformats/multiaddr": "^12.4.4", "@types/xsalsa20": "^1.1.3", "aegir": "^47.0.14", - "it-all": "^3.0.8" + "race-event": "^1.6.1" }, "sideEffects": false } diff --git a/packages/pnet/src/crypto.ts b/packages/pnet/src/crypto.ts index a8083765a7..87384dabe2 100644 --- a/packages/pnet/src/crypto.ts +++ b/packages/pnet/src/crypto.ts @@ -1,35 +1,102 @@ +import { StreamMessageEvent } from '@libp2p/interface' +import { AbstractMessageStream } from '@libp2p/utils' import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' import { toString as uint8ArrayToString } from 'uint8arrays/to-string' import xsalsa20 from 'xsalsa20' import * as Errors from './errors.js' import { KEY_LENGTH } from './key-generator.js' -import type { Source } from 'it-stream-types' +import type { AbortOptions, MultiaddrConnection } from '@libp2p/interface' +import type { MessageStreamInit, SendResult } from '@libp2p/utils' import type { Uint8ArrayList } from 'uint8arraylist' -/** - * Creates a stream iterable to encrypt messages in a private network - */ -export function createBoxStream (nonce: Uint8Array, psk: Uint8Array): (source: Source) => AsyncGenerator { - const xor = xsalsa20(nonce, psk) - - return (source: Source) => (async function * () { - for await (const chunk of source) { - yield Uint8Array.from(xor.update(chunk.subarray())) - } - })() +export interface BoxMessageStreamInit extends MessageStreamInit { + maConn: MultiaddrConnection + localNonce: Uint8Array + remoteNonce: Uint8Array + psk: Uint8Array } -/** - * Creates a stream iterable to decrypt messages in a private network - */ -export function createUnboxStream (nonce: Uint8Array, psk: Uint8Array) { - return (source: Source) => (async function * () { - const xor = xsalsa20(nonce, psk) +export class BoxMessageStream extends AbstractMessageStream { + private maConn: MultiaddrConnection + private inboundXor: xsalsa20.Xor + 
private outboundXor: xsalsa20.Xor + + constructor (init: BoxMessageStreamInit) { + super(init) + + this.inboundXor = xsalsa20(init.remoteNonce, init.psk) + this.outboundXor = xsalsa20(init.localNonce, init.psk) + this.maConn = init.maConn + + this.maConn.addEventListener('message', (evt) => { + const data = evt.data + + if (data instanceof Uint8Array) { + this.dispatchEvent(new StreamMessageEvent(this.inboundXor.update(data))) + } else { + for (const buf of data) { + this.dispatchEvent(new StreamMessageEvent(this.inboundXor.update(buf))) + } + } + }) + + this.maConn.addEventListener('close', (evt) => { + if (evt.local) { + if (evt.error != null) { + this.abort(evt.error) + } else { + this.close() + .catch(() => {}) + } + } else { + if (evt.error != null) { + this.onRemoteReset() + } else { + this.onRemoteCloseWrite() + } + } + }) + + this.maConn.addEventListener('remoteCloseWrite', () => { + this.safeDispatchEvent('remoteCloseWrite') + }) + this.maConn.addEventListener('remoteCloseRead', () => { + this.safeDispatchEvent('remoteCloseRead') + }) + this.maConn.addEventListener('closeWrite', () => { + this.safeDispatchEvent('closeWrite') + }) + this.maConn.addEventListener('closeRead', () => { + this.safeDispatchEvent('closeRead') + }) + } - for await (const chunk of source) { - yield Uint8Array.from(xor.update(chunk.subarray())) + async sendCloseWrite (options?: AbortOptions): Promise { + await this.maConn.closeWrite(options) + } + + async sendCloseRead (options?: AbortOptions): Promise { + options?.signal?.throwIfAborted() + } + + sendData (data: Uint8ArrayList): SendResult { + return { + sentBytes: data.byteLength, + canSendMore: this.maConn.send(this.outboundXor.update(data.subarray())) } - })() + } + + sendReset (err: Error): void { + this.maConn.abort(err) + } + + sendPause (): void { + this.maConn.pause() + } + + sendResume (): void { + this.maConn.resume() + } } /** diff --git a/packages/pnet/src/index.ts b/packages/pnet/src/index.ts index 1cda79c57f..820f01dc9e 100644 --- a/packages/pnet/src/index.ts +++ b/packages/pnet/src/index.ts @@ -58,18 +58,10 @@ import { randomBytes } from '@libp2p/crypto' import { InvalidParametersError } from '@libp2p/interface' -import { byteStream } from 'it-byte-stream' -import map from 'it-map' -import { duplexPair } from 'it-pair/duplex' -import { pipe } from 'it-pipe' -import { - createBoxStream, - createUnboxStream, - decodeV1PSK -} from './crypto.js' +import { byteStream } from '@libp2p/utils' +import { BoxMessageStream, decodeV1PSK } from './crypto.js' import { NONCE_LENGTH } from './key-generator.js' -import type { ComponentLogger, Logger, ConnectionProtector, MultiaddrConnection } from '@libp2p/interface' -import type { Uint8ArrayList } from 'uint8arraylist' +import type { ComponentLogger, ConnectionProtector, MultiaddrConnection, MessageStream, AbortOptions } from '@libp2p/interface' export { generateKey } from './key-generator.js' @@ -93,7 +85,6 @@ export interface ProtectorComponents { class PreSharedKeyConnectionProtector implements ConnectionProtector { public tag: string - private readonly log: Logger private readonly psk: Uint8Array private readonly timeout: number @@ -101,8 +92,7 @@ class PreSharedKeyConnectionProtector implements ConnectionProtector { * Takes a Private Shared Key (psk) and provides a `protect` method * for wrapping existing connections in a private encryption stream. 
*/ - constructor (components: ProtectorComponents, init: ProtectorInit) { - this.log = components.logger.forComponent('libp2p:pnet') + constructor (init: ProtectorInit) { this.timeout = init.timeout ?? 1000 const decodedPSK = decodeV1PSK(init.psk) @@ -117,54 +107,49 @@ class PreSharedKeyConnectionProtector implements ConnectionProtector { * between its two peers from the PSK the Protector instance was * created with. */ - async protect (connection: MultiaddrConnection): Promise { + async protect (connection: MultiaddrConnection, options?: AbortOptions): Promise { if (connection == null) { throw new InvalidParametersError('No connection for the handshake provided') } // Exchange nonces - this.log('protecting the connection') + const log = connection.log.newScope('pnet') + log('protecting the connection') const localNonce = randomBytes(NONCE_LENGTH) - const signal = AbortSignal.timeout(this.timeout) + if (options == null) { + options = { + signal: AbortSignal.timeout(this.timeout) + } + } const bytes = byteStream(connection) const [ - , result + result ] = await Promise.all([ - bytes.write(localNonce, { - signal - }), bytes.read({ bytes: NONCE_LENGTH, - signal - }) + ...options + }), + bytes.write(localNonce, options) ]) const remoteNonce = result.subarray() // Create the boxing/unboxing pipe - this.log('exchanged nonces') - const [internal, external] = duplexPair() - pipe( - external, - // Encrypt all outbound traffic - createBoxStream(localNonce, this.psk), - bytes.unwrap(), - (source) => map(source, (buf) => buf.subarray()), - // Decrypt all inbound traffic - createUnboxStream(remoteNonce, this.psk), - external - ).catch(this.log.error) - - return { - ...connection, - ...internal - } + log('exchanged nonces') + + return new BoxMessageStream({ + localNonce, + remoteNonce, + psk: this.psk, + maConn: connection, + log + }) } } -export function preSharedKey (init: ProtectorInit): (components: ProtectorComponents) => ConnectionProtector { - return (components) => new PreSharedKeyConnectionProtector(components, init) +export function preSharedKey (init: ProtectorInit): () => ConnectionProtector { + return () => new PreSharedKeyConnectionProtector(init) } diff --git a/packages/pnet/test/index.spec.ts b/packages/pnet/test/index.spec.ts index f8f36d4961..b53bbb5458 100644 --- a/packages/pnet/test/index.spec.ts +++ b/packages/pnet/test/index.spec.ts @@ -1,12 +1,8 @@ /* eslint-env mocha */ -import { generateKeyPair } from '@libp2p/crypto/keys' -import { mockMultiaddrConnPair } from '@libp2p/interface-compliance-tests/mocks' -import { defaultLogger } from '@libp2p/logger' -import { peerIdFromPrivateKey } from '@libp2p/peer-id' -import { multiaddr } from '@multiformats/multiaddr' + +import { multiaddrConnectionPair } from '@libp2p/utils' import { expect } from 'aegir/chai' -import all from 'it-all' -import { pipe } from 'it-pipe' +import { raceEvent } from 'race-event' import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' import { INVALID_PSK } from '../src/errors.js' import { preSharedKey, generateKey } from '../src/index.js' @@ -22,89 +18,74 @@ describe('private network', () => { it('should accept a valid psk buffer', () => { const protector = preSharedKey({ psk: swarmKeyBuffer - })({ - logger: defaultLogger() - }) + })() expect(protector).to.have.property('tag', '/key/swarm/psk/1.0.0/') }) it('should protect a simple connection', async () => { - const { inbound, outbound } = mockMultiaddrConnPair({ - addrs: [ - multiaddr('/ip4/127.0.0.1/tcp/1234'), - 
multiaddr('/ip4/127.0.0.1/tcp/1235') - ], - remotePeer: peerIdFromPrivateKey(await generateKeyPair('Ed25519')) + const [outboundConnection, inboundConnection] = multiaddrConnectionPair({ + delay: 10 }) + const protector = preSharedKey({ psk: swarmKeyBuffer - })({ - logger: defaultLogger() + })() + + const [outbound, inbound] = await Promise.all([ + protector.protect(outboundConnection), + protector.protect(inboundConnection) + ]) + + const output: Uint8Array[] = [] + + inbound.addEventListener('message', (evt) => { + output.push(evt.data.subarray()) }) - const [aToB, bToA] = await Promise.all([ - protector.protect(inbound), - protector.protect(outbound) + outbound.send(uint8ArrayFromString('hello world')) + outbound.send(uint8ArrayFromString('doo dah')) + + await Promise.all([ + raceEvent(inbound, 'remoteCloseWrite'), + outbound.closeWrite() ]) - void pipe( - async function * () { - yield uint8ArrayFromString('hello world') - yield uint8ArrayFromString('doo dah') - }, - aToB - ) - - const output = await pipe( - bToA, - async function * (source) { - for await (const chunk of source) { - yield chunk.slice() - } - }, - async (source) => all(source) - ) - - expect(output).to.eql([uint8ArrayFromString('hello world'), uint8ArrayFromString('doo dah')]) + expect(output).to.deep.equal([uint8ArrayFromString('hello world'), uint8ArrayFromString('doo dah')]) }) it('should not be able to share correct data with different keys', async () => { - const { inbound, outbound } = mockMultiaddrConnPair({ - addrs: [ - multiaddr('/ip4/127.0.0.1/tcp/1234'), - multiaddr('/ip4/127.0.0.1/tcp/1235') - ], - remotePeer: peerIdFromPrivateKey(await generateKeyPair('Ed25519')) + const [outboundConnection, inboundConnection] = multiaddrConnectionPair({ + delay: 10 }) const protector = preSharedKey({ psk: swarmKeyBuffer - })({ - logger: defaultLogger() - }) + })() const protectorB = preSharedKey({ psk: wrongSwarmKeyBuffer - })({ - logger: defaultLogger() - }) + })() - const [aToB, bToA] = await Promise.all([ - protector.protect(inbound), - protectorB.protect(outbound) + const [outbound, inbound] = await Promise.all([ + protector.protect(outboundConnection), + protectorB.protect(inboundConnection) ]) - void pipe( - async function * () { - yield uint8ArrayFromString('hello world') - yield uint8ArrayFromString('doo dah') - }, - aToB - ) + outbound.send(uint8ArrayFromString('hello world')) + outbound.send(uint8ArrayFromString('doo dah')) - const output = await pipe( - bToA, - async (source) => all(source) - ) + const output: Uint8Array[] = [] + + inbound.addEventListener('message', (evt) => { + output.push(evt.data.subarray()) + }) + + outbound.send(uint8ArrayFromString('hello world')) + outbound.send(uint8ArrayFromString('doo dah')) + + await Promise.all([ + outbound.closeWrite(), + inbound.closeWrite() + ]) expect(output).to.not.eql([uint8ArrayFromString('hello world'), uint8ArrayFromString('doo dah')]) }) @@ -114,9 +95,7 @@ describe('private network', () => { expect(() => { return preSharedKey({ psk: uint8ArrayFromString('not-a-key') - })({ - logger: defaultLogger() - }) + })() }).to.throw(INVALID_PSK) }) @@ -124,9 +103,7 @@ describe('private network', () => { expect(() => { return preSharedKey({ psk: uint8ArrayFromString('/key/swarm/psk/1.0.0/\n/base16/\ndffb7e') - })({ - logger: defaultLogger() - }) + })() }).to.throw(INVALID_PSK) }) }) diff --git a/packages/protocol-autonat-v2/package.json b/packages/protocol-autonat-v2/package.json index bdf0bf938c..578693a597 100644 --- a/packages/protocol-autonat-v2/package.json +++ 
b/packages/protocol-autonat-v2/package.json @@ -51,7 +51,6 @@ "@libp2p/utils": "^6.7.1", "@multiformats/multiaddr": "^12.4.4", "any-signal": "^4.1.1", - "it-protobuf-stream": "^2.0.2", "main-event": "^1.0.1", "protons-runtime": "^5.5.0", "uint8arraylist": "^2.4.8", @@ -62,10 +61,10 @@ "@libp2p/logger": "^5.1.21", "@libp2p/peer-id": "^5.1.8", "aegir": "^47.0.14", + "delay": "^6.0.0", "it-all": "^3.0.8", "it-length-prefixed": "^10.0.1", "it-pipe": "^3.0.1", - "it-pushable": "^3.2.3", "p-retry": "^6.2.1", "protons": "^7.6.1", "sinon": "^20.0.0", diff --git a/packages/protocol-autonat-v2/src/client.ts b/packages/protocol-autonat-v2/src/client.ts index 691acd6f1a..64876886a6 100644 --- a/packages/protocol-autonat-v2/src/client.ts +++ b/packages/protocol-autonat-v2/src/client.ts @@ -1,23 +1,16 @@ import { ProtocolError, serviceCapabilities, serviceDependencies } from '@libp2p/interface' import { peerSet } from '@libp2p/peer-collections' -import { createScalableCuckooFilter } from '@libp2p/utils/filters' -import { isGlobalUnicast } from '@libp2p/utils/multiaddr/is-global-unicast' -import { isPrivate } from '@libp2p/utils/multiaddr/is-private' -import { PeerQueue } from '@libp2p/utils/peer-queue' -import { repeatingTask } from '@libp2p/utils/repeating-task' -import { trackedMap } from '@libp2p/utils/tracked-map' +import { createScalableCuckooFilter, isGlobalUnicast, isPrivate, PeerQueue, repeatingTask, trackedMap, pbStream } from '@libp2p/utils' import { anySignal } from 'any-signal' -import { pbStream } from 'it-protobuf-stream' import { setMaxListeners } from 'main-event' import { DEFAULT_CONNECTION_THRESHOLD, DIAL_DATA_CHUNK_SIZE, MAX_DIAL_DATA_BYTES, MAX_INBOUND_STREAMS, MAX_MESSAGE_SIZE, MAX_OUTBOUND_STREAMS, TIMEOUT } from './constants.ts' import { DialBack, DialBackResponse, DialResponse, DialStatus, Message } from './pb/index.ts' import { randomNumber } from './utils.ts' import type { AutoNATv2Components, AutoNATv2ServiceInit } from './index.ts' -import type { Logger, Connection, Startable, AbortOptions, IncomingStreamData } from '@libp2p/interface' +import type { Logger, Connection, Startable, AbortOptions, Stream } from '@libp2p/interface' import type { AddressType } from '@libp2p/interface-internal' import type { PeerSet } from '@libp2p/peer-collections' -import type { Filter } from '@libp2p/utils/filters' -import type { RepeatingTask } from '@libp2p/utils/repeating-task' +import type { Filter, RepeatingTask } from '@libp2p/utils' import type { Multiaddr } from '@multiformats/multiaddr' // if more than 3 peers manage to dial us on what we believe to be our external @@ -154,8 +147,8 @@ export class AutoNATv2Client implements Startable { } }) - await this.components.registrar.handle(this.dialBackProtocol, (data) => { - void this.handleDialBackStream(data) + await this.components.registrar.handle(this.dialBackProtocol, (stream, connection) => { + void this.handleDialBackStream(stream, connection) .catch(err => { this.log.error('error handling incoming autonat stream - %e', err) }) @@ -239,11 +232,11 @@ export class AutoNATv2Client implements Startable { /** * Handle an incoming AutoNAT request */ - async handleDialBackStream (data: IncomingStreamData): Promise { + async handleDialBackStream (stream: Stream, connection: Connection): Promise { const signal = AbortSignal.timeout(this.timeout) setMaxListeners(Infinity, signal) - const messages = pbStream(data.stream, { + const messages = pbStream(stream, { maxDataLength: this.maxMessageSize }) @@ -264,12 +257,12 @@ export class AutoNATv2Client 
implements Startable { status: DialBackResponse.DialBackStatus.OK }, DialBackResponse) - await data.stream.close({ + await stream.closeWrite({ signal }) } catch (err: any) { this.log.error('error handling incoming dial back stream - %e', err) - data.stream.abort(err) + stream.abort(err) } } @@ -465,7 +458,7 @@ export class AutoNATv2Client implements Startable { return } - this.log.trace('asking %p to verify multiaddrs %s', connection.remotePeer, unverifiedAddresses) + this.log.trace('asking %a to verify multiaddrs %s', connection.remoteAddr, unverifiedAddresses) const stream = await connection.newStream(this.dialRequestProtocol, options) @@ -578,7 +571,7 @@ export class AutoNATv2Client implements Startable { } } finally { try { - await stream.close(options) + await stream.closeWrite(options) } catch (err: any) { stream.abort(err) } diff --git a/packages/protocol-autonat-v2/src/server.ts b/packages/protocol-autonat-v2/src/server.ts index ed1f77dd7c..b87ccf0f3d 100644 --- a/packages/protocol-autonat-v2/src/server.ts +++ b/packages/protocol-autonat-v2/src/server.ts @@ -1,15 +1,14 @@ import { ProtocolError } from '@libp2p/interface' -import { isPrivateIp } from '@libp2p/utils/private-ip' +import { isPrivateIp, pbStream } from '@libp2p/utils' import { CODE_IP4, CODE_IP6, multiaddr } from '@multiformats/multiaddr' -import { pbStream } from 'it-protobuf-stream' import { setMaxListeners } from 'main-event' import { MAX_INBOUND_STREAMS, MAX_MESSAGE_SIZE, MAX_OUTBOUND_STREAMS, TIMEOUT } from './constants.ts' import { DialBack, DialBackResponse, DialResponse, DialStatus, Message } from './pb/index.ts' import { randomNumber } from './utils.ts' import type { AutoNATv2Components, AutoNATv2ServiceInit } from './index.ts' -import type { Logger, Connection, Startable, AbortOptions, IncomingStreamData, Stream } from '@libp2p/interface' +import type { Logger, Connection, Startable, AbortOptions, Stream } from '@libp2p/interface' +import type { ProtobufMessageStream } from '@libp2p/utils' import type { Multiaddr } from '@multiformats/multiaddr' -import type { MessageStream } from 'it-protobuf-stream' export interface AutoNATv2ServerInit extends AutoNATv2ServiceInit { dialRequestProtocol: string @@ -37,6 +36,8 @@ export class AutoNATv2Server implements Startable { this.maxInboundStreams = init.maxInboundStreams ?? MAX_INBOUND_STREAMS this.maxOutboundStreams = init.maxOutboundStreams ?? MAX_OUTBOUND_STREAMS this.maxMessageSize = init.maxMessageSize ?? 
MAX_MESSAGE_SIZE + + this.handleDialRequestStream = this.handleDialRequestStream.bind(this) } async start (): Promise { @@ -45,12 +46,7 @@ export class AutoNATv2Server implements Startable { } // AutoNat server - await this.components.registrar.handle(this.dialRequestProtocol, (data) => { - void this.handleDialRequestStream(data) - .catch(err => { - this.log.error('error handling incoming autonat stream - %e', err) - }) - }, { + await this.components.registrar.handle(this.dialRequestProtocol, this.handleDialRequestStream, { maxInboundStreams: this.maxInboundStreams, maxOutboundStreams: this.maxOutboundStreams }) @@ -67,100 +63,95 @@ export class AutoNATv2Server implements Startable { /** * Handle an incoming AutoNAT request */ - async handleDialRequestStream (data: IncomingStreamData): Promise { + async handleDialRequestStream (stream: Stream, connection: Connection): Promise { const signal = AbortSignal.timeout(this.timeout) setMaxListeners(Infinity, signal) - const messages = pbStream(data.stream, { + const messages = pbStream(stream, { maxDataLength: this.maxMessageSize }).pb(Message) - try { - const connectionIp = getIpAddress(data.connection.remoteAddr) + const connectionIp = getIpAddress(connection.remoteAddr) - if (connectionIp == null) { - throw new ProtocolError(`Could not find IP address in connection address "${data.connection.remoteAddr}"`) - } + if (connectionIp == null) { + throw new ProtocolError(`Could not find IP address in connection address "${connection.remoteAddr}"`) + } - const { dialRequest } = await messages.read({ - signal - }) + const { dialRequest } = await messages.read({ + signal + }) - if (dialRequest == null) { - throw new ProtocolError('Did not receive DialRequest message on incoming dial request stream') - } + if (dialRequest == null) { + throw new ProtocolError('Did not receive DialRequest message on incoming dial request stream') + } - if (dialRequest.addrs.length === 0) { - throw new ProtocolError('Did not receive any addresses to dial') - } + if (dialRequest.addrs.length === 0) { + throw new ProtocolError('Did not receive any addresses to dial') + } - for (let i = 0; i < dialRequest.addrs.length; i++) { - try { - const ma = multiaddr(dialRequest.addrs[i]) - const isDialable = await this.components.connectionManager.isDialable(ma, { + for (let i = 0; i < dialRequest.addrs.length; i++) { + try { + const ma = multiaddr(dialRequest.addrs[i]) + const isDialable = await this.components.connectionManager.isDialable(ma, { + signal + }) + + if (!isDialable) { + await messages.write({ + dialResponse: { + addrIdx: i, + status: DialResponse.ResponseStatus.E_DIAL_REFUSED, + dialStatus: DialStatus.UNUSED + } + }, { signal }) - if (!isDialable) { - await messages.write({ - dialResponse: { - addrIdx: i, - status: DialResponse.ResponseStatus.E_DIAL_REFUSED, - dialStatus: DialStatus.UNUSED - } - }, { - signal - }) - - continue - } - - const ip = getIpAddress(ma) - - if (ip == null) { - throw new ProtocolError(`Could not find IP address in requested address "${ma}"`) - } + continue + } - if (isPrivateIp(ip)) { - throw new ProtocolError(`Requested address had private IP "${ma}"`) - } + const ip = getIpAddress(ma) - if (ip !== connectionIp) { - // amplification attack protection - request the client sends us a - // random number of bytes before we'll dial the address - await this.preventAmplificationAttack(messages, i, { - signal - }) - } + if (ip == null) { + throw new ProtocolError(`Could not find IP address in requested address "${ma}"`) + } - const dialStatus = await 
this.dialClientBack(ma, dialRequest.nonce, { - signal - }) + if (isPrivateIp(ip)) { + throw new ProtocolError(`Requested address had private IP "${ma}"`) + } - await messages.write({ - dialResponse: { - addrIdx: i, - status: DialResponse.ResponseStatus.OK, - dialStatus - } - }, { + if (ip !== connectionIp) { + // amplification attack protection - request the client sends us a + // random number of bytes before we'll dial the address + await this.preventAmplificationAttack(messages, i, { signal }) - } catch (err) { - this.log.error('could not parse multiaddr - %e', err) } - } - await data.stream.close({ - signal - }) - } catch (err: any) { - this.log.error('error handling incoming autonat stream - %e', err) - data.stream.abort(err) + const dialStatus = await this.dialClientBack(ma, dialRequest.nonce, { + signal + }) + + await messages.write({ + dialResponse: { + addrIdx: i, + status: DialResponse.ResponseStatus.OK, + dialStatus + } + }, { + signal + }) + } catch (err) { + this.log.error('error handling incoming dialback request - %e', err) + } } + + await stream.closeWrite({ + signal + }) } - private async preventAmplificationAttack (messages: MessageStream, index: number, options: AbortOptions): Promise { + private async preventAmplificationAttack (messages: ProtobufMessageStream, index: number, options: AbortOptions): Promise { const numBytes = randomNumber(30_000, 100_000) await messages.write({ @@ -207,7 +198,7 @@ export class AutoNATv2Server implements Startable { nonce }, DialBack, options) - const response = await dialBackMessages.read(DialBackResponse) + const response = await dialBackMessages.read(DialBackResponse, options) if (response.status !== DialBackResponse.DialBackStatus.OK) { throw new ProtocolError('DialBackResponse status was not OK') diff --git a/packages/protocol-autonat-v2/test/client.spec.ts b/packages/protocol-autonat-v2/test/client.spec.ts index 18041b4f5e..2532297c7d 100644 --- a/packages/protocol-autonat-v2/test/client.spec.ts +++ b/packages/protocol-autonat-v2/test/client.spec.ts @@ -4,10 +4,10 @@ import { generateKeyPair } from '@libp2p/crypto/keys' import { start, stop } from '@libp2p/interface' import { defaultLogger } from '@libp2p/logger' import { peerIdFromPrivateKey } from '@libp2p/peer-id' +import { streamPair, pbStream } from '@libp2p/utils' import { multiaddr } from '@multiformats/multiaddr' import { expect } from 'aegir/chai' -import * as lp from 'it-length-prefixed' -import { pushable } from 'it-pushable' +import delay from 'delay' import pRetry from 'p-retry' import sinon from 'sinon' import { stubInterface } from 'sinon-ts' @@ -15,7 +15,7 @@ import { AutoNATv2Service } from '../src/autonat.ts' import { PROTOCOL_NAME, PROTOCOL_PREFIX, PROTOCOL_VERSION } from '../src/constants.ts' import { DialResponse, DialStatus, Message } from '../src/pb/index.ts' import type { AutoNATv2Components, AutoNATv2ServiceInit } from '../src/index.ts' -import type { Connection, Stream, PeerId, PeerStore, Peer } from '@libp2p/interface' +import type { Connection, PeerId, PeerStore, Peer } from '@libp2p/interface' import type { AddressManager, ConnectionManager, RandomWalk, Registrar } from '@libp2p/interface-internal' import type { StubbedInstance } from 'sinon-ts' @@ -101,67 +101,49 @@ describe('autonat v2 - client', () => { connection.remotePeer = peer.id connectionManager.openConnection.withArgs(peer.id).resolves(connection) - connection.newStream.withArgs(`/${PROTOCOL_PREFIX}/${PROTOCOL_NAME}/${PROTOCOL_VERSION}/dial-request`).callsFake(async () => { - const 
incomingMessages = pushable({ - objectMode: true - }) - - // stub autonat protocol stream - const stream = stubInterface({ - source: (async function * () { - const { done, value } = await incomingMessages.next() + const [outgoingStream, incomingStream] = await streamPair() - if (done) { - return - } + connection.newStream.withArgs(`/${PROTOCOL_PREFIX}/${PROTOCOL_NAME}/${PROTOCOL_VERSION}/dial-request`).resolves(outgoingStream) - if (value.dialRequest == null) { - throw new Error('Unexpected message') - } + const messages = pbStream(incomingStream).pb(Message) - for (const addr of value.dialRequest.addrs.map(buf => multiaddr(buf))) { - let responses = data.messages[addr.toString()] + Promise.resolve().then(async () => { + const message = await messages.read() - if (responses == null) { - throw new Error(`No response defined for address ${addr}`) - } + if (message.dialRequest == null) { + throw new Error('Unexpected message') + } - if (!Array.isArray(responses)) { - responses = [responses] - } + for (const addr of message.dialRequest.addrs.map(buf => multiaddr(buf))) { + let responses = data.messages[addr.toString()] - for (const response of responses) { - yield lp.encode.single(Message.encode(response)) + if (responses == null) { + throw new Error(`No response defined for address ${addr}`) + } - if (response.dialDataRequest != null) { - // read data requests - for (let read = 0; read < response.dialDataRequest.numBytes;) { - const { done, value } = await incomingMessages.next() + if (!Array.isArray(responses)) { + responses = [responses] + } - if (done) { - return - } + for (const response of responses) { + await messages.write(response) - if (value.dialDataResponse == null) { - throw new Error('Incorrect message type') - } + if (response.dialDataRequest != null) { + // read data requests + for (let read = 0; read < response.dialDataRequest.numBytes;) { + const message = await messages.read() - read += value.dialDataResponse.data.byteLength - } + if (message.dialDataResponse == null) { + throw new Error('Incorrect message type') } + + read += message.dialDataResponse.data.byteLength } } - }()), - sink: async (source) => { - for await (const buf of lp.decode(source)) { - incomingMessages.push(Message.decode(buf)) - } - - incomingMessages.end() } - }) + } - return stream + await incomingStream.closeWrite() }) return connection @@ -230,6 +212,7 @@ describe('autonat v2 - client', () => { for (const conn of connections) { await service.client.verifyExternalAddresses(conn) + await delay(100) } await pRetry(() => { @@ -303,6 +286,7 @@ describe('autonat v2 - client', () => { for (const conn of connections) { await service.client.verifyExternalAddresses(conn) + await delay(100) } await pRetry(() => { @@ -424,6 +408,7 @@ describe('autonat v2 - client', () => { for (const conn of connections) { await service.client.verifyExternalAddresses(conn) + await delay(100) } await pRetry(() => { @@ -545,6 +530,7 @@ describe('autonat v2 - client', () => { for (const conn of connections) { await service.client.verifyExternalAddresses(conn) + await delay(100) } await pRetry(() => { @@ -738,6 +724,7 @@ describe('autonat v2 - client', () => { for (const conn of connections) { await service.client.verifyExternalAddresses(conn) + await delay(100) } await pRetry(() => { @@ -913,6 +900,7 @@ describe('autonat v2 - client', () => { for (const conn of connections) { await service.client.verifyExternalAddresses(conn) + await delay(100) } await pRetry(() => { @@ -986,6 +974,7 @@ describe('autonat v2 - client', () => { for 
(const conn of connections) { await service.client.verifyExternalAddresses(conn) + await delay(100) } expect(addressManager.addObservedAddr.called) diff --git a/packages/protocol-autonat-v2/test/server.spec.ts b/packages/protocol-autonat-v2/test/server.spec.ts index 4ba7d82036..2323cc51be 100644 --- a/packages/protocol-autonat-v2/test/server.spec.ts +++ b/packages/protocol-autonat-v2/test/server.spec.ts @@ -2,20 +2,19 @@ import { generateKeyPair } from '@libp2p/crypto/keys' import { start, stop } from '@libp2p/interface' import { defaultLogger } from '@libp2p/logger' import { peerIdFromPrivateKey } from '@libp2p/peer-id' +import { streamPair, pbStream } from '@libp2p/utils' import { multiaddr } from '@multiformats/multiaddr' import { expect } from 'aegir/chai' import all from 'it-all' import * as lp from 'it-length-prefixed' import { pipe } from 'it-pipe' -import { pushable } from 'it-pushable' import sinon from 'sinon' import { stubInterface } from 'sinon-ts' -import { Uint8ArrayList } from 'uint8arraylist' import { AutoNATv2Service } from '../src/autonat.ts' import { PROTOCOL_NAME, PROTOCOL_PREFIX, PROTOCOL_VERSION } from '../src/constants.ts' import { DialBack, DialBackResponse, DialResponse, DialStatus, Message } from '../src/pb/index.ts' import type { AutoNATv2Components, AutoNATv2ServiceInit } from '../src/index.ts' -import type { Connection, Stream, PeerId, PeerStore } from '@libp2p/interface' +import type { Connection, PeerId, PeerStore } from '@libp2p/interface' import type { AddressManager, ConnectionManager, RandomWalk, Registrar } from '@libp2p/interface-internal' import type { Multiaddr } from '@multiformats/multiaddr' import type { StubbedInstance } from 'sinon-ts' @@ -81,34 +80,19 @@ describe('autonat v2 - server', () => { message?: Message | Uint8Array | boolean transportSupported?: boolean canDial?: boolean - } = {}): Promise { + } = {}): Promise { const requestingPeer = opts.requestingPeer ?? peerIdFromPrivateKey(await generateKeyPair('Ed25519')) const remotePeer = opts.remotePeer ?? requestingPeer const observedAddress = opts.observedAddress ?? multiaddr('/ip4/124.124.124.124/tcp/28319') const remoteAddr = opts.remoteAddr ?? 
observedAddress.encapsulate(`/p2p/${remotePeer.toString()}`) - const source = pushable() - const sink = pushable() - const stream: Stream = { - ...stubInterface(), - source, - sink: async (stream) => { - for await (const buf of stream) { - sink.push(new Uint8ArrayList(buf)) - } - sink.end() - }, - abort: (err) => { - void sink.throw(err) - }, - close: async () => { - sink.end() - } - } - const connection = { - ...stubInterface(), + + const [outgoingStream, incomingStream] = await streamPair() + const [outgoingDialbackStream, incomingDialbackStream] = await streamPair() + + const connection = stubInterface({ remotePeer, remoteAddr - } + }) const nonce = 12345n @@ -116,28 +100,18 @@ describe('autonat v2 - server', () => { const newConnection = stubInterface() newConnection.remotePeer = remotePeer newConnection.remoteAddr = remoteAddr - newConnection.newStream.withArgs(`/${PROTOCOL_PREFIX}/${PROTOCOL_NAME}/${PROTOCOL_VERSION}/dial-back`).callsFake(async () => { - const dialBackMessage = Promise.withResolvers() + newConnection.newStream.withArgs(`/${PROTOCOL_PREFIX}/${PROTOCOL_NAME}/${PROTOCOL_VERSION}/dial-back`).resolves(outgoingDialbackStream) + Promise.resolve().then(async () => { // stub autonat protocol stream - const stream = stubInterface({ - source: (async function * () { - const message = await dialBackMessage.promise - - expect(message.nonce).to.equal(nonce) - - yield lp.encode.single(DialBackResponse.encode({ - status: DialBackResponse.DialBackStatus.OK - })) - }()), - sink: async (source) => { - for await (const buf of lp.decode(source)) { - dialBackMessage.resolve(DialBack.decode(buf)) - } - } - }) + const incomingDialbackStreamMessages = pbStream(incomingDialbackStream) - return stream + const message = await incomingDialbackStreamMessages.read(DialBack) + expect(message.nonce).to.equal(nonce) + + await incomingDialbackStreamMessages.write({ + status: DialBackResponse.DialBackStatus.OK + }, DialBackResponse) }) if (opts.canDial === false) { @@ -147,57 +121,72 @@ describe('autonat v2 - server', () => { connectionManager.openConnection.resolves(newConnection) } - let buf: Uint8Array | undefined - - if (opts.message instanceof Uint8Array) { - buf = opts.message - } else if (opts.message == null) { - buf = Message.encode({ - dialRequest: { - nonce, - addrs: [ - observedAddress.bytes - ] - } - }) - } else if (opts.message !== false && opts.message !== true) { - buf = Message.encode(opts.message) - } + const messagesPromise = pipe( + outgoingStream, + (source) => lp.decode(source), + async source => all(source) + ) - if (buf != null) { - source.push(lp.encode.single(buf)) - } + Promise.resolve().then(async () => { + let buf: Uint8Array | undefined + + if (opts.message instanceof Uint8Array) { + buf = opts.message + } else if (opts.message == null) { + buf = Message.encode({ + dialRequest: { + nonce, + addrs: [ + observedAddress.bytes + ] + } + }) + } else if (opts.message !== false && opts.message !== true) { + buf = Message.encode(opts.message) + } + + if (buf != null) { + outgoingStream.send(lp.encode.single(buf)) + } + + const messages = pbStream(outgoingStream) + const message = await messages.read(Message) - source.end() + if (message.dialDataRequest != null) { + let toWrite = Number(message.dialDataRequest.numBytes) + const chunk = 8180 - await service.server.handleDialRequestStream({ - stream, - connection + while (toWrite > 0) { + await messages.write({ + dialDataResponse: { + data: new Uint8Array(chunk) + } + }, Message) + + toWrite -= chunk + } + } }) - const slice = await 
pipe( - sink, - (source) => lp.decode(source), - async source => all(source) - ) + await service.server.handleDialRequestStream(incomingStream, connection) - if (slice.length !== 1) { - throw new Error('Response was not length encoded') - } + const messages = (await messagesPromise).map(buf => Message.decode(buf)) - const message = Message.decode(slice[0]) + if (messages.length === 0) { + throw new Error('Not enough messages') + } - if (message.dialResponse?.status === DialResponse.ResponseStatus.OK) { + if (messages[messages.length - 1].dialResponse?.status === DialResponse.ResponseStatus.OK) { expect(newConnection.close.called).to.be.true('Did not close connection after dial') } - return message + return messages } it('should dial a requested address', async () => { - const message = await stubIncomingStream({ + const message = (await stubIncomingStream({ canDial: true - }) + })).pop() expect(message).to.have.nested.property('dialResponse.status', DialResponse.ResponseStatus.OK) expect(message).to.have.nested.property('dialResponse.dialStatus', DialStatus.OK) @@ -256,11 +245,11 @@ describe('autonat v2 - server', () => { const remotePeer = peerIdFromPrivateKey(await generateKeyPair('Ed25519')) const requestingPeer = peerIdFromPrivateKey(await generateKeyPair('Ed25519')) - const message = await stubIncomingStream({ + const message = (await stubIncomingStream({ remotePeer, remoteAddr: multiaddr(`/ip4/223.223.223.223/tcp/27132/p2p/${remotePeer.toString()}/p2p-circuit/p2p/${requestingPeer.toString()}`), requestingPeer - }) + })).pop() expect(message).to.have.nested.property('dialResponse.status', DialResponse.ResponseStatus.E_DIAL_REFUSED) }) @@ -270,28 +259,29 @@ describe('autonat v2 - server', () => { const observedAddress = multiaddr('/ip4/20.20.20.10/tcp/27132') const remoteAddr = multiaddr(`/ip4/129.129.129.129/tcp/27132/p2p/${requestingPeer.toString()}`) - const message = await stubIncomingStream({ + const messages = await stubIncomingStream({ requestingPeer, remoteAddr, observedAddress, canDial: true }) - expect(message).to.have.nested.property('dialDataRequest.numBytes').that.is.a('BigInt') + expect(messages[0]).to.have.nested.property('dialDataRequest.numBytes').that.is.a('BigInt') + expect(messages[1]).to.have.nested.property('dialResponse.status', DialResponse.ResponseStatus.OK) }) it('should refuse to dial a requested address when it is on an unsupported transport', async () => { - const message = await stubIncomingStream({ + const message = (await stubIncomingStream({ transportSupported: false - }) + })).pop() expect(message).to.have.nested.property('dialResponse.status', DialResponse.ResponseStatus.E_DIAL_REFUSED) }) it('should error when dialing a requested address', async () => { - const message = await stubIncomingStream({ + const message = (await stubIncomingStream({ canDial: false - }) + })).pop() expect(message).to.have.nested.property('dialResponse.status', DialResponse.ResponseStatus.E_DIAL_REFUSED) expect(message).to.have.nested.property('dialResponse.dialStatus', DialStatus.UNUSED) diff --git a/packages/protocol-autonat/package.json b/packages/protocol-autonat/package.json index c8cadcdb6f..2685236e5c 100644 --- a/packages/protocol-autonat/package.json +++ b/packages/protocol-autonat/package.json @@ -52,7 +52,6 @@ "@libp2p/utils": "^6.7.1", "@multiformats/multiaddr": "^12.4.4", "any-signal": "^4.1.1", - "it-protobuf-stream": "^2.0.2", "main-event": "^1.0.1", "multiformats": "^13.3.6", "protons-runtime": "^5.5.0", @@ -62,11 +61,10 @@ "@libp2p/crypto": "^5.1.7", 
"@libp2p/logger": "^5.1.21", "aegir": "^47.0.14", + "delay": "^6.0.0", "it-all": "^3.0.8", - "it-drain": "^3.0.9", "it-length-prefixed": "^10.0.1", "it-pipe": "^3.0.1", - "it-pushable": "^3.2.3", "p-retry": "^6.2.1", "protons": "^7.6.1", "sinon": "^20.0.0", diff --git a/packages/protocol-autonat/src/autonat.ts b/packages/protocol-autonat/src/autonat.ts index 3d4fcbbec3..33810e56ea 100644 --- a/packages/protocol-autonat/src/autonat.ts +++ b/packages/protocol-autonat/src/autonat.ts @@ -1,25 +1,18 @@ import { serviceCapabilities, serviceDependencies } from '@libp2p/interface' import { peerSet } from '@libp2p/peer-collections' import { peerIdFromMultihash } from '@libp2p/peer-id' -import { createScalableCuckooFilter } from '@libp2p/utils/filters' -import { isGlobalUnicast } from '@libp2p/utils/multiaddr/is-global-unicast' -import { isPrivate } from '@libp2p/utils/multiaddr/is-private' -import { PeerQueue } from '@libp2p/utils/peer-queue' -import { repeatingTask } from '@libp2p/utils/repeating-task' -import { trackedMap } from '@libp2p/utils/tracked-map' -import { multiaddr, protocols } from '@multiformats/multiaddr' +import { createScalableCuckooFilter, isGlobalUnicast, isPrivate, PeerQueue, repeatingTask, trackedMap, pbStream } from '@libp2p/utils' +import { CODE_P2P, multiaddr } from '@multiformats/multiaddr' import { anySignal } from 'any-signal' -import { pbStream } from 'it-protobuf-stream' import { setMaxListeners } from 'main-event' import * as Digest from 'multiformats/hashes/digest' import { DEFAULT_CONNECTION_THRESHOLD, MAX_INBOUND_STREAMS, MAX_MESSAGE_SIZE, MAX_OUTBOUND_STREAMS, PROTOCOL_NAME, PROTOCOL_PREFIX, PROTOCOL_VERSION, TIMEOUT } from './constants.js' import { Message } from './pb/index.js' import type { AutoNATComponents, AutoNATServiceInit } from './index.js' -import type { Logger, Connection, PeerId, Startable, AbortOptions, IncomingStreamData } from '@libp2p/interface' +import type { Logger, Connection, PeerId, Startable, AbortOptions, Stream } from '@libp2p/interface' import type { AddressType } from '@libp2p/interface-internal' import type { PeerSet } from '@libp2p/peer-collections' -import type { Filter } from '@libp2p/utils/filters' -import type { RepeatingTask } from '@libp2p/utils/repeating-task' +import type { Filter, RepeatingTask } from '@libp2p/utils' import type { Multiaddr } from '@multiformats/multiaddr' // if more than 3 peers manage to dial us on what we believe to be our external @@ -136,8 +129,8 @@ export class AutoNATService implements Startable { return } - await this.components.registrar.handle(this.protocol, (data) => { - void this.handleIncomingAutonatStream(data) + await this.components.registrar.handle(this.protocol, (stream, connection) => { + void this.handleIncomingAutonatStream(stream, connection) .catch(err => { this.log.error('error handling incoming autonat stream - %e', err) }) @@ -229,30 +222,30 @@ export class AutoNATService implements Startable { /** * Handle an incoming AutoNAT request */ - async handleIncomingAutonatStream (data: IncomingStreamData): Promise { + async handleIncomingAutonatStream (stream: Stream, connection: Connection): Promise { const signal = AbortSignal.timeout(this.timeout) setMaxListeners(Infinity, signal) - const messages = pbStream(data.stream, { - maxDataLength: this.maxMessageSize - }).pb(Message) - try { + const messages = pbStream(stream, { + maxDataLength: this.maxMessageSize + }).pb(Message) + const request = await messages.read({ signal }) - const response = await this.handleAutonatMessage(request, 
data.connection, { + const response = await this.handleAutonatMessage(request, connection, { signal }) await messages.write(response, { signal }) - await messages.unwrap().unwrap().close({ + await stream.closeWrite({ signal }) } catch (err: any) { this.log.error('error handling incoming autonat stream - %e', err) - data.stream.abort(err) + stream.abort(err) } } @@ -400,7 +393,7 @@ export class AutoNATService implements Startable { type: Message.MessageType.DIAL_RESPONSE, dialResponse: { status: Message.ResponseStatus.OK, - addr: connection.remoteAddr.decapsulateCode(protocols('p2p').code).bytes + addr: connection.remoteAddr.decapsulateCode(CODE_P2P).bytes } } } catch (err: any) { @@ -629,7 +622,7 @@ export class AutoNATService implements Startable { const signal = AbortSignal.timeout(this.timeout) setMaxListeners(Infinity, signal) - this.log.trace('asking %p to verify multiaddr %s', connection.remotePeer, options.multiaddr) + this.log.trace('asking %a to verify multiaddr %s', connection.remoteAddr, options.multiaddr) const stream = await connection.newStream(this.protocol, { signal @@ -711,7 +704,7 @@ export class AutoNATService implements Startable { } } finally { try { - await stream.close({ + await stream.closeWrite({ signal }) } catch (err: any) { diff --git a/packages/protocol-autonat/test/index.spec.ts b/packages/protocol-autonat/test/index.spec.ts index 24c9be88c2..251456b925 100644 --- a/packages/protocol-autonat/test/index.spec.ts +++ b/packages/protocol-autonat/test/index.spec.ts @@ -5,13 +5,13 @@ import { generateKeyPair } from '@libp2p/crypto/keys' import { start, stop } from '@libp2p/interface' import { defaultLogger } from '@libp2p/logger' import { peerIdFromPrivateKey } from '@libp2p/peer-id' +import { streamPair } from '@libp2p/utils' import { multiaddr } from '@multiformats/multiaddr' import { expect } from 'aegir/chai' +import delay from 'delay' import all from 'it-all' -import drain from 'it-drain' import * as lp from 'it-length-prefixed' import { pipe } from 'it-pipe' -import { pushable } from 'it-pushable' import { TypedEventEmitter } from 'main-event' import pRetry from 'p-retry' import sinon from 'sinon' @@ -21,7 +21,7 @@ import { AutoNATService } from '../src/autonat.js' import { PROTOCOL_NAME, PROTOCOL_PREFIX, PROTOCOL_VERSION } from '../src/constants.js' import { Message } from '../src/pb/index.js' import type { AutoNATComponents, AutoNATServiceInit } from '../src/index.js' -import type { Connection, Stream, PeerId, Transport, Libp2pEvents, PeerStore, Peer } from '@libp2p/interface' +import type { Connection, PeerId, Transport, Libp2pEvents, PeerStore, Peer } from '@libp2p/interface' import type { AddressManager, ConnectionManager, RandomWalk, Registrar, TransportManager } from '@libp2p/interface-internal' import type { Multiaddr } from '@multiformats/multiaddr' import type { StubbedInstance } from 'sinon-ts' @@ -105,24 +105,20 @@ describe('autonat', () => { connection.remotePeer = peer.id connectionManager.openConnection.withArgs(peer.id).resolves(connection) - connection.newStream.withArgs(`/${PROTOCOL_PREFIX}/${PROTOCOL_NAME}/${PROTOCOL_VERSION}`).callsFake(async () => { - // stub autonat response - const response = Message.encode({ - type: Message.MessageType.DIAL_RESPONSE, - dialResponse - }) + const [outgoingStream, incomingStream] = await streamPair() - // stub autonat protocol stream - const stream = stubInterface({ - source: (async function * () { - yield lp.encode.single(response) - }()), - sink: async (source) => { - await drain(source) - } - }) + // 
stub autonat protocol stream + connection.newStream.withArgs(`/${PROTOCOL_PREFIX}/${PROTOCOL_NAME}/${PROTOCOL_VERSION}`).resolves(outgoingStream) - return stream + // stub autonat response + const response = Message.encode({ + type: Message.MessageType.DIAL_RESPONSE, + dialResponse + }) + + incomingStream.addEventListener('message', async (evt) => { + incomingStream.send(lp.encode.single(response)) + await incomingStream.closeWrite() }) return connection @@ -155,6 +151,7 @@ describe('autonat', () => { for (const conn of connections) { await service.verifyExternalAddresses(conn) + await delay(100) } await pRetry(() => { @@ -192,6 +189,7 @@ describe('autonat', () => { for (const conn of connections) { await service.verifyExternalAddresses(conn) + await delay(100) } await pRetry(() => { @@ -241,6 +239,7 @@ describe('autonat', () => { for (const conn of connections) { await service.verifyExternalAddresses(conn) + await delay(100) } await pRetry(() => { @@ -287,6 +286,7 @@ describe('autonat', () => { for (const conn of connections) { await service.verifyExternalAddresses(conn) + await delay(100) } await pRetry(() => { @@ -348,6 +348,7 @@ describe('autonat', () => { for (const conn of connections) { await service.verifyExternalAddresses(conn) + await delay(100) } await pRetry(() => { @@ -411,6 +412,7 @@ describe('autonat', () => { for (const conn of connections) { await service.verifyExternalAddresses(conn) + await delay(100) } await pRetry(() => { @@ -448,6 +450,7 @@ describe('autonat', () => { for (const conn of connections) { await service.verifyExternalAddresses(conn) + await delay(100) } expect(addressManager.addObservedAddr.called) @@ -469,29 +472,11 @@ describe('autonat', () => { const remotePeer = opts.remotePeer ?? requestingPeer const observedAddress = opts.observedAddress ?? multiaddr('/ip4/124.124.124.124/tcp/28319') const remoteAddr = opts.remoteAddr ?? 
observedAddress.encapsulate(`/p2p/${remotePeer.toString()}`) - const source = pushable() - const sink = pushable() - const stream: Stream = { - ...stubInterface(), - source, - sink: async (stream) => { - for await (const buf of stream) { - sink.push(new Uint8ArrayList(buf)) - } - sink.end() - }, - abort: (err) => { - void stream.source.throw(err) - }, - close: async () => { - sink.end() - } - } - const connection = { - ...stubInterface(), + const connection = stubInterface({ remotePeer, remoteAddr - } + }) + const [outgoingStream, incomingStream] = await streamPair() // we might support this transport transportManager.dialTransportForMultiaddr.withArgs(observedAddress) @@ -508,7 +493,7 @@ describe('autonat', () => { connectionManager.openConnection.resolves(newConnection) } - let buf: Uint8Array | undefined + let buf: Uint8Array | Uint8ArrayList | undefined if (opts.message instanceof Uint8Array) { buf = opts.message @@ -529,22 +514,21 @@ describe('autonat', () => { } if (buf != null) { - source.push(lp.encode.single(buf)) + outgoingStream.send(lp.encode.single(buf)) } - source.end() + outgoingStream.closeWrite() - await service.handleIncomingAutonatStream({ - stream, - connection - }) - - const slice = await pipe( - sink, + const messagesPromise = pipe( + outgoingStream, (source) => lp.decode(source), async source => all(source) ) + await service.handleIncomingAutonatStream(incomingStream, connection) + + const slice = await messagesPromise + if (slice.length !== 1) { throw new Error('Response was not length encoded') } diff --git a/packages/protocol-dcutr/package.json b/packages/protocol-dcutr/package.json index f7bbf55838..40f18a4b13 100644 --- a/packages/protocol-dcutr/package.json +++ b/packages/protocol-dcutr/package.json @@ -51,7 +51,6 @@ "@multiformats/multiaddr": "^12.4.4", "@multiformats/multiaddr-matcher": "^2.0.0", "delay": "^6.0.0", - "it-protobuf-stream": "^2.0.2", "protons-runtime": "^5.5.0", "uint8arraylist": "^2.4.8" }, diff --git a/packages/protocol-dcutr/src/dcutr.ts b/packages/protocol-dcutr/src/dcutr.ts index 461e2a4e22..ebfd76e1c0 100644 --- a/packages/protocol-dcutr/src/dcutr.ts +++ b/packages/protocol-dcutr/src/dcutr.ts @@ -1,8 +1,8 @@ import { InvalidMessageError, serviceDependencies } from '@libp2p/interface' +import { pbStream } from '@libp2p/utils' import { multiaddr } from '@multiformats/multiaddr' import { Circuit } from '@multiformats/multiaddr-matcher' import delay from 'delay' -import { pbStream } from 'it-protobuf-stream' import { HolePunch } from './pb/message.js' import { isPublicAndDialable } from './utils.js' import { multicodec } from './index.js' @@ -52,6 +52,8 @@ export class DefaultDCUtRService implements Startable { this.retries = init.retries ?? defaultValues.retries this.maxInboundStreams = init.maxInboundStreams ?? defaultValues.maxInboundStreams this.maxOutboundStreams = init.maxOutboundStreams ?? 
defaultValues.maxOutboundStreams + + this.handleIncomingUpgrade = this.handleIncomingUpgrade.bind(this) } readonly [Symbol.toStringTag] = '@libp2p/dcutr' @@ -91,12 +93,7 @@ export class DefaultDCUtRService implements Startable { } }) - await this.registrar.handle(multicodec, (data) => { - void this.handleIncomingUpgrade(data.stream, data.connection).catch(err => { - this.log.error('error during incoming DCUtR attempt', err) - data.stream.abort(err) - }) - }, { + await this.registrar.handle(multicodec, this.handleIncomingUpgrade, { maxInboundStreams: this.maxInboundStreams, maxOutboundStreams: this.maxOutboundStreams, runOnLimitedConnection: true @@ -215,7 +212,7 @@ export class DefaultDCUtRService implements Startable { } } finally { if (stream != null) { - await stream.close(options) + await stream.closeWrite(options) } } } @@ -293,66 +290,60 @@ export class DefaultDCUtRService implements Startable { signal: AbortSignal.timeout(this.timeout) } - try { - const pb = pbStream(stream, { - maxDataLength: MAX_DCUTR_MESSAGE_SIZE - }).pb(HolePunch) + const pb = pbStream(stream, { + maxDataLength: MAX_DCUTR_MESSAGE_SIZE + }).pb(HolePunch) - this.log('A receiving connect') - // 3. Upon receiving the Connect, A responds back with a Connect message - // containing its observed (and possibly predicted) addresses. - const connect = await pb.read(options) + this.log('A receiving connect') + // 3. Upon receiving the Connect, A responds back with a Connect message + // containing its observed (and possibly predicted) addresses. + const connect = await pb.read(options) - if (connect.type !== HolePunch.Type.CONNECT) { - this.log('B sent wrong message type') - throw new InvalidMessageError('DCUtR message type was incorrect') - } + if (connect.type !== HolePunch.Type.CONNECT) { + this.log('B sent wrong message type') + throw new InvalidMessageError('DCUtR message type was incorrect') + } - if (connect.observedAddresses.length === 0) { - this.log('B sent no multiaddrs') - throw new InvalidMessageError('DCUtR connect message had no multiaddrs') - } + if (connect.observedAddresses.length === 0) { + this.log('B sent no multiaddrs') + throw new InvalidMessageError('DCUtR connect message had no multiaddrs') + } - const multiaddrs = this.getDialableMultiaddrs(connect.observedAddresses) + const multiaddrs = this.getDialableMultiaddrs(connect.observedAddresses) - if (multiaddrs.length === 0) { - this.log('B had no dialable multiaddrs') - throw new InvalidMessageError('DCUtR connect message had no dialable multiaddrs') - } + if (multiaddrs.length === 0) { + this.log('B had no dialable multiaddrs') + throw new InvalidMessageError('DCUtR connect message had no dialable multiaddrs') + } - this.log('A sending connect') - await pb.write({ - type: HolePunch.Type.CONNECT, - observedAddresses: this.addressManager.getAddresses().map(ma => ma.bytes) - }) + this.log('A sending connect') + await pb.write({ + type: HolePunch.Type.CONNECT, + observedAddresses: this.addressManager.getAddresses().map(ma => ma.bytes) + }) - this.log('A receiving sync') - const sync = await pb.read(options) + this.log('A receiving sync') + const sync = await pb.read(options) - if (sync.type !== HolePunch.Type.SYNC) { - throw new InvalidMessageError('DCUtR message type was incorrect') - } + if (sync.type !== HolePunch.Type.SYNC) { + throw new InvalidMessageError('DCUtR message type was incorrect') + } - // TODO: when we have a QUIC transport, the dial step is different - for - // now we only have tcp support - // 
https://github.com/libp2p/specs/blob/master/relay/DCUtR.md#the-protocol + // TODO: when we have a QUIC transport, the dial step is different - for + // now we only have tcp support + // https://github.com/libp2p/specs/blob/master/relay/DCUtR.md#the-protocol - // Upon receiving the Sync, A immediately dials the address to B - this.log('A dialing', multiaddrs) - const connection = await this.connectionManager.openConnection(multiaddrs, { - signal: options.signal, - priority: DCUTR_DIAL_PRIORITY, - force: true - }) + // Upon receiving the Sync, A immediately dials the address to B + this.log('A dialing', multiaddrs) + const connection = await this.connectionManager.openConnection(multiaddrs, { + signal: options.signal, + priority: DCUTR_DIAL_PRIORITY, + force: true + }) - this.log('DCUtR to %p succeeded via %a, closing relayed connection', relayedConnection.remotePeer, connection.remoteAddr) - await relayedConnection.close(options) - } catch (err: any) { - this.log.error('incoming DCUtR from %p failed', relayedConnection.remotePeer, err) - stream.abort(err) - } finally { - await stream.close(options) - } + this.log('DCUtR to %p succeeded via %a, closing relayed connection', relayedConnection.remotePeer, connection.remoteAddr) + await relayedConnection.close(options) + await stream.closeWrite(options) } /** diff --git a/packages/protocol-dcutr/src/utils.ts b/packages/protocol-dcutr/src/utils.ts index f285a00371..542157f0c6 100644 --- a/packages/protocol-dcutr/src/utils.ts +++ b/packages/protocol-dcutr/src/utils.ts @@ -1,4 +1,4 @@ -import { isPrivateIp } from '@libp2p/utils/private-ip' +import { isPrivateIp } from '@libp2p/utils' import { Circuit, IP, DNS } from '@multiformats/multiaddr-matcher' import type { TransportManager } from '@libp2p/interface-internal' import type { Multiaddr } from '@multiformats/multiaddr' diff --git a/packages/protocol-echo/package.json b/packages/protocol-echo/package.json index e5e497eafe..860977a170 100644 --- a/packages/protocol-echo/package.json +++ b/packages/protocol-echo/package.json @@ -46,15 +46,12 @@ "dependencies": { "@libp2p/interface": "^2.10.5", "@libp2p/interface-internal": "^2.3.18", - "@multiformats/multiaddr": "^12.4.4", - "it-byte-stream": "^2.0.2", - "it-pipe": "^3.0.1" + "@libp2p/utils": "^6.7.1", + "@multiformats/multiaddr": "^12.4.4" }, "devDependencies": { - "@libp2p/logger": "^5.1.21", "aegir": "^47.0.14", "it-all": "^3.0.8", - "it-pair": "^2.0.6", "sinon": "^20.0.0", "sinon-ts": "^2.0.0", "uint8arraylist": "^2.4.8" diff --git a/packages/protocol-echo/src/echo.ts b/packages/protocol-echo/src/echo.ts index 8ed0560d11..0ad7de61c6 100644 --- a/packages/protocol-echo/src/echo.ts +++ b/packages/protocol-echo/src/echo.ts @@ -1,5 +1,4 @@ -import { byteStream } from 'it-byte-stream' -import { pipe } from 'it-pipe' +import { byteStream } from '@libp2p/utils' import { PROTOCOL_NAME, PROTOCOL_VERSION } from './constants.js' import type { Echo as EchoInterface, EchoComponents, EchoInit } from './index.js' import type { AbortOptions, PeerId, Startable } from '@libp2p/interface' @@ -13,24 +12,35 @@ export class Echo implements Startable, EchoInterface { private readonly components: EchoComponents private started: boolean private readonly init: EchoInit + private readonly timeout: number constructor (components: EchoComponents, init: EchoInit = {}) { this.started = false this.components = components this.protocol = `/${[init.protocolPrefix, PROTOCOL_NAME, PROTOCOL_VERSION].filter(Boolean).join('/')}` this.init = init + this.timeout = init.timeout ?? 
5_000 } readonly [Symbol.toStringTag] = '@libp2p/echo' async start (): Promise { - await this.components.registrar.handle(this.protocol, ({ stream, connection }) => { - const log = connection.log.newScope('echo') + await this.components.registrar.handle(this.protocol, async (stream, connection) => { + const log = stream.log.newScope('echo') + const start = Date.now() + const signal = AbortSignal.timeout(this.timeout) + let bytes = 0 - void pipe(stream, stream) - .catch((err: any) => { - log.error('error piping stream', err) - }) + for await (const buf of stream) { + bytes += buf.byteLength + stream.send(buf) + } + + log('echoed %d bytes in %dms', bytes, Date.now() - start) + + await stream.closeWrite({ + signal + }) }, { maxInboundStreams: this.init.maxInboundStreams, maxOutboundStreams: this.init.maxOutboundStreams, @@ -54,7 +64,9 @@ export class Echo implements Startable, EchoInterface { ...this.init, ...options }) - const bytes = byteStream(stream) + const bytes = byteStream(stream, { + maxBufferSize: buf.byteLength + }) const [, output] = await Promise.all([ bytes.write(buf, options), @@ -64,7 +76,7 @@ export class Echo implements Startable, EchoInterface { }) ]) - await stream.close(options) + await stream.closeWrite(options) return output.subarray() } diff --git a/packages/protocol-echo/src/index.ts b/packages/protocol-echo/src/index.ts index 7d16c57eea..33e34764b7 100644 --- a/packages/protocol-echo/src/index.ts +++ b/packages/protocol-echo/src/index.ts @@ -52,6 +52,7 @@ export interface EchoInit { maxInboundStreams?: number maxOutboundStreams?: number runOnLimitedConnection?: boolean + timeout?: number } export interface EchoComponents { diff --git a/packages/protocol-echo/test/index.spec.ts b/packages/protocol-echo/test/index.spec.ts index 0f0771513e..592af27fd8 100644 --- a/packages/protocol-echo/test/index.spec.ts +++ b/packages/protocol-echo/test/index.spec.ts @@ -1,17 +1,15 @@ /* eslint-env mocha */ import { start, stop } from '@libp2p/interface' -import { defaultLogger } from '@libp2p/logger' +import { streamPair } from '@libp2p/utils' import { multiaddr } from '@multiformats/multiaddr' import { expect } from 'aegir/chai' import all from 'it-all' -import { duplexPair } from 'it-pair/duplex' -import { pipe } from 'it-pipe' import sinon from 'sinon' import { stubInterface } from 'sinon-ts' import { Uint8ArrayList } from 'uint8arraylist' import { Echo } from '../src/echo.js' -import type { Connection, Stream } from '@libp2p/interface' +import type { Connection } from '@libp2p/interface' import type { ConnectionManager, Registrar } from '@libp2p/interface-internal' import type { StubbedInstance } from 'sinon-ts' @@ -49,55 +47,27 @@ describe('echo', () => { it('should echo data', async () => { await start(echo) - const duplex = duplexPair() - const outgoingStream = stubInterface() - outgoingStream.source = duplex[0].source - outgoingStream.sink.callsFake(async source => duplex[0].sink(source)) - - const incomingStream = stubInterface() - incomingStream.source = duplex[1].source - incomingStream.sink.callsFake(async source => duplex[1].sink(source)) + const [outgoingStream, incomingStream] = await streamPair() const handler = components.registrar.handle.getCall(0).args[1] - handler({ - stream: incomingStream, - connection: stubInterface({ - log: defaultLogger().forComponent('connection') - }) - }) + handler(incomingStream, stubInterface()) + const output = all(outgoingStream) const input = Uint8Array.from([0, 1, 2, 3]) - const output = await pipe( - [input], - outgoingStream, - 
async (source) => { - return new Uint8ArrayList(...(await all(source))).subarray() - } - ) + outgoingStream.send(input) + outgoingStream.closeWrite() - expect(output).to.equalBytes(input) + expect(new Uint8ArrayList(...(await output)).subarray()).to.equalBytes(input) }) it('should echo data using method', async () => { await start(echo) - const duplex = duplexPair() - const outgoingStream = stubInterface() - outgoingStream.source = duplex[0].source - outgoingStream.sink.callsFake(async source => duplex[0].sink(source)) - - const incomingStream = stubInterface() - incomingStream.source = duplex[1].source - incomingStream.sink.callsFake(async source => duplex[1].sink(source)) + const [outgoingStream, incomingStream] = await streamPair() const handler = components.registrar.handle.getCall(0).args[1] - handler({ - stream: incomingStream, - connection: stubInterface({ - log: defaultLogger().forComponent('connection') - }) - }) + handler(incomingStream, stubInterface()) const ma = multiaddr('/ip4/123.123.123.123/tcp/1234') diff --git a/packages/protocol-fetch/package.json b/packages/protocol-fetch/package.json index eb61efc771..1b175dbaed 100644 --- a/packages/protocol-fetch/package.json +++ b/packages/protocol-fetch/package.json @@ -47,7 +47,7 @@ "dependencies": { "@libp2p/interface": "^2.10.5", "@libp2p/interface-internal": "^2.3.18", - "it-protobuf-stream": "^2.0.2", + "@libp2p/utils": "^6.7.1", "main-event": "^1.0.1", "protons-runtime": "^5.5.0", "uint8arraylist": "^2.4.8", @@ -55,10 +55,8 @@ }, "devDependencies": { "@libp2p/crypto": "^5.1.7", - "@libp2p/logger": "^5.1.21", "@libp2p/peer-id": "^5.1.8", "aegir": "^47.0.14", - "it-pair": "^2.0.6", "protons": "^7.6.1", "sinon": "^20.0.0", "sinon-ts": "^2.0.0" diff --git a/packages/protocol-fetch/src/fetch.ts b/packages/protocol-fetch/src/fetch.ts index 739fed0d25..800af6ec2d 100644 --- a/packages/protocol-fetch/src/fetch.ts +++ b/packages/protocol-fetch/src/fetch.ts @@ -1,12 +1,12 @@ -import { AbortError, InvalidMessageError, InvalidParametersError, ProtocolError } from '@libp2p/interface' -import { pbStream } from 'it-protobuf-stream' +import { InvalidMessageError, InvalidParametersError, ProtocolError } from '@libp2p/interface' +import { pbStream } from '@libp2p/utils' import { setMaxListeners } from 'main-event' import { fromString as uint8arrayFromString } from 'uint8arrays/from-string' import { toString as uint8arrayToString } from 'uint8arrays/to-string' import { PROTOCOL_NAME, PROTOCOL_VERSION } from './constants.js' import { FetchRequest, FetchResponse } from './pb/proto.js' import type { Fetch as FetchInterface, FetchComponents, FetchInit, LookupFunction } from './index.js' -import type { AbortOptions, Stream, PeerId, Startable, IncomingStreamData } from '@libp2p/interface' +import type { AbortOptions, Stream, PeerId, Startable } from '@libp2p/interface' const DEFAULT_TIMEOUT = 10_000 @@ -28,24 +28,15 @@ export class Fetch implements Startable, FetchInterface { this.components = components this.protocol = `/${init.protocolPrefix ?? 
'libp2p'}/${PROTOCOL_NAME}/${PROTOCOL_VERSION}` this.lookupFunctions = new Map() // Maps key prefix to value lookup function - this.handleMessage = this.handleMessage.bind(this) this.init = init + + this.handleMessage = this.handleMessage.bind(this) } readonly [Symbol.toStringTag] = '@libp2p/fetch' async start (): Promise { - await this.components.registrar.handle(this.protocol, (data) => { - const log = data.connection.log.newScope('fetch') - - void this.handleMessage(data) - .then(async () => { - await data.stream.close() - }) - .catch(err => { - log.error('error handling message - %e', err) - }) - }, { + await this.components.registrar.handle(this.protocol, this.handleMessage, { maxInboundStreams: this.init.maxInboundStreams, maxOutboundStreams: this.init.maxOutboundStreams }) @@ -69,33 +60,25 @@ export class Fetch implements Startable, FetchInterface { key = uint8arrayFromString(key) } - const connection = await this.components.connectionManager.openConnection(peer, options) - const log = connection.log.newScope('fetch') - let signal = options.signal - let stream: Stream | undefined - let onAbort = (): void => {} - // create a timeout if no abort signal passed - if (signal == null) { + if (options.signal == null) { const timeout = this.init.timeout ?? DEFAULT_TIMEOUT - log.trace('using default timeout of %d ms', timeout) - signal = AbortSignal.timeout(timeout) - + const signal = AbortSignal.timeout(timeout) setMaxListeners(Infinity, signal) - } - try { - stream = await connection.newStream(this.protocol, { + options = { + ...options, signal - }) - - onAbort = () => { - stream?.abort(new AbortError()) } + } + + let stream: Stream | undefined - // make stream abortable - signal.addEventListener('abort', onAbort, { once: true }) + try { + const connection = await this.components.connectionManager.openConnection(peer, options) + stream = await connection.newStream(this.protocol, options) + const log = stream.log.newScope('fetch') log.trace('fetch %m', key) const pb = pbStream(stream) @@ -104,7 +87,8 @@ export class Fetch implements Startable, FetchInterface { }, FetchRequest, options) const response = await pb.read(FetchResponse, options) - await pb.unwrap().close(options) + + await stream.closeWrite(options) switch (response.status) { case (FetchResponse.StatusCode.OK): { @@ -128,11 +112,6 @@ export class Fetch implements Startable, FetchInterface { } catch (err: any) { stream?.abort(err) throw err - } finally { - signal.removeEventListener('abort', onAbort) - if (stream != null) { - await stream.close() - } } } @@ -141,57 +120,51 @@ export class Fetch implements Startable, FetchInterface { * responds based on looking up the key in the request via the lookup callback that corresponds * to the key's prefix. */ - async handleMessage (data: IncomingStreamData): Promise { - const { stream, connection } = data - const log = connection.log.newScope('fetch') + async handleMessage (stream: Stream): Promise { + const log = stream.log.newScope('fetch') const signal = AbortSignal.timeout(this.init.timeout ?? 
DEFAULT_TIMEOUT) - try { - const pb = pbStream(stream) - const request = await pb.read(FetchRequest, { - signal - }) + const pb = pbStream(stream) + const request = await pb.read(FetchRequest, { + signal + }) - let response: FetchResponse - const key = uint8arrayToString(request.identifier) + let response: FetchResponse + const key = uint8arrayToString(request.identifier) - const lookup = this._getLookupFunction(key) + const lookup = this._getLookupFunction(key) - if (lookup == null) { - log.trace('sending status ERROR for %m', request.identifier) - const errMsg = uint8arrayFromString('No lookup function registered for key') - response = { status: FetchResponse.StatusCode.ERROR, data: errMsg } - } else { - log.trace('lookup data with identifier %s', lookup.prefix) - - try { - const data = await lookup.fn(request.identifier) - - if (data == null) { - log.trace('sending status NOT_FOUND for %m', request.identifier) - response = { status: FetchResponse.StatusCode.NOT_FOUND, data: new Uint8Array(0) } - } else { - log.trace('sending status OK for %m', request.identifier) - response = { status: FetchResponse.StatusCode.OK, data } - } - } catch (err: any) { - log.error('error during lookup of %m - %e', request.identifier, err) - const errMsg = uint8arrayFromString(err.message) - response = { status: FetchResponse.StatusCode.ERROR, data: errMsg } + if (lookup == null) { + log.trace('sending status ERROR for %m', request.identifier) + const errMsg = uint8arrayFromString('No lookup function registered for key') + response = { status: FetchResponse.StatusCode.ERROR, data: errMsg } + } else { + log.trace('lookup data with identifier %s', lookup.prefix) + + try { + const data = await lookup.fn(request.identifier) + + if (data == null) { + log.trace('sending status NOT_FOUND for %m', request.identifier) + response = { status: FetchResponse.StatusCode.NOT_FOUND, data: new Uint8Array(0) } + } else { + log.trace('sending status OK for %m', request.identifier) + response = { status: FetchResponse.StatusCode.OK, data } } + } catch (err: any) { + log.error('error during lookup of %m - %e', request.identifier, err) + const errMsg = uint8arrayFromString(err.message) + response = { status: FetchResponse.StatusCode.ERROR, data: errMsg } } + } - await pb.write(response, FetchResponse, { - signal - }) + await pb.write(response, FetchResponse, { + signal + }) - await pb.unwrap().close({ - signal - }) - } catch (err: any) { - log.error('error answering fetch request - %e', err) - stream.abort(err) - } + await stream.closeWrite({ + signal + }) } /** diff --git a/packages/protocol-fetch/test/index.spec.ts b/packages/protocol-fetch/test/index.spec.ts index 73c01f27eb..0c19147b0d 100644 --- a/packages/protocol-fetch/test/index.spec.ts +++ b/packages/protocol-fetch/test/index.spec.ts @@ -2,11 +2,9 @@ import { generateKeyPair } from '@libp2p/crypto/keys' import { start, stop } from '@libp2p/interface' -import { defaultLogger } from '@libp2p/logger' import { peerIdFromPrivateKey } from '@libp2p/peer-id' +import { streamPair, pbStream } from '@libp2p/utils' import { expect } from 'aegir/chai' -import { duplexPair } from 'it-pair/duplex' -import { pbStream } from 'it-protobuf-stream' import sinon from 'sinon' import { stubInterface } from 'sinon-ts' import { fromString as uint8arrayFromString } from 'uint8arrays/from-string' @@ -29,19 +27,10 @@ async function createComponents (): Promise { } } -function createStreams (components: StubbedFetchComponents, remotePeer?: PeerId): { incomingStream: StubbedInstance, outgoingStream: 
StubbedInstance, connection: StubbedInstance } { - const duplex = duplexPair() - const outgoingStream = stubInterface() - outgoingStream.source = duplex[0].source - outgoingStream.sink.callsFake(async source => duplex[0].sink(source)) +async function createStreams (components: StubbedFetchComponents, remotePeer?: PeerId): Promise<{ incomingStream: Stream, outgoingStream: Stream, connection: StubbedInstance }> { + const [outgoingStream, incomingStream] = await streamPair() - const incomingStream = stubInterface() - incomingStream.source = duplex[1].source - incomingStream.sink.callsFake(async source => duplex[1].sink(source)) - - const connection = stubInterface({ - log: defaultLogger().forComponent('connection') - }) + const connection = stubInterface() if (remotePeer != null) { connection.newStream.withArgs('/libp2p/fetch/0.0.1').resolves(outgoingStream) @@ -85,7 +74,7 @@ describe('fetch', () => { const { incomingStream - } = createStreams(components, remotePeer) + } = await createStreams(components, remotePeer) const result = fetch.fetch(remotePeer, key) @@ -108,7 +97,7 @@ describe('fetch', () => { const { incomingStream - } = createStreams(components, remotePeer) + } = await createStreams(components, remotePeer) const result = fetch.fetch(remotePeer, key) @@ -130,7 +119,7 @@ describe('fetch', () => { const { incomingStream - } = createStreams(components, remotePeer) + } = await createStreams(components, remotePeer) const result = fetch.fetch(remotePeer, key) @@ -153,18 +142,14 @@ describe('fetch', () => { const { outgoingStream - } = createStreams(components, remotePeer) - - outgoingStream.abort.callsFake((err) => { - void outgoingStream.source.throw(err) - }) + } = await createStreams(components, remotePeer) await expect(fetch.fetch(remotePeer, key, { signal: AbortSignal.timeout(10) })).to.eventually.be.rejected .with.property('name', 'AbortError') - expect(outgoingStream.abort.called).to.be.true() + expect(outgoingStream).to.have.property('status', 'aborted') }) }) @@ -175,19 +160,16 @@ describe('fetch', () => { const { incomingStream, - outgoingStream, - connection - } = createStreams(components) + outgoingStream + } = await createStreams(components) fetch.registerLookupFunction('/test', async (k) => { expect(k).to.equalBytes(uint8arrayFromString(key)) return value }) - void fetch.handleMessage({ - stream: incomingStream, - connection - }) + fetch.handleMessage(incomingStream) + ?.catch(() => {}) const pb = pbStream(outgoingStream) @@ -196,7 +178,7 @@ describe('fetch', () => { }, FetchRequest) const response = await pb.read(FetchResponse) - expect(response.status).to.equal(FetchResponse.StatusCode.OK) + expect(response).to.have.property('status', FetchResponse.StatusCode.OK) expect(response.data).to.equalBytes(value) }) @@ -205,18 +187,15 @@ describe('fetch', () => { const { incomingStream, - outgoingStream, - connection - } = createStreams(components) + outgoingStream + } = await createStreams(components) fetch.registerLookupFunction('/test', async (k) => { return undefined }) - void fetch.handleMessage({ - stream: incomingStream, - connection - }) + fetch.handleMessage(incomingStream) + ?.catch(() => {}) const pb = pbStream(outgoingStream) @@ -225,7 +204,7 @@ describe('fetch', () => { }, FetchRequest) const response = await pb.read(FetchResponse) - expect(response.status).to.equal(FetchResponse.StatusCode.NOT_FOUND) + expect(response).to.have.property('status', FetchResponse.StatusCode.NOT_FOUND) }) it('should handle not having a handler for the key', async () => { @@ -233,14 
+212,11 @@ describe('fetch', () => { const { incomingStream, - outgoingStream, - connection - } = createStreams(components) + outgoingStream + } = await createStreams(components) - void fetch.handleMessage({ - stream: incomingStream, - connection - }) + fetch.handleMessage(incomingStream) + ?.catch(() => {}) const pb = pbStream(outgoingStream) @@ -252,23 +228,23 @@ describe('fetch', () => { expect(response.status).to.equal(FetchResponse.StatusCode.ERROR) }) - it('should time out sending data to another peer waiting for the request', async () => { + it('should throw when timing out sending data to another peer waiting for the request', async () => { fetch = new Fetch(components, { timeout: 10 }) const { - incomingStream, - connection - } = createStreams(components) + incomingStream + } = await createStreams(components) - await fetch.handleMessage({ - stream: incomingStream, - connection - }) + const errorPromise = Promise.withResolvers() + + fetch.handleMessage(incomingStream) + ?.catch((err) => { + errorPromise.resolve(err) + }) - expect(incomingStream.abort.called).to.be.true() - expect(incomingStream.abort.getCall(0).args[0]).to.have.property('name', 'AbortError') + await expect(errorPromise.promise).to.eventually.have.property('name', 'AbortError') }) }) }) diff --git a/packages/protocol-identify/package.json b/packages/protocol-identify/package.json index 108bddfaf5..f2da7f3f95 100644 --- a/packages/protocol-identify/package.json +++ b/packages/protocol-identify/package.json @@ -55,7 +55,6 @@ "@multiformats/multiaddr-matcher": "^2.0.0", "it-drain": "^3.0.9", "it-parallel": "^3.0.11", - "it-protobuf-stream": "^2.0.2", "main-event": "^1.0.1", "protons-runtime": "^5.5.0", "uint8arraylist": "^2.4.8", @@ -66,8 +65,6 @@ "aegir": "^47.0.14", "delay": "^6.0.0", "it-length-prefixed": "^10.0.1", - "it-pair": "^2.0.6", - "it-pushable": "^3.2.3", "protons": "^7.6.1", "sinon-ts": "^2.0.0" }, diff --git a/packages/protocol-identify/src/identify-push.ts b/packages/protocol-identify/src/identify-push.ts index 189fe165b0..c77dac1e8b 100644 --- a/packages/protocol-identify/src/identify-push.ts +++ b/packages/protocol-identify/src/identify-push.ts @@ -1,10 +1,9 @@ import { serviceCapabilities } from '@libp2p/interface' import { RecordEnvelope, PeerRecord } from '@libp2p/peer-record' -import { debounce } from '@libp2p/utils/debounce' +import { debounce, pbStream } from '@libp2p/utils' import { protocols } from '@multiformats/multiaddr' import drain from 'it-drain' import parallel from 'it-parallel' -import { pbStream } from 'it-protobuf-stream' import { setMaxListeners } from 'main-event' import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' import { toString as uint8ArrayToString } from 'uint8arrays/to-string' @@ -16,7 +15,7 @@ import { import { Identify as IdentifyMessage } from './pb/message.js' import { AbstractIdentify, consumeIdentifyMessage, defaultValues } from './utils.js' import type { IdentifyPush as IdentifyPushInterface, IdentifyPushComponents, IdentifyPushInit } from './index.js' -import type { Stream, Startable, IncomingStreamData } from '@libp2p/interface' +import type { Stream, Startable, Connection } from '@libp2p/interface' import type { ConnectionManager } from '@libp2p/interface-internal' export class IdentifyPush extends AbstractIdentify implements Startable, IdentifyPushInterface { @@ -79,7 +78,6 @@ export class IdentifyPush extends AbstractIdentify implements Startable, Identif async function * pushToConnections (): AsyncGenerator<() => Promise> { for (const 
connection of self.connectionManager.getConnections()) { const peer = await self.peerStore.get(connection.remotePeer) - const log = connection.log.newScope('identify-push') if (!peer.protocols.includes(self.protocol)) { continue @@ -111,12 +109,13 @@ export class IdentifyPush extends AbstractIdentify implements Startable, Identif signal }) - await stream.close({ + await stream.closeWrite({ signal }) } catch (err: any) { - // Just log errors - log.error('could not push identify update to peer', err) + // Just log errors if the stream was opened + const log = stream?.log.newScope('identify-push') + log?.error('could not push identify update to peer', err) stream?.abort(err) } } @@ -134,32 +133,25 @@ export class IdentifyPush extends AbstractIdentify implements Startable, Identif /** * Reads the Identify Push message from the given `connection` */ - async handleProtocol (data: IncomingStreamData): Promise { - const { connection, stream } = data - const log = connection.log.newScope('identify-push') + async handleProtocol (stream: Stream, connection: Connection): Promise { + const log = stream.log.newScope('identify-push') - try { - if (this.peerId.equals(connection.remotePeer)) { - throw new Error('received push from ourselves?') - } + if (this.peerId.equals(connection.remotePeer)) { + throw new Error('received push from ourselves?') + } - const options = { - signal: AbortSignal.timeout(this.timeout) - } + const options = { + signal: AbortSignal.timeout(this.timeout) + } - const pb = pbStream(stream, { - maxDataLength: this.maxMessageSize - }).pb(IdentifyMessage) + const pb = pbStream(stream, { + maxDataLength: this.maxMessageSize + }).pb(IdentifyMessage) - const message = await pb.read(options) - await stream.close(options) + const message = await pb.read(options) + await stream.closeWrite(options) - await consumeIdentifyMessage(this.peerStore, this.events, log, connection, message) - } catch (err: any) { - log.error('received invalid message', err) - stream.abort(err) - return - } + await consumeIdentifyMessage(this.peerStore, this.events, log, connection, message) log.trace('handled push from %p', connection.remotePeer) } diff --git a/packages/protocol-identify/src/identify.ts b/packages/protocol-identify/src/identify.ts index edc35b02e8..3245e22ee9 100644 --- a/packages/protocol-identify/src/identify.ts +++ b/packages/protocol-identify/src/identify.ts @@ -2,11 +2,9 @@ import { publicKeyFromProtobuf, publicKeyToProtobuf } from '@libp2p/crypto/keys' import { InvalidMessageError, UnsupportedProtocolError, serviceCapabilities } from '@libp2p/interface' import { peerIdFromCID } from '@libp2p/peer-id' import { RecordEnvelope, PeerRecord } from '@libp2p/peer-record' -import { isGlobalUnicast } from '@libp2p/utils/multiaddr/is-global-unicast' -import { isPrivate } from '@libp2p/utils/multiaddr/is-private' +import { isGlobalUnicast, isPrivate, pbStream } from '@libp2p/utils' import { CODE_IP6, CODE_IP6ZONE, protocols } from '@multiformats/multiaddr' import { IP_OR_DOMAIN, TCP } from '@multiformats/multiaddr-matcher' -import { pbStream } from 'it-protobuf-stream' import { setMaxListeners } from 'main-event' import { MULTICODEC_IDENTIFY_PROTOCOL_NAME, @@ -15,7 +13,7 @@ import { import { Identify as IdentifyMessage } from './pb/message.js' import { AbstractIdentify, consumeIdentifyMessage, defaultValues, getCleanMultiaddr } from './utils.js' import type { Identify as IdentifyInterface, IdentifyComponents, IdentifyInit } from './index.js' -import type { IdentifyResult, AbortOptions, Connection, 
Stream, Startable, IncomingStreamData, Logger } from '@libp2p/interface' +import type { IdentifyResult, AbortOptions, Connection, Stream, Startable } from '@libp2p/interface' export class Identify extends AbstractIdentify implements Startable, IdentifyInterface { constructor (components: IdentifyComponents, init: IdentifyInit = {}) { @@ -71,7 +69,7 @@ export class Identify extends AbstractIdentify implements Startable, IdentifyInt const message = await pb.read(options) - await stream.close(options) + await stream.closeWrite(options) return message } catch (err: any) { @@ -94,7 +92,6 @@ export class Identify extends AbstractIdentify implements Startable, IdentifyInt const key = publicKeyFromProtobuf(publicKey) const id = peerIdFromCID(key.toCID()) - const log = connection.log.newScope('identify') if (!connection.remotePeer.equals(id)) { throw new InvalidMessageError('Identified peer does not match the expected peer') @@ -106,31 +103,30 @@ export class Identify extends AbstractIdentify implements Startable, IdentifyInt // if the observed address is publicly routable, add it to the address // manager for verification via AutoNAT - this.maybeAddObservedAddress(observedAddr, log) + this.maybeAddObservedAddress(observedAddr) - log('completed for peer %p and protocols %o', id, protocols) + this.log('completed for peer %p and protocols %o', id, protocols) - return consumeIdentifyMessage(this.peerStore, this.events, log, connection, message) + return consumeIdentifyMessage(this.peerStore, this.events, this.log, connection, message) } - private maybeAddObservedAddress (observedAddr: Uint8Array | undefined, log: Logger): void { + private maybeAddObservedAddress (observedAddr: Uint8Array | undefined): void { const cleanObservedAddr = getCleanMultiaddr(observedAddr) if (cleanObservedAddr == null) { return } - log.trace('our observed address was %a', cleanObservedAddr) + this.log.trace('our observed address was %a', cleanObservedAddr) if (isPrivate(cleanObservedAddr)) { - this.log.trace('our observed address was private') return } const tuples = cleanObservedAddr.getComponents() if (((tuples[0].code === CODE_IP6) || (tuples[0].code === CODE_IP6ZONE && tuples[1].code === CODE_IP6)) && !isGlobalUnicast(cleanObservedAddr)) { - log.trace('our observed address was IPv6 but not a global unicast address') + this.log.trace('our observed address was IPv6 but not a global unicast address') return } @@ -142,7 +138,7 @@ export class Identify extends AbstractIdentify implements Startable, IdentifyInt return } - log.trace('storing the observed address') + this.log.trace('storing the observed address') this.addressManager.addObservedAddr(cleanObservedAddr) } @@ -150,55 +146,46 @@ export class Identify extends AbstractIdentify implements Startable, IdentifyInt * Sends the `Identify` response with the Signed Peer Record * to the requesting peer over the given `connection` */ - async handleProtocol (data: IncomingStreamData): Promise { - const { connection, stream } = data - const log = connection.log.newScope('identify') - + async handleProtocol (stream: Stream, connection: Connection): Promise { const signal = AbortSignal.timeout(this.timeout) - setMaxListeners(Infinity, signal) - try { - const peerData = await this.peerStore.get(this.peerId) - const multiaddrs = this.addressManager.getAddresses().map(ma => ma.decapsulateCode(protocols('p2p').code)) - let signedPeerRecord = peerData.peerRecordEnvelope - - if (multiaddrs.length > 0 && signedPeerRecord == null) { - const peerRecord = new PeerRecord({ - peerId: 
this.peerId, - multiaddrs - }) - - const envelope = await RecordEnvelope.seal(peerRecord, this.privateKey) - signedPeerRecord = envelope.marshal().subarray() - } + const peerData = await this.peerStore.get(this.peerId) + const multiaddrs = this.addressManager.getAddresses().map(ma => ma.decapsulateCode(protocols('p2p').code)) + let signedPeerRecord = peerData.peerRecordEnvelope - let observedAddr: Uint8Array | undefined = connection.remoteAddr.bytes + if (multiaddrs.length > 0 && signedPeerRecord == null) { + const peerRecord = new PeerRecord({ + peerId: this.peerId, + multiaddrs + }) - if (!IP_OR_DOMAIN.matches(connection.remoteAddr)) { - observedAddr = undefined - } + const envelope = await RecordEnvelope.seal(peerRecord, this.privateKey) + signedPeerRecord = envelope.marshal().subarray() + } - const pb = pbStream(stream).pb(IdentifyMessage) - - await pb.write({ - protocolVersion: this.host.protocolVersion, - agentVersion: this.host.agentVersion, - publicKey: publicKeyToProtobuf(this.privateKey.publicKey), - listenAddrs: multiaddrs.map(addr => addr.bytes), - signedPeerRecord, - observedAddr, - protocols: peerData.protocols - }, { - signal - }) + let observedAddr: Uint8Array | undefined = connection.remoteAddr.bytes - await stream.close({ - signal - }) - } catch (err: any) { - log.error('could not respond to identify request', err) - stream.abort(err) + if (!IP_OR_DOMAIN.matches(connection.remoteAddr)) { + observedAddr = undefined } + + const pb = pbStream(stream).pb(IdentifyMessage) + + await pb.write({ + protocolVersion: this.host.protocolVersion, + agentVersion: this.host.agentVersion, + publicKey: publicKeyToProtobuf(this.privateKey.publicKey), + listenAddrs: multiaddrs.map(addr => addr.bytes), + signedPeerRecord, + observedAddr, + protocols: peerData.protocols + }, { + signal + }) + + await stream.closeWrite({ + signal + }) } } diff --git a/packages/protocol-identify/src/utils.ts b/packages/protocol-identify/src/utils.ts index 046da832c8..34fee5a7d4 100644 --- a/packages/protocol-identify/src/utils.ts +++ b/packages/protocol-identify/src/utils.ts @@ -7,7 +7,7 @@ import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' import { IDENTIFY_PROTOCOL_VERSION, MAX_IDENTIFY_MESSAGE_SIZE, MAX_PUSH_CONCURRENCY } from './consts.js' import type { IdentifyComponents, IdentifyInit } from './index.js' import type { Identify as IdentifyMessage } from './pb/message.js' -import type { Libp2pEvents, IdentifyResult, SignedPeerRecord, Logger, Connection, Peer, PeerData, PeerStore, NodeInfo, Startable, PeerId, IncomingStreamData, PrivateKey } from '@libp2p/interface' +import type { Libp2pEvents, IdentifyResult, SignedPeerRecord, Logger, Connection, Peer, PeerData, PeerStore, NodeInfo, Startable, PeerId, PrivateKey, Stream } from '@libp2p/interface' import type { AddressManager, Registrar } from '@libp2p/interface-internal' import type { Multiaddr } from '@multiformats/multiaddr' import type { TypedEventTarget } from 'main-event' @@ -230,6 +230,8 @@ export abstract class AbstractIdentify implements Startable { protocolVersion: `${init.protocolPrefix ?? 
defaultValues.protocolPrefix}/${IDENTIFY_PROTOCOL_VERSION}`, agentVersion: getAgentVersion(components.nodeInfo, init.agentVersion) } + + this.handleProtocol = this.handleProtocol.bind(this) } isStarted (): boolean { @@ -248,11 +250,7 @@ export abstract class AbstractIdentify implements Startable { } }) - await this.registrar.handle(this.protocol, (data) => { - void this.handleProtocol(data).catch(err => { - this.log.error(err) - }) - }, { + await this.registrar.handle(this.protocol, this.handleProtocol, { maxInboundStreams: this.maxInboundStreams, maxOutboundStreams: this.maxOutboundStreams, runOnLimitedConnection: this.runOnLimitedConnection @@ -267,5 +265,5 @@ export abstract class AbstractIdentify implements Startable { this.started = false } - protected abstract handleProtocol (data: IncomingStreamData): Promise + protected abstract handleProtocol (stream: Stream, connection: Connection): Promise } diff --git a/packages/protocol-identify/test/fixtures/index.ts b/packages/protocol-identify/test/fixtures/index.ts deleted file mode 100644 index 0da0e5e1f4..0000000000 --- a/packages/protocol-identify/test/fixtures/index.ts +++ /dev/null @@ -1,55 +0,0 @@ -import { defaultLogger } from '@libp2p/logger' -import drain from 'it-drain' -import * as lp from 'it-length-prefixed' -import { pushable } from 'it-pushable' -import { stubInterface } from 'sinon-ts' -import { Uint8ArrayList } from 'uint8arraylist' -import { Identify as IdentifyMessage } from '../../src/pb/message.js' -import type { ComponentLogger, Libp2pEvents, NodeInfo, PeerId, PeerStore, Connection, Stream, PrivateKey } from '@libp2p/interface' -import type { AddressManager, ConnectionManager, Registrar } from '@libp2p/interface-internal' -import type { TypedEventTarget } from 'main-event' -import type { StubbedInstance } from 'sinon-ts' - -export interface StubbedIdentifyComponents { - peerId: PeerId - privateKey: PrivateKey - peerStore: StubbedInstance - connectionManager: StubbedInstance - registrar: StubbedInstance - addressManager: StubbedInstance - events: TypedEventTarget - logger: ComponentLogger - nodeInfo: NodeInfo -} - -export function connectionStream (remotePeer: PeerId, protocol: string): { connection: StubbedInstance, stream: StubbedInstance } { - const connection = stubInterface({ - remotePeer, - log: defaultLogger().forComponent('connection') - }) - const stream = stubInterface() - connection.newStream.withArgs(protocol).resolves(stream) - - stream.sink.callsFake(async (source) => { - await drain(source) - }) - - return { connection, stream } -} - -export function identifyStream (remotePeer: PeerId): { connection: StubbedInstance, stream: StubbedInstance } { - return connectionStream(remotePeer, '/ipfs/id/1.0.0') -} - -export function identifyPushStream (remotePeer: PeerId): { connection: StubbedInstance, stream: StubbedInstance } { - return connectionStream(remotePeer, '/ipfs/id/push/1.0.0') -} - -export function identifyConnection (remotePeer: PeerId, message: IdentifyMessage): StubbedInstance { - const { connection, stream } = identifyStream(remotePeer) - - const input = stream.source = pushable() - void input.push(new Uint8ArrayList(lp.encode.single(IdentifyMessage.encode(message)))) - - return connection -} diff --git a/packages/protocol-identify/test/index.spec.ts b/packages/protocol-identify/test/index.spec.ts index 39dcb3c6b2..aeb806339d 100644 --- a/packages/protocol-identify/test/index.spec.ts +++ b/packages/protocol-identify/test/index.spec.ts @@ -3,24 +3,29 @@ import { start, stop } from 
'@libp2p/interface' import { defaultLogger } from '@libp2p/logger' import { peerIdFromPrivateKey } from '@libp2p/peer-id' import { PeerRecord, RecordEnvelope } from '@libp2p/peer-record' +import { streamPair, pbStream } from '@libp2p/utils' import { multiaddr } from '@multiformats/multiaddr' import { expect } from 'aegir/chai' -import delay from 'delay' -import drain from 'it-drain' import * as lp from 'it-length-prefixed' -import { duplexPair } from 'it-pair/duplex' -import { pbStream } from 'it-protobuf-stream' -import { pushable } from 'it-pushable' import { TypedEventEmitter } from 'main-event' import { stubInterface } from 'sinon-ts' import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' import { Identify } from '../src/identify.js' import { Identify as IdentifyMessage } from '../src/pb/message.js' -import { identifyConnection, identifyStream } from './fixtures/index.js' -import type { StubbedIdentifyComponents } from './fixtures/index.js' -import type { Libp2pEvents, PeerStore, Connection, Stream } from '@libp2p/interface' -import type { AddressManager, ConnectionManager, Registrar } from '@libp2p/interface-internal' -import type { Uint8ArrayList } from 'uint8arraylist' +import type { Libp2pEvents, PeerStore, Connection, PeerId, PrivateKey, TypedEventTarget, ComponentLogger, NodeInfo } from '@libp2p/interface' +import type { AddressManager, Registrar } from '@libp2p/interface-internal' +import type { StubbedInstance } from 'sinon-ts' + +interface StubbedIdentifyComponents { + peerId: PeerId + privateKey: PrivateKey + peerStore: StubbedInstance + registrar: StubbedInstance + addressManager: StubbedInstance + events: TypedEventTarget + logger: ComponentLogger + nodeInfo: NodeInfo +} describe('identify', () => { let components: StubbedIdentifyComponents @@ -34,7 +39,6 @@ describe('identify', () => { peerId, privateKey, peerStore: stubInterface(), - connectionManager: stubInterface(), registrar: stubInterface(), addressManager: stubInterface(), events: new TypedEventEmitter(), @@ -76,7 +80,12 @@ describe('identify', () => { publicKey: publicKeyToProtobuf(remotePeer.publicKey) } - const connection = identifyConnection(remotePeer, message) + const [outgoingStream, incomingStream] = await streamPair() + incomingStream.send(lp.encode.single(IdentifyMessage.encode(message))) + const connection = stubInterface({ + remotePeer + }) + connection.newStream.withArgs('/ipfs/id/1.0.0').resolves(outgoingStream) // run identify const response = await identify.identify(connection) @@ -94,11 +103,16 @@ describe('identify', () => { const remotePeer = peerIdFromPrivateKey(await generateKeyPair('Ed25519')) const otherPeer = peerIdFromPrivateKey(await generateKeyPair('Ed25519')) - const connection = identifyConnection(remotePeer, { + const [outgoingStream, incomingStream] = await streamPair() + incomingStream.send(lp.encode.single(IdentifyMessage.encode({ listenAddrs: [], protocols: [], publicKey: publicKeyToProtobuf(otherPeer.publicKey) + }))) + const connection = stubInterface({ + remotePeer }) + connection.newStream.withArgs('/ipfs/id/1.0.0').resolves(outgoingStream) // run identify await expect(identify.identify(connection)) @@ -132,12 +146,14 @@ describe('identify', () => { await start(identify) const remotePeer = peerIdFromPrivateKey(await generateKeyPair('Ed25519')) - const { connection, stream } = identifyStream(remotePeer) - // eslint-disable-next-line require-yield - stream.source = (async function * () { - await delay(timeout * 10) - })() + const [outgoingStream] = await 
streamPair({ + delay: 1_000 + }) + const connection = stubInterface({ + remotePeer + }) + connection.newStream.withArgs('/ipfs/id/1.0.0').resolves(outgoingStream) // run identify with timeout await expect(identify.identify(connection, { @@ -146,7 +162,7 @@ describe('identify', () => { .to.eventually.be.rejected.with.property('name', 'AbortError') // should have aborted stream - expect(stream.abort.called).to.be.true() + expect(outgoingStream).to.have.property('status', 'aborted') }) it('should limit incoming identify message sizes', async () => { @@ -159,22 +175,19 @@ describe('identify', () => { await start(identify) const remotePeer = peerIdFromPrivateKey(await generateKeyPair('Ed25519')) - - const { connection, stream } = identifyStream(remotePeer) - - const input = stream.source = pushable() - stream.sink.callsFake(async (source) => { - await drain(source) + const [outgoingStream, incomingStream] = await streamPair() + incomingStream.send(lp.encode.single(new Uint8Array(maxMessageSize + 1))) + const connection = stubInterface({ + remotePeer }) - - void input.push(lp.encode.single(new Uint8Array(maxMessageSize + 1))) + connection.newStream.withArgs('/ipfs/id/1.0.0').resolves(outgoingStream) // run identify - await expect(identify.identify(connection)) - .to.eventually.be.rejected.with.property('name', 'InvalidDataLengthError') + await expect(identify.identify(connection)).to.eventually.be.rejected() + .with.property('name', 'InvalidDataLengthError') // should have aborted stream - expect(stream.abort.called).to.be.true() + expect(outgoingStream).to.have.property('status', 'aborted') }) it('should retain existing peer metadata when updating agent/protocol version', async () => { @@ -184,13 +197,18 @@ describe('identify', () => { const remotePeer = peerIdFromPrivateKey(await generateKeyPair('Ed25519')) - const connection = identifyConnection(remotePeer, { + const [outgoingStream, incomingStream] = await streamPair() + incomingStream.send(lp.encode.single(IdentifyMessage.encode({ listenAddrs: [], protocols: [], publicKey: publicKeyToProtobuf(remotePeer.publicKey), agentVersion: 'secret-agent', protocolVersion: '9000' + }))) + const connection = stubInterface({ + remotePeer }) + connection.newStream.withArgs('/ipfs/id/1.0.0').resolves(outgoingStream) // peer exists in peer store components.peerStore.get.withArgs(remotePeer).resolves({ @@ -233,12 +251,17 @@ describe('identify', () => { seqNumber: BigInt(1n) }), remotePrivateKey) - const connection = identifyConnection(remotePeer, { + const [outgoingStream, incomingStream] = await streamPair() + incomingStream.send(lp.encode.single(IdentifyMessage.encode({ listenAddrs: [], protocols: [], publicKey: publicKeyToProtobuf(remotePeer.publicKey), signedPeerRecord: oldPeerRecord.marshal() + }))) + const connection = stubInterface({ + remotePeer }) + connection.newStream.withArgs('/ipfs/id/1.0.0').resolves(outgoingStream) // peer exists in peer store with existing signed peer record const signedPeerRecord = await RecordEnvelope.seal(new PeerRecord({ @@ -284,7 +307,12 @@ describe('identify', () => { publicKey: publicKeyToProtobuf(remotePeer.publicKey) } - const connection = identifyConnection(remotePeer, message) + const [outgoingStream, incomingStream] = await streamPair() + incomingStream.send(lp.encode.single(IdentifyMessage.encode(message))) + const connection = stubInterface({ + remotePeer + }) + connection.newStream.withArgs('/ipfs/id/1.0.0').resolves(outgoingStream) // run identify await identify.identify(connection) @@ -327,7 +355,12 @@ 
describe('identify', () => { signedPeerRecord: peerRecordEnvelope } - const connection = identifyConnection(remotePeer, message) + const [outgoingStream, incomingStream] = await streamPair() + incomingStream.send(lp.encode.single(IdentifyMessage.encode(message))) + const connection = stubInterface({ + remotePeer + }) + connection.newStream.withArgs('/ipfs/id/1.0.0').resolves(outgoingStream) // run identify await identify.identify(connection) @@ -348,10 +381,6 @@ describe('identify', () => { await start(identify) - const duplex = duplexPair() - const incomingStream = stubInterface(duplex[0]) - const outgoingStream = stubInterface(duplex[1]) - components.addressManager.getAddresses.returns([]) // local peer data @@ -363,14 +392,20 @@ describe('identify', () => { tags: new Map() }) - // handle identify - void identify.handleProtocol({ - stream: incomingStream, - connection: stubInterface({ - remoteAddr: multiaddr('/webrtc/p2p/QmR5VwgsL7jyfZHAGyp66tguVrQhCRQuRc3NokocsCZ3fA'), - log: defaultLogger().forComponent('connection') - }) + const message: IdentifyMessage = { + listenAddrs: [], + protocols: [] + } + + const [outgoingStream, incomingStream] = await streamPair() + incomingStream.send(lp.encode.single(IdentifyMessage.encode(message))) + const connection = stubInterface({ + remoteAddr: multiaddr('/webrtc/p2p/QmR5VwgsL7jyfZHAGyp66tguVrQhCRQuRc3NokocsCZ3fA') }) + connection.newStream.withArgs('/ipfs/id/1.0.0').resolves(outgoingStream) + + // handle identify + void identify.handleProtocol(incomingStream, connection) const pb = pbStream(outgoingStream) const result = await pb.read(IdentifyMessage) @@ -394,7 +429,12 @@ describe('identify', () => { observedAddr: multiaddr('/ip6zone/en/ip6/fe80::2892:aef3:af04:735a').bytes } - const connection = identifyConnection(remotePeer, message) + const [outgoingStream, incomingStream] = await streamPair() + incomingStream.send(lp.encode.single(IdentifyMessage.encode(message))) + const connection = stubInterface({ + remotePeer + }) + connection.newStream.withArgs('/ipfs/id/1.0.0').resolves(outgoingStream) // run identify await identify.identify(connection) diff --git a/packages/protocol-identify/test/push.spec.ts b/packages/protocol-identify/test/push.spec.ts index 3ef897fa63..b486b49a28 100644 --- a/packages/protocol-identify/test/push.spec.ts +++ b/packages/protocol-identify/test/push.spec.ts @@ -2,19 +2,29 @@ import { generateKeyPair, publicKeyToProtobuf } from '@libp2p/crypto/keys' import { start, stop } from '@libp2p/interface' import { defaultLogger } from '@libp2p/logger' import { peerIdFromPrivateKey } from '@libp2p/peer-id' +import { streamPair, pbStream } from '@libp2p/utils' import { multiaddr } from '@multiformats/multiaddr' import { expect } from 'aegir/chai' import delay from 'delay' -import { pair } from 'it-pair' -import { pbStream } from 'it-protobuf-stream' import { TypedEventEmitter } from 'main-event' import { stubInterface } from 'sinon-ts' import { IdentifyPush } from '../src/identify-push.js' import { Identify as IdentifyMessage } from '../src/pb/message.js' -import { identifyPushStream } from './fixtures/index.js' -import type { StubbedIdentifyComponents } from './fixtures/index.js' -import type { Libp2pEvents, PeerStore } from '@libp2p/interface' +import type { ComponentLogger, Connection, Libp2pEvents, NodeInfo, PeerId, PeerStore, PrivateKey, TypedEventTarget } from '@libp2p/interface' import type { AddressManager, ConnectionManager, Registrar } from '@libp2p/interface-internal' +import type { StubbedInstance } from 'sinon-ts' + 
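As with the identify tests above, the rewritten push tests drive the handler through a real in-memory stream pair from `@libp2p/utils` instead of stubbing `stream.source`/`stream.sink`, and the handler itself now receives the stream and connection as separate arguments. A minimal sketch of that shape, assuming only the `streamPair` helper and stream API visible in this diff; the `echoHandler` function is hypothetical and exists purely to illustrate the new `(stream, connection)` signature:

```ts
import { streamPair } from '@libp2p/utils'
import { stubInterface } from 'sinon-ts'
import type { Connection, Stream } from '@libp2p/interface'

// hypothetical handler using the new (stream, connection) signature
async function echoHandler (stream: Stream, connection: Connection): Promise<void> {
  for await (const buf of stream) {
    // echo every received chunk back to the sender
    stream.send(buf)
  }

  await stream.closeWrite()
}

// exercise the handler in a test without a real connection
const [outgoingStream, incomingStream] = await streamPair()
const connection = stubInterface<Connection>()

void echoHandler(incomingStream, connection)

outgoingStream.send(Uint8Array.from([1, 2, 3]))
await outgoingStream.closeWrite()
```

Because both ends are real streams, the tests below can assert on observable state such as `status` being `'aborted'`, rather than spying on stubbed `abort` methods.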
+interface StubbedIdentifyComponents { + peerId: PeerId + privateKey: PrivateKey + peerStore: StubbedInstance + registrar: StubbedInstance + connectionManager: StubbedInstance + addressManager: StubbedInstance + events: TypedEventTarget + logger: ComponentLogger + nodeInfo: NodeInfo +} describe('identify (push)', () => { let components: StubbedIdentifyComponents @@ -60,10 +70,11 @@ describe('identify (push)', () => { await start(identify) const remotePeer = peerIdFromPrivateKey(await generateKeyPair('Ed25519')) - const { stream, connection } = identifyPushStream(remotePeer) - const duplex = pair() - stream.source = duplex.source - stream.sink.callsFake(async (source) => duplex.sink(source)) + const [outgoingStream, incomingStream] = await streamPair() + const connection = stubInterface({ + remotePeer + }) + connection.newStream.withArgs('/ipfs/id/push/1.0.0').resolves(outgoingStream) components.connectionManager.getConnections.returns([ connection @@ -93,7 +104,7 @@ describe('identify (push)', () => { // push update to connections void identify.push() - const pb = pbStream(stream) + const pb = pbStream(incomingStream) const message = await pb.read(IdentifyMessage) expect(message.protocols).to.include('/super/fun/protocol') @@ -107,15 +118,15 @@ describe('identify (push)', () => { await start(identify) const remotePeer = peerIdFromPrivateKey(await generateKeyPair('Ed25519')) - const { stream, connection } = identifyPushStream(remotePeer) - const duplex = pair() - stream.source = duplex.source - stream.sink.callsFake(async (source) => duplex.sink(source)) + const [outgoingStream, incomingStream] = await streamPair() + const connection = stubInterface({ + remotePeer + }) const updatedProtocol = '/special-new-protocol/1.0.0' const updatedAddress = multiaddr('/ip4/127.0.0.1/tcp/48322') - const pb = pbStream(stream) + const pb = pbStream(outgoingStream) void pb.write({ publicKey: publicKeyToProtobuf(remotePeer.publicKey), protocols: [ @@ -128,10 +139,7 @@ describe('identify (push)', () => { components.peerStore.patch.reset() - await identify.handleProtocol({ - stream, - connection - }) + await identify.handleProtocol(incomingStream, connection) expect(components.peerStore.patch.callCount).to.equal(1) const updatedId = components.peerStore.patch.getCall(0).args[0] @@ -150,20 +158,17 @@ describe('identify (push)', () => { await start(identify) const remotePeer = peerIdFromPrivateKey(await generateKeyPair('Ed25519')) - const { stream, connection } = identifyPushStream(remotePeer) - const duplex = pair() - stream.source = duplex.source - stream.sink.callsFake(async (source) => duplex.sink(source)) + const [, incomingStream] = await streamPair() + const connection = stubInterface({ + remotePeer + }) components.peerStore.patch.reset() - await expect(identify.handleProtocol({ - stream, - connection - })).to.eventually.be.undefined() + await expect(identify.handleProtocol(incomingStream, connection)).to.eventually.be.rejected + .with.property('name', 'AbortError') expect(components.peerStore.patch.callCount).to.equal(0, 'patched peer when push timed out') - expect(stream.abort.callCount).to.equal(1, 'did not abort stream') }) it('should debounce outgoing pushes', async () => { diff --git a/packages/protocol-perf/package.json b/packages/protocol-perf/package.json index 64f7c6c1dc..4d88950f14 100644 --- a/packages/protocol-perf/package.json +++ b/packages/protocol-perf/package.json @@ -48,14 +48,15 @@ "@libp2p/interface": "^2.10.5", "@libp2p/interface-internal": "^2.3.18", "@multiformats/multiaddr": 
"^12.4.4", - "it-pushable": "^3.2.3" + "it-pushable": "^3.2.3", + "race-event": "^1.6.1", + "uint8arraylist": "^2.4.8" }, "devDependencies": { - "@libp2p/interface-compliance-tests": "^6.4.16", "@libp2p/logger": "^5.1.21", + "@libp2p/utils": "^6.7.1", "aegir": "^47.0.14", "it-last": "^3.0.8", - "it-pair": "^2.0.6", "sinon-ts": "^2.0.0" }, "sideEffects": false diff --git a/packages/protocol-perf/src/perf-service.ts b/packages/protocol-perf/src/perf-service.ts index a052ff79ea..1f250e0cbf 100644 --- a/packages/protocol-perf/src/perf-service.ts +++ b/packages/protocol-perf/src/perf-service.ts @@ -1,7 +1,9 @@ import { pushable } from 'it-pushable' +import { raceEvent } from 'race-event' +import { Uint8ArrayList } from 'uint8arraylist' import { MAX_INBOUND_STREAMS, MAX_OUTBOUND_STREAMS, PROTOCOL_NAME, RUN_ON_LIMITED_CONNECTION, WRITE_BLOCK_SIZE } from './constants.js' import type { PerfOptions, PerfOutput, PerfComponents, PerfInit, Perf as PerfInterface } from './index.js' -import type { Logger, Startable, IncomingStreamData } from '@libp2p/interface' +import type { Logger, Startable, Stream } from '@libp2p/interface' import type { Multiaddr } from '@multiformats/multiaddr' export class Perf implements Startable, PerfInterface { @@ -25,16 +27,13 @@ export class Perf implements Startable, PerfInterface { this.maxInboundStreams = init.maxInboundStreams ?? MAX_INBOUND_STREAMS this.maxOutboundStreams = init.maxOutboundStreams ?? MAX_OUTBOUND_STREAMS this.runOnLimitedConnection = init.runOnLimitedConnection ?? RUN_ON_LIMITED_CONNECTION + this.handleMessage = this.handleMessage.bind(this) } readonly [Symbol.toStringTag] = '@libp2p/perf' async start (): Promise { - await this.components.registrar.handle(this.protocol, (data: IncomingStreamData) => { - void this.handleMessage(data).catch((err) => { - this.log.error('error handling perf protocol message - %e', err) - }) - }, { + await this.components.registrar.handle(this.protocol, this.handleMessage, { maxInboundStreams: this.maxInboundStreams, maxOutboundStreams: this.maxOutboundStreams, runOnLimitedConnection: this.runOnLimitedConnection @@ -51,18 +50,17 @@ export class Perf implements Startable, PerfInterface { return this.started } - async handleMessage (data: IncomingStreamData): Promise { - const { stream } = data - + async handleMessage (stream: Stream): Promise { try { const writeBlockSize = this.writeBlockSize let bytesToSendBack: number | undefined - for await (const buf of stream.source) { + for await (const buf of stream) { if (bytesToSendBack == null) { + const list = new Uint8ArrayList(buf) // downcast 64 to 52 bits to avoid bigint arithmetic performance penalty - bytesToSendBack = Number(buf.getBigUint64(0, false)) + bytesToSendBack = Number(list.getBigUint64(0, false)) } // Ingest all the data and wait for the read side to close @@ -74,17 +72,23 @@ export class Perf implements Startable, PerfInterface { const uint8Buf = new Uint8Array(this.buf, 0, this.buf.byteLength) - await stream.sink(async function * () { - while (bytesToSendBack > 0) { - let toSend: number = writeBlockSize - if (toSend > bytesToSendBack) { - toSend = bytesToSendBack - } + while (bytesToSendBack > 0) { + let toSend: number = writeBlockSize + if (toSend > bytesToSendBack) { + toSend = bytesToSendBack + } + + bytesToSendBack = bytesToSendBack - toSend + const buf = uint8Buf.subarray(0, toSend) - bytesToSendBack = bytesToSendBack - toSend - yield uint8Buf.subarray(0, toSend) + const sendMore = stream.send(buf) + + if (!sendMore) { + await raceEvent(stream, 'drain') } - 
}()) + } + + await stream.closeWrite() } catch (err: any) { stream.abort(err) } @@ -127,8 +131,12 @@ export class Perf implements Startable, PerfInterface { objectMode: true }) - stream.sink(async function * () { - yield uint8Buf.subarray(0, 8) + Promise.resolve().then(async () => { + const sendMore = stream.send(uint8Buf.subarray(0, 8)) + + if (!sendMore) { + await raceEvent(stream, 'drain') + } while (sendBytes > 0) { let toSend: number = writeBlockSize @@ -137,7 +145,11 @@ export class Perf implements Startable, PerfInterface { toSend = sendBytes } - yield uint8Buf.subarray(0, toSend) + const sendMore = stream.send(uint8Buf.subarray(0, toSend)) + + if (!sendMore) { + await raceEvent(stream, 'drain') + } sendBytes -= toSend @@ -160,7 +172,7 @@ export class Perf implements Startable, PerfInterface { } output.end() - }()) + }) .catch(err => { output.end(err) }) @@ -169,13 +181,15 @@ export class Perf implements Startable, PerfInterface { log('upload complete after %d ms', Date.now() - uploadStart) + await stream.closeWrite(options) + // Read the received bytes let lastAmountOfBytesReceived = 0 lastReportedTime = Date.now() let totalBytesReceived = 0 const downloadStart = Date.now() - for await (const buf of stream.source) { + for await (const buf of stream) { if (Date.now() - lastReportedTime > 1000) { yield { type: 'intermediary', @@ -208,7 +222,6 @@ export class Perf implements Startable, PerfInterface { } log('performed %s to %p', this.protocol, connection.remotePeer) - await stream.close() } catch (err: any) { log('error sending %d/%d bytes to %p: %s', totalBytesSent, sendBytes, connection.remotePeer, err) stream.abort(err) diff --git a/packages/protocol-perf/test/index.spec.ts b/packages/protocol-perf/test/index.spec.ts index f48b632436..dc9a98a8fe 100644 --- a/packages/protocol-perf/test/index.spec.ts +++ b/packages/protocol-perf/test/index.spec.ts @@ -1,12 +1,11 @@ /* eslint-env mocha */ import { start, stop } from '@libp2p/interface' -import { streamPair } from '@libp2p/interface-compliance-tests/mocks' import { defaultLogger } from '@libp2p/logger' +import { streamPair } from '@libp2p/utils' import { multiaddr } from '@multiformats/multiaddr' import { expect } from 'aegir/chai' import last from 'it-last' -import { duplexPair } from 'it-pair/duplex' import { stubInterface } from 'sinon-ts' import { Perf } from '../src/perf-service.js' import type { ComponentLogger, Connection } from '@libp2p/interface' @@ -57,22 +56,18 @@ describe('perf', () => { // simulate connection between nodes const ma = multiaddr('/ip4/0.0.0.0') - const duplexes = duplexPair() - const streams = streamPair({ duplex: duplexes[0] }, { duplex: duplexes[1] }) + const [outboundStream, inboundStream] = await streamPair() const aToB = stubInterface({ log: defaultLogger().forComponent('connection') }) - aToB.newStream.resolves(streams[0]) + aToB.newStream.resolves(outboundStream) localComponents.connectionManager.openConnection.withArgs(ma, { force: true }).resolves(aToB) localComponents.connectionManager.getConnections.returns([]) - const bToA = stubInterface({ - log: defaultLogger().forComponent('connection') - }) - void server.handleMessage({ stream: streams[1], connection: bToA }) + void server.handleMessage(inboundStream) // Run Perf const finalResult = await last(client.measurePerformance(ma, 1024, 1024)) @@ -92,8 +87,7 @@ describe('perf', () => { // simulate connection between nodes const ma = multiaddr('/ip4/0.0.0.0') - const duplexes = duplexPair() - const streams = streamPair({ duplex: duplexes[0] }, { 
duplex: duplexes[1] }) + const streams = await streamPair() const aToB = stubInterface({ log: defaultLogger().forComponent('connection') @@ -102,10 +96,7 @@ describe('perf', () => { localComponents.connectionManager.openConnection.resolves(aToB) localComponents.connectionManager.getConnections.returns([]) - const bToA = stubInterface({ - log: defaultLogger().forComponent('connection') - }) - void server.handleMessage({ stream: streams[1], connection: bToA }) + void server.handleMessage(streams[1]) // Run Perf const finalResult = await last(client.measurePerformance(ma, 1024, 1024, { diff --git a/packages/protocol-perf/test/run._ts b/packages/protocol-perf/test/run._ts deleted file mode 100644 index 79dc00f4e8..0000000000 --- a/packages/protocol-perf/test/run._ts +++ /dev/null @@ -1,63 +0,0 @@ -/* eslint-disable no-console */ -import { noise } from '@chainsafe/libp2p-noise' -import { yamux } from '@chainsafe/libp2p-yamux' -import { mplex } from '@libp2p/mplex' -import { plaintext } from '@libp2p/plaintext' -import { tcp } from '@libp2p/tcp' -import { createLibp2p, type Libp2p } from 'libp2p' -import { perf, type PerfOutput, type Perf } from '../src/index.js' - -const ONE_MEG = 1024 * 1024 -const DOWNLOAD_BYTES = ONE_MEG * 1024 * 5 - -async function createNode (): Promise> { - return createLibp2p({ - addresses: { - listen: [ - '/ip4/0.0.0.0/tcp/0' - ] - }, - transports: [ - tcp() - ], - connectionEncrypters: [ - noise(), plaintext() - ], - streamMuxers: [ - yamux(), mplex() - ], - services: { - perf: perf() - }, - connectionManager: { - minConnections: 0 - } - }) -} - -const libp2p1 = await createNode() -const libp2p2 = await createNode() - -let last: PerfOutput | undefined - -for await (const output of libp2p1.services.perf.measurePerformance(libp2p2.getMultiaddrs()[0], 0, DOWNLOAD_BYTES)) { - last = output - console.info(output) - - console.info((output.downloadBytes / (1024 * 1024)) / output.timeSeconds, 'MB/s') -} - -if (last?.type === 'final') { - console.info((last.downloadBytes / (1024 * 1024)) / last.timeSeconds, 'MB/s') -} - -await libp2p1.stop() -await libp2p2.stop() - -// plaintext/yamux - 1354 MB/s -// plaintext/mplex - 34478 MB/s -// noise/yamux - 60 MB/s -// noise/mplex - 62 MB/s - -// noise/yamux/native crypto - 282 MB/s -// noise/mplex/native crypto - 420 MB/s diff --git a/packages/protocol-ping/package.json b/packages/protocol-ping/package.json index 860bbd2674..1c40e56af2 100644 --- a/packages/protocol-ping/package.json +++ b/packages/protocol-ping/package.json @@ -46,8 +46,8 @@ "@libp2p/crypto": "^5.1.7", "@libp2p/interface": "^2.10.5", "@libp2p/interface-internal": "^2.3.18", + "@libp2p/utils": "^6.7.1", "@multiformats/multiaddr": "^12.4.4", - "it-byte-stream": "^2.0.2", "main-event": "^1.0.1", "uint8arrays": "^5.1.0" }, @@ -55,8 +55,8 @@ "@libp2p/logger": "^5.1.21", "@libp2p/peer-id": "^5.1.8", "aegir": "^47.0.14", - "it-pair": "^2.0.6", - "p-defer": "^4.0.1", + "delay": "^6.0.0", + "sinon": "^21.0.0", "sinon-ts": "^2.0.0" }, "sideEffects": false diff --git a/packages/protocol-ping/src/ping.ts b/packages/protocol-ping/src/ping.ts index ab6ce1e15b..cb2d763b96 100644 --- a/packages/protocol-ping/src/ping.ts +++ b/packages/protocol-ping/src/ping.ts @@ -1,11 +1,11 @@ import { randomBytes } from '@libp2p/crypto' -import { ProtocolError, TimeoutError, serviceCapabilities } from '@libp2p/interface' -import { byteStream } from 'it-byte-stream' +import { ProtocolError, serviceCapabilities } from '@libp2p/interface' +import { byteStream } from '@libp2p/utils' import { 
setMaxListeners } from 'main-event' import { equals as uint8ArrayEquals } from 'uint8arrays/equals' import { PROTOCOL_PREFIX, PROTOCOL_NAME, PING_LENGTH, PROTOCOL_VERSION, TIMEOUT, MAX_INBOUND_STREAMS, MAX_OUTBOUND_STREAMS } from './constants.js' import type { PingComponents, PingInit, Ping as PingInterface } from './index.js' -import type { AbortOptions, Stream, PeerId, Startable, IncomingStreamData } from '@libp2p/interface' +import type { AbortOptions, Stream, PeerId, Startable, Connection } from '@libp2p/interface' import type { Multiaddr } from '@multiformats/multiaddr' export class Ping implements Startable, PingInterface { @@ -26,7 +26,7 @@ export class Ping implements Startable, PingInterface { this.maxOutboundStreams = init.maxOutboundStreams ?? MAX_OUTBOUND_STREAMS this.runOnLimitedConnection = init.runOnLimitedConnection ?? true - this.handleMessage = this.handleMessage.bind(this) + this.handlePing = this.handlePing.bind(this) } readonly [Symbol.toStringTag] = '@libp2p/ping' @@ -36,7 +36,7 @@ export class Ping implements Startable, PingInterface { ] async start (): Promise { - await this.components.registrar.handle(this.protocol, this.handleMessage, { + await this.components.registrar.handle(this.protocol, this.handlePing, { maxInboundStreams: this.maxInboundStreams, maxOutboundStreams: this.maxOutboundStreams, runOnLimitedConnection: this.runOnLimitedConnection @@ -56,60 +56,31 @@ export class Ping implements Startable, PingInterface { /** * A handler to register with Libp2p to process ping messages */ - handleMessage (data: IncomingStreamData): void { - const log = data.connection.log.newScope('ping') + async handlePing (stream: Stream, connection: Connection): Promise { + const log = stream.log.newScope('ping') + log.trace('ping from %p', connection.remotePeer) - log.trace('ping from %p', data.connection.remotePeer) + const signal = AbortSignal.timeout(this.timeout) + setMaxListeners(Infinity, signal) - const { stream } = data const start = Date.now() const bytes = byteStream(stream) - let pinged = false - - Promise.resolve().then(async () => { - while (true) { - const signal = AbortSignal.timeout(this.timeout) - setMaxListeners(Infinity, signal) - signal.addEventListener('abort', () => { - stream?.abort(new TimeoutError('ping timeout')) - }) - - const buf = await bytes.read({ - bytes: PING_LENGTH, - signal - }) - await bytes.write(buf, { - signal - }) - pinged = true - } - }) - .catch(err => { - // ignore the error if we've processed at least one ping, the remote - // closed the stream and we handled or are handling the close cleanly - if (pinged && err.name === 'UnexpectedEOFError' && stream.readStatus !== 'ready') { - return - } - - log.error('ping from %p failed with error - %e', data.connection.remotePeer, err) - stream?.abort(err) + while (stream.readStatus === 'readable') { + const buf = await bytes.read({ + bytes: PING_LENGTH, + signal + }) + await bytes.write(buf, { + signal }) - .finally(() => { - const ms = Date.now() - start - log('ping from %p complete in %dms', data.connection.remotePeer, ms) - const signal = AbortSignal.timeout(this.timeout) - setMaxListeners(Infinity, signal) + log('ping from %p complete in %dms', connection.remotePeer, Date.now() - start) + } - stream.close({ - signal - }) - .catch(err => { - log.error('error closing ping stream from %p - %e', data.connection.remotePeer, err) - stream?.abort(err) - }) - }) + await stream.closeWrite({ + signal + }) } /** @@ -149,6 +120,8 @@ export class Ping implements Startable, PingInterface { const ms = 
Date.now() - start + stream.closeWrite() + if (!uint8ArrayEquals(data, result.subarray())) { throw new ProtocolError(`Received wrong ping ack after ${ms}ms`) } @@ -163,9 +136,7 @@ export class Ping implements Startable, PingInterface { throw err } finally { - if (stream != null) { - await stream.close(options) - } + stream?.closeWrite() } } } diff --git a/packages/protocol-ping/test/index.spec.ts b/packages/protocol-ping/test/index.spec.ts index 74c84d6c8d..a0e039df04 100644 --- a/packages/protocol-ping/test/index.spec.ts +++ b/packages/protocol-ping/test/index.spec.ts @@ -4,15 +4,14 @@ import { generateKeyPair } from '@libp2p/crypto/keys' import { start } from '@libp2p/interface' import { defaultLogger } from '@libp2p/logger' import { peerIdFromPrivateKey } from '@libp2p/peer-id' +import { streamPair, byteStream } from '@libp2p/utils' import { expect } from 'aegir/chai' -import { byteStream } from 'it-byte-stream' -import { pair } from 'it-pair' -import { duplexPair } from 'it-pair/duplex' -import pDefer from 'p-defer' +import delay from 'delay' +import Sinon from 'sinon' import { stubInterface } from 'sinon-ts' -import { PING_PROTOCOL } from '../src/constants.js' +import { PING_LENGTH, PING_PROTOCOL } from '../src/constants.js' import { Ping } from '../src/ping.js' -import type { Stream, Connection } from '@libp2p/interface' +import type { Connection } from '@libp2p/interface' import type { ConnectionManager, Registrar } from '@libp2p/interface-internal' import type { StubbedInstance } from 'sinon-ts' @@ -21,17 +20,6 @@ interface StubbedPingServiceComponents { connectionManager: StubbedInstance } -function echoStream (): StubbedInstance { - const stream = stubInterface() - - // make stream input echo to stream output - const duplex: any = pair() - stream.source = duplex.source - stream.sink = duplex.sink - - return stream -} - describe('ping', () => { let components: StubbedPingServiceComponents let ping: Ping @@ -51,14 +39,23 @@ describe('ping', () => { it('should be able to ping another peer', async () => { const remotePeer = peerIdFromPrivateKey(await generateKeyPair('Ed25519')) + const [outgoingStream, incomingStream] = await streamPair() + + void Promise.resolve() + .then(async () => { + for await (const buf of incomingStream) { + incomingStream.send(buf) + } + + incomingStream.closeWrite() + }) const connection = stubInterface({ log: defaultLogger().forComponent('connection') }) components.connectionManager.openConnection.withArgs(remotePeer).resolves(connection) - const stream = echoStream() - connection.newStream.withArgs(PING_PROTOCOL).resolves(stream) + connection.newStream.withArgs(PING_PROTOCOL).resolves(outgoingStream) // Run ping await expect(ping.ping(remotePeer)).to.eventually.be.gte(0) @@ -68,24 +65,29 @@ describe('ping', () => { const timeout = 10 const remotePeer = peerIdFromPrivateKey(await generateKeyPair('Ed25519')) + const [outgoingStream, incomingStream] = await streamPair({ + delay: 1_000 + }) + + void Promise.resolve() + .then(async () => { + for await (const buf of incomingStream) { + incomingStream.send(buf) + } + + incomingStream.closeWrite() + }) + const connection = stubInterface({ log: defaultLogger().forComponent('connection') }) components.connectionManager.openConnection.withArgs(remotePeer).resolves(connection) - const stream = echoStream() - const deferred = pDefer() - // eslint-disable-next-line require-yield - stream.source = (async function * () { - await deferred.promise - })() - stream.abort.callsFake((err) => { - deferred.reject(err) - }) - 
connection.newStream.withArgs(PING_PROTOCOL).resolves(stream) + connection.newStream.withArgs(PING_PROTOCOL).resolves(outgoingStream) // 10 ms timeout const signal = AbortSignal.timeout(timeout) + const outgoingStreamAbortSpy = Sinon.spy(outgoingStream, 'abort') // Run ping, should time out await expect(ping.ping(remotePeer, { @@ -94,69 +96,64 @@ describe('ping', () => { .with.property('name', 'AbortError') // should have aborted stream - expect(stream.abort).to.have.property('called', true) + expect(outgoingStreamAbortSpy).to.have.property('called', true) }) it('should handle incoming ping', async () => { - const duplex = duplexPair() - const incomingStream = stubInterface(duplex[0]) - const outgoingStream = stubInterface(duplex[1]) - + const [outgoingStream, incomingStream] = await streamPair() const handler = components.registrar.handle.getCall(0).args[1] // handle incoming ping stream - handler({ - stream: incomingStream, - connection: stubInterface({ - log: defaultLogger().forComponent('connection') - }) - }) + handler(incomingStream, stubInterface()) + ?.catch(() => {}) const b = byteStream(outgoingStream) - const input = new Uint8Array(32).fill(1) - void b.write(input) - const output = await b.read() - expect(output).to.equalBytes(input) + const input = new Uint8Array(PING_LENGTH).fill(1) + outgoingStream.log('write ping 1') + await b.write(input) + const output = await b.read({ + bytes: PING_LENGTH + }) + expect(output?.subarray()).to.equalBytes(input) + + // the spec allows sending more than one ping on a stream + const input2 = new Uint8Array(PING_LENGTH).fill(2) + outgoingStream.log('write ping 2') + await b.write(input2) + const output2 = await b.read({ + bytes: PING_LENGTH + }) + expect(output2.subarray()).to.equalBytes(input2) - const input2 = new Uint8Array(32).fill(2) - void b.write(input2) - const output2 = await b.read() - expect(output2).to.equalBytes(input2) + await outgoingStream.closeWrite() }) - it('should abort stream if sending stalls', async () => { - const deferred = pDefer() - - const duplex = duplexPair() - const incomingStream = stubInterface({ - ...duplex[0], - abort: (err) => { - deferred.resolve(err) - } - }) - const outgoingStream = stubInterface(duplex[1]) - + it('should throw if sending stalls', async () => { + const [outgoingStream, incomingStream] = await streamPair() const handler = components.registrar.handle.getCall(0).args[1] + const errorPromise = Promise.withResolvers() // handle incoming ping stream - handler({ - stream: incomingStream, - connection: stubInterface({ - log: defaultLogger().forComponent('connection') + handler(incomingStream, stubInterface()) + ?.catch((err) => { + errorPromise.resolve(err) }) - }) const b = byteStream(outgoingStream) // send a ping message plus a few extra bytes - void b.write(new Uint8Array(35)) + await b.write(new Uint8Array(35)) + + const pong = await b.read({ + bytes: PING_LENGTH + }) + expect(pong).to.have.lengthOf(PING_LENGTH) - const pong = await b.read() - expect(pong).to.have.lengthOf(32) + // ping messages have to be 32 bytes - we've sent 35 and will not send the + // remaining 29 bytes + await delay(200) - // never send the remaining 29 bytes (e.g. 
64 - 35) - const err = await deferred.promise - expect(err).to.have.property('name', 'TimeoutError') + await expect(errorPromise.promise).to.eventually.have.property('name', 'AbortError') }) }) diff --git a/packages/pubsub-floodsub/package.json b/packages/pubsub-floodsub/package.json index b70a0d457d..54673bc07a 100644 --- a/packages/pubsub-floodsub/package.json +++ b/packages/pubsub-floodsub/package.json @@ -61,7 +61,7 @@ }, "devDependencies": { "@libp2p/crypto": "^5.1.7", - "@libp2p/interface-compliance-tests": "^6.4.16", + "@libp2p/interface-internal": "^2.3.18", "@libp2p/logger": "^5.1.21", "@libp2p/peer-collections": "^6.0.34", "@libp2p/peer-id": "^5.1.8", @@ -71,7 +71,8 @@ "multiformats": "^13.3.6", "p-wait-for": "^5.0.2", "protons": "^7.6.1", - "sinon": "^20.0.0" + "sinon": "^20.0.0", + "sinon-ts": "^2.0.0" }, "sideEffects": false } diff --git a/packages/pubsub-floodsub/test/compliance.spec.ts b/packages/pubsub-floodsub/test/compliance.spec.ts deleted file mode 100644 index 78bea1def2..0000000000 --- a/packages/pubsub-floodsub/test/compliance.spec.ts +++ /dev/null @@ -1,21 +0,0 @@ -/* eslint-env mocha */ - -import tests from '@libp2p/interface-compliance-tests/pubsub' -import { floodsub } from '../src/index.js' - -describe('interface compliance', () => { - tests({ - async setup (args) { - if (args == null) { - throw new Error('PubSubOptions is required') - } - - const pubsub = floodsub(args.init)(args.components) - - return pubsub - }, - async teardown () { - - } - }) -}) diff --git a/packages/pubsub-floodsub/test/floodsub.spec.ts b/packages/pubsub-floodsub/test/floodsub.spec.ts index bcdc3443f3..4997ad8b41 100644 --- a/packages/pubsub-floodsub/test/floodsub.spec.ts +++ b/packages/pubsub-floodsub/test/floodsub.spec.ts @@ -2,7 +2,6 @@ import { generateKeyPair } from '@libp2p/crypto/keys' import { StrictNoSign, start, stop } from '@libp2p/interface' -import { mockRegistrar } from '@libp2p/interface-compliance-tests/mocks' import { defaultLogger } from '@libp2p/logger' import { PeerSet } from '@libp2p/peer-collections' import { peerIdFromPrivateKey } from '@libp2p/peer-id' @@ -11,22 +10,27 @@ import { expect } from 'aegir/chai' import { sha256 } from 'multiformats/hashes/sha2' import pWaitFor from 'p-wait-for' import sinon from 'sinon' +import { stubInterface } from 'sinon-ts' import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' import { toString as uint8ArrayToString } from 'uint8arrays/to-string' import { floodsub, multicodec } from '../src/index.js' import type { Message, PubSubRPC } from '@libp2p/interface' +import type { Registrar } from '@libp2p/interface-internal' +import type { StubbedInstance } from 'sinon-ts' const topic = 'my-topic' const message = uint8ArrayFromString('a neat message') describe('floodsub', () => { let pubsub: any + let registrar: StubbedInstance before(async () => { expect(multicodec).to.exist() const privateKey = await generateKeyPair('Ed25519') const peerId = peerIdFromPrivateKey(privateKey) + registrar = stubInterface() pubsub = floodsub({ emitSelf: true, @@ -34,7 +38,7 @@ describe('floodsub', () => { })({ peerId, privateKey, - registrar: mockRegistrar(), + registrar, logger: defaultLogger() }) }) diff --git a/packages/pubsub/package.json b/packages/pubsub/package.json index 3cd470ffbf..c521ca6350 100644 --- a/packages/pubsub/package.json +++ b/packages/pubsub/package.json @@ -78,13 +78,13 @@ "@libp2p/interface-internal": "^2.3.18", "@libp2p/peer-collections": "^6.0.34", "@libp2p/peer-id": "^5.1.8", - "@libp2p/utils": "^6.7.1", 
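The perf service above and the pubsub peer-streams change below both replace the old `stream.sink(async function * () { ... })` pattern with a push-style write loop: `stream.send()` returns `false` when the write buffer is full, and the writer waits for a `'drain'` event before queueing more data. A minimal sketch of that loop, assuming the stream API used throughout this diff; the `writeAll` helper is hypothetical:

```ts
import { raceEvent } from 'race-event'
import type { Stream } from '@libp2p/interface'

// hypothetical helper showing the send/drain backpressure loop used by the
// perf service and pubsub peer-streams in this change set
async function writeAll (stream: Stream, chunks: Uint8Array[]): Promise<void> {
  for (const chunk of chunks) {
    // send() returns false when the internal write buffer is full
    const sendMore = stream.send(chunk)

    if (!sendMore) {
      // wait for the muxer to flush queued data before sending more
      await raceEvent(stream, 'drain')
    }
  }

  // end our side of the stream - the remote can still send a reply
  await stream.closeWrite()
}
```

Awaiting `closeWrite()` rather than `close()` only half-closes the stream, which is why the perf client can still iterate over the incoming bytes to measure the download after its upload loop finishes.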
"it-length-prefixed": "^10.0.1", "it-pipe": "^3.0.1", "it-pushable": "^3.2.3", "main-event": "^1.0.1", "multiformats": "^13.3.6", "p-queue": "^8.1.0", + "race-event": "^1.6.1", "uint8arraylist": "^2.4.8", "uint8arrays": "^5.1.0" }, diff --git a/packages/pubsub/src/index.ts b/packages/pubsub/src/index.ts index cb6e0a7bbe..da08e96903 100644 --- a/packages/pubsub/src/index.ts +++ b/packages/pubsub/src/index.ts @@ -41,7 +41,7 @@ import { verifySignature } from './sign.js' import { toMessage, ensureArray, noSignMsgId, msgId, toRpcMessage, randomSeqno } from './utils.js' -import type { PubSub, Message, StrictNoSign, StrictSign, PubSubInit, PubSubEvents, PeerStreams, PubSubRPCMessage, PubSubRPC, PubSubRPCSubscription, SubscriptionChangeData, PublishResult, TopicValidatorFn, ComponentLogger, Logger, Connection, PeerId, PrivateKey, IncomingStreamData } from '@libp2p/interface' +import type { PubSub, Message, StrictNoSign, StrictSign, PubSubInit, PubSubEvents, PeerStreams, PubSubRPCMessage, PubSubRPC, PubSubRPCSubscription, SubscriptionChangeData, PublishResult, TopicValidatorFn, ComponentLogger, Logger, Connection, PeerId, PrivateKey, Stream } from '@libp2p/interface' import type { Registrar } from '@libp2p/interface-internal' import type { Uint8ArrayList } from 'uint8arraylist' @@ -207,8 +207,7 @@ export abstract class PubSubBaseProtocol = Pu /** * On an inbound stream opened */ - protected _onIncomingStream (data: IncomingStreamData): void { - const { stream, connection } = data + protected _onIncomingStream (stream: Stream, connection: Connection): void { const peerId = connection.remotePeer if (stream.protocol == null) { diff --git a/packages/pubsub/src/peer-streams.ts b/packages/pubsub/src/peer-streams.ts index 44f4f33a00..2f926092da 100644 --- a/packages/pubsub/src/peer-streams.ts +++ b/packages/pubsub/src/peer-streams.ts @@ -1,8 +1,9 @@ -import { closeSource } from '@libp2p/utils/close-source' +import { AbortError } from '@libp2p/interface' import * as lp from 'it-length-prefixed' import { pipe } from 'it-pipe' import { pushable } from 'it-pushable' import { TypedEventEmitter } from 'main-event' +import { raceEvent } from 'race-event' import { Uint8ArrayList } from 'uint8arraylist' import type { ComponentLogger, Logger, Stream, PeerId, PeerStreamEvents } from '@libp2p/interface' import type { DecoderOptions as LpDecoderOptions } from 'it-length-prefixed' @@ -93,7 +94,7 @@ export class PeerStreams extends TypedEventEmitter { */ attachInboundStream (stream: Stream, decoderOptions?: DecoderOptions): AsyncIterable { const abortListener = (): void => { - closeSource(stream.source, this.log) + stream.abort(new AbortError()) } this._inboundAbortController.signal.addEventListener('abort', abortListener, { @@ -145,7 +146,15 @@ export class PeerStreams extends TypedEventEmitter { pipe( this.outboundStream, (source) => lp.encode(source), - this._rawOutboundStream + async (source) => { + for await (const buf of source) { + const sendMore = stream.send(buf) + + if (sendMore === false) { + await raceEvent(stream, 'drain') + } + } + } ).catch((err: Error) => { this.log.error(err) }) diff --git a/packages/pubsub/test/lifecycle.spec.ts b/packages/pubsub/test/lifecycle.spec.ts index 39abc29a10..aa6ac626bf 100644 --- a/packages/pubsub/test/lifecycle.spec.ts +++ b/packages/pubsub/test/lifecycle.spec.ts @@ -8,8 +8,7 @@ import { PubSubBaseProtocol } from '../src/index.js' import { PubsubImplementation, ConnectionPair, - MockRegistrar, - mockIncomingStreamEvent + MockRegistrar } from './utils/index.js' import type 
{ PeerId, PublishResult, PubSubRPC, PubSubRPCMessage } from '@libp2p/interface' import type { Registrar } from '@libp2p/interface-internal' @@ -168,7 +167,7 @@ describe('pubsub base life cycle', () => { // Notify peers of connection topologyA.onConnect?.(peerIdB, c0) - handlerB.handler(await mockIncomingStreamEvent(protocol, c1, peerIdA)) + handlerB.handler(await c1.newStream([protocol]), c1) expect(pubsubA.peers.size).to.be.eql(1) expect(pubsubB.peers.size).to.be.eql(1) @@ -189,7 +188,7 @@ describe('pubsub base life cycle', () => { sinon.spy(c0, 'newStream') topologyA.onConnect?.(peerIdB, c0) - handlerB.handler(await mockIncomingStreamEvent(protocol, c1, peerIdA)) + handlerB.handler(await c1.newStream(protocol), c1) expect(c0.newStream).to.have.property('callCount', 1) // @ts-expect-error _removePeer is a protected method @@ -231,7 +230,7 @@ describe('pubsub base life cycle', () => { sinon.stub(c0, 'newStream').throws(error) topologyA.onConnect?.(peerIdB, c0) - handlerB.handler(await mockIncomingStreamEvent(protocol, c1, peerIdA)) + handlerB.handler(await c1.newStream(protocol), c1) expect(c0.newStream).to.have.property('callCount', 1) }) @@ -249,7 +248,7 @@ describe('pubsub base life cycle', () => { const [c0, c1] = ConnectionPair() topologyA.onConnect?.(peerIdB, c0) - handlerB.handler(await mockIncomingStreamEvent(protocol, c1, peerIdA)) + handlerB.handler(await c1.newStream(protocol), c1) // Notice peers of disconnect topologyA?.onDisconnect?.(peerIdB) diff --git a/packages/pubsub/test/peer-streams.spec.ts b/packages/pubsub/test/peer-streams.spec.ts index b4dad8d7ac..f2619c2a6d 100644 --- a/packages/pubsub/test/peer-streams.spec.ts +++ b/packages/pubsub/test/peer-streams.spec.ts @@ -4,6 +4,7 @@ import { peerIdFromPrivateKey } from '@libp2p/peer-id' import { expect } from 'aegir/chai' import * as lp from 'it-length-prefixed' import { pipe } from 'it-pipe' +import { raceEvent } from 'race-event' import { Uint8ArrayList } from 'uint8arraylist' import { PeerStreams } from '../src/peer-streams.js' import { ConnectionPair } from './utils/index.js' @@ -41,7 +42,15 @@ describe('peer-streams', () => { await pipe( [largeMessage], (source) => lp.encode(source, { maxDataLength: messageSize }), - outboundStream.sink + async (source) => { + for (const buf of source) { + const sendMore = outboundStream.send(buf) + + if (sendMore === false) { + await raceEvent(outboundStream, 'drain') + } + } + } ) // Close the outbound writer so the reader knows no more data is coming diff --git a/packages/pubsub/test/pubsub.spec.ts b/packages/pubsub/test/pubsub.spec.ts index 83533ab3cf..ccb1e40b2e 100644 --- a/packages/pubsub/test/pubsub.spec.ts +++ b/packages/pubsub/test/pubsub.spec.ts @@ -14,8 +14,7 @@ import { noSignMsgId } from '../src/utils.js' import { MockRegistrar, ConnectionPair, - PubsubImplementation, - mockIncomingStreamEvent + PubsubImplementation } from './utils/index.js' import type { PeerId, Message, PubSubRPC } from '@libp2p/interface' @@ -180,7 +179,7 @@ describe('pubsub base implementation', () => { const [c0, c1] = ConnectionPair() topologyA.onConnect?.(peerIdB, c0) - handlerB.handler(await mockIncomingStreamEvent(protocol, c1, peerIdA)) + handlerB.handler(await c1.newStream(protocol), c1) }) afterEach(async () => { @@ -298,7 +297,7 @@ describe('pubsub base implementation', () => { const [c0, c1] = ConnectionPair() topologyA.onConnect?.(peerIdB, c0) - handlerB.handler(await mockIncomingStreamEvent(protocol, c1, peerIdA)) + handlerB.handler(await c1.newStream(protocol), c1) }) afterEach(async () 
=> { diff --git a/packages/pubsub/test/utils/index.ts b/packages/pubsub/test/utils/index.ts index 453981c567..9f5f797f5a 100644 --- a/packages/pubsub/test/utils/index.ts +++ b/packages/pubsub/test/utils/index.ts @@ -1,7 +1,7 @@ import { duplexPair } from 'it-pair/duplex' import { PubSubBaseProtocol } from '../../src/index.js' import { RPC } from '../message/rpc.js' -import type { Connection, PeerId, PublishResult, PubSubRPC, PubSubRPCMessage, Topology, IncomingStreamData, StreamHandler, StreamHandlerRecord } from '@libp2p/interface' +import type { Connection, PublishResult, PubSubRPC, PubSubRPCMessage, Topology, StreamHandler, StreamHandlerRecord } from '@libp2p/interface' import type { Registrar } from '@libp2p/interface-internal' export class PubsubImplementation extends PubSubBaseProtocol { @@ -140,13 +140,3 @@ export const ConnectionPair = (): [Connection, Connection] => { } ] } - -export async function mockIncomingStreamEvent (protocol: string, conn: Connection, remotePeer: PeerId): Promise { - return { - stream: await conn.newStream([protocol]), - // @ts-expect-error incomplete implementation - connection: { - remotePeer - } - } -} diff --git a/packages/stream-multiplexer-mplex/package.json b/packages/stream-multiplexer-mplex/package.json index 4400e48c7c..3e6476966b 100644 --- a/packages/stream-multiplexer-mplex/package.json +++ b/packages/stream-multiplexer-mplex/package.json @@ -57,9 +57,7 @@ "dependencies": { "@libp2p/interface": "^2.10.5", "@libp2p/utils": "^6.7.1", - "it-pipe": "^3.0.1", "it-pushable": "^3.2.3", - "it-stream-types": "^2.0.2", "uint8-varint": "^2.0.4", "uint8arraylist": "^2.4.8", "uint8arrays": "^5.1.0" @@ -69,15 +67,14 @@ "@libp2p/logger": "^5.1.21", "aegir": "^47.0.14", "benchmark": "^2.1.4", - "cborg": "^4.2.11", - "delay": "^6.0.0", "iso-random-stream": "^2.0.2", "it-all": "^3.0.8", "it-drain": "^3.0.9", "it-foreach": "^2.1.3", - "it-map": "^3.1.3", "it-pair": "^2.0.6", - "p-defer": "^4.0.1", + "it-pipe": "^3.0.1", + "it-stream-types": "^2.0.2", + "race-event": "^1.6.1", "random-int": "^3.0.0" }, "sideEffects": false diff --git a/packages/stream-multiplexer-mplex/src/encode.ts b/packages/stream-multiplexer-mplex/src/encode.ts index c3dbb6c6ab..d23cd4ec03 100644 --- a/packages/stream-multiplexer-mplex/src/encode.ts +++ b/packages/stream-multiplexer-mplex/src/encode.ts @@ -3,7 +3,6 @@ import { Uint8ArrayList } from 'uint8arraylist' import { allocUnsafe } from 'uint8arrays/alloc' import { MessageTypes } from './message-types.js' import type { Message } from './message-types.js' -import type { Source } from 'it-stream-types' const POOL_SIZE = 10 * 1024 @@ -56,10 +55,9 @@ const encoder = new Encoder() /** * Encode and yield one or more messages */ -export async function * encode (source: Source): AsyncGenerator { - for await (const message of source) { - const list = new Uint8ArrayList() - encoder.write(message, list) - yield list - } +export function encode (message: Message): Uint8ArrayList { + const list = new Uint8ArrayList() + encoder.write(message, list) + + return list } diff --git a/packages/stream-multiplexer-mplex/src/index.ts b/packages/stream-multiplexer-mplex/src/index.ts index 0ccd415ce6..e02e4890c1 100644 --- a/packages/stream-multiplexer-mplex/src/index.ts +++ b/packages/stream-multiplexer-mplex/src/index.ts @@ -33,10 +33,7 @@ import { serviceCapabilities } from '@libp2p/interface' import { MplexStreamMuxer } from './mplex.js' -import type { MplexComponents } from './mplex.js' -import type { StreamMuxer, StreamMuxerFactory, StreamMuxerInit } from 
'@libp2p/interface' - -export type { MplexComponents } +import type { MultiaddrConnection, StreamMuxer, StreamMuxerFactory } from '@libp2p/interface' export interface MplexInit { /** @@ -45,9 +42,9 @@ export interface MplexInit { * messages. If we receive a message larger than this an error will * be thrown and the connection closed. * - * @default 1048576 + * @default 1_048_576 */ - maxMsgSize?: number + maxMessageSize?: number /** * Constrains the size of the unprocessed message queue buffer. @@ -55,37 +52,10 @@ export interface MplexInit { * we have the complete message to deserialized. If the queue gets longer * than this value an error will be thrown and the connection closed. * - * @default 4194304 + * @default 4_194_304 */ maxUnprocessedMessageQueueSize?: number - /** - * The maximum number of multiplexed streams that can be open at any - * one time. A request to open more than this will have a stream - * reset message sent immediately as a response for the newly opened - * stream id - * - * @default 1024 - */ - maxInboundStreams?: number - - /** - * The maximum number of multiplexed streams that can be open at any - * one time. An attempt to open more than this will throw - * - * @default 1024 - */ - maxOutboundStreams?: number - - /** - * Incoming stream messages are buffered until processed by the stream - * handler. If the buffer reaches this size in bytes the stream will - * be reset - * - * @default 4194304 - */ - maxStreamBufferSize?: number - /** * When `maxInboundStreams` is hit, if the remote continues try to open * more than this many new multiplexed streams per second the connection @@ -99,10 +69,8 @@ export interface MplexInit { class Mplex implements StreamMuxerFactory { public protocol = '/mplex/6.7.0' private readonly _init: MplexInit - private readonly components: MplexComponents - constructor (components: MplexComponents, init: MplexInit = {}) { - this.components = components + constructor (init: MplexInit = {}) { this._init = init } @@ -112,9 +80,8 @@ class Mplex implements StreamMuxerFactory { '@libp2p/stream-multiplexing' ] - createStreamMuxer (init: StreamMuxerInit): StreamMuxer { - return new MplexStreamMuxer(this.components, { - ...init, + createStreamMuxer (maConn: MultiaddrConnection): StreamMuxer { + return new MplexStreamMuxer(maConn, { ...this._init }) } @@ -123,6 +90,6 @@ class Mplex implements StreamMuxerFactory { /** * @deprecated mplex is deprecated as it has no flow control. Please use yamux instead. 
*/ -export function mplex (init: MplexInit = {}): (components: MplexComponents) => StreamMuxerFactory { - return (components) => new Mplex(components, init) +export function mplex (init: MplexInit = {}): () => StreamMuxerFactory { + return () => new Mplex(init) } diff --git a/packages/stream-multiplexer-mplex/src/mplex.ts b/packages/stream-multiplexer-mplex/src/mplex.ts index 870ab56f3f..daf4e89891 100644 --- a/packages/stream-multiplexer-mplex/src/mplex.ts +++ b/packages/stream-multiplexer-mplex/src/mplex.ts @@ -1,27 +1,16 @@ -import { TooManyOutboundProtocolStreamsError, MuxerClosedError } from '@libp2p/interface' -import { closeSource } from '@libp2p/utils/close-source' -import { RateLimiter } from '@libp2p/utils/rate-limiter' -import { pipe } from 'it-pipe' -import { pushable } from 'it-pushable' +import { MuxerClosedError } from '@libp2p/interface' +import { RateLimiter, AbstractStreamMuxer } from '@libp2p/utils' import { toString as uint8ArrayToString } from 'uint8arrays' -import { Decoder } from './decode.js' -import { encode } from './encode.js' -import { StreamInputBufferError } from './errors.js' +import { Decoder, MAX_MSG_QUEUE_SIZE, MAX_MSG_SIZE } from './decode.js' import { MessageTypes, MessageTypeNames } from './message-types.js' import { createStream } from './stream.js' import type { MplexInit } from './index.js' import type { Message } from './message-types.js' import type { MplexStream } from './stream.js' -import type { AbortOptions, ComponentLogger, Logger, Stream, StreamMuxer, StreamMuxerInit } from '@libp2p/interface' -import type { Pushable } from 'it-pushable' -import type { Sink, Source } from 'it-stream-types' +import type { CreateStreamOptions, MultiaddrConnection, MessageStreamDirection } from '@libp2p/interface' import type { Uint8ArrayList } from 'uint8arraylist' -const MAX_STREAMS_INBOUND_STREAMS_PER_CONNECTION = 1024 -const MAX_STREAMS_OUTBOUND_STREAMS_PER_CONNECTION = 1024 -const MAX_STREAM_BUFFER_SIZE = 1024 * 1024 * 4 // 4MB const DISCONNECT_THRESHOLD = 5 -const CLOSE_TIMEOUT = 500 function printMessage (msg: Message): any { const output: any = { @@ -30,93 +19,34 @@ function printMessage (msg: Message): any { } if (msg.type === MessageTypes.NEW_STREAM) { - output.data = uint8ArrayToString(msg.data instanceof Uint8Array ? msg.data : msg.data.subarray()) + output.data = uint8ArrayToString(msg.data.subarray()) } if (msg.type === MessageTypes.MESSAGE_INITIATOR || msg.type === MessageTypes.MESSAGE_RECEIVER) { - output.data = uint8ArrayToString(msg.data instanceof Uint8Array ? msg.data : msg.data.subarray(), 'base16') + output.data = uint8ArrayToString(msg.data.subarray(), 'base16') } return output } -export interface MplexComponents { - logger: ComponentLogger -} - -interface MplexStreamMuxerInit extends MplexInit, StreamMuxerInit { - /** - * The default timeout to use in ms when shutting down the muxer. 
- */ - closeTimeout?: number -} - -export class MplexStreamMuxer implements StreamMuxer { - public protocol = '/mplex/6.7.0' - - public sink: Sink, Promise> - public source: AsyncGenerator - - private readonly log: Logger +export class MplexStreamMuxer extends AbstractStreamMuxer { private _streamId: number - private readonly _streams: { initiators: Map, receivers: Map } - private readonly _init: MplexStreamMuxerInit - private readonly _source: Pushable - private readonly closeController: AbortController private readonly rateLimiter: RateLimiter - private readonly closeTimeout: number - private readonly logger: ComponentLogger - - constructor (components: MplexComponents, init?: MplexStreamMuxerInit) { - init = init ?? {} - - this.log = init.log?.newScope('mplex') ?? components.logger.forComponent('libp2p:mplex') - this.logger = components.logger - this._streamId = 0 - this._streams = { - /** - * Stream to ids map - */ - initiators: new Map(), - /** - * Stream to ids map - */ - receivers: new Map() - } - this._init = init - this.closeTimeout = init.closeTimeout ?? CLOSE_TIMEOUT - - /** - * An iterable sink - */ - this.sink = this._createSink() - - /** - * An iterable source - */ - this._source = pushable({ - objectMode: true, - onEnd: (): void => { - // the source has ended, we can't write any more messages to gracefully - // close streams so all we can do is destroy them - for (const stream of this._streams.initiators.values()) { - stream.destroy() - } - - for (const stream of this._streams.receivers.values()) { - stream.destroy() - } - } + private readonly maxMessageSize: number + private readonly maxUnprocessedMessageQueueSize: number + private readonly decoder: Decoder + + constructor (maConn: MultiaddrConnection, init: MplexInit) { + super(maConn, { + ...init, + protocol: '/mplex/6.7.0', + name: 'mplex' }) - this.source = pipe( - this._source, - source => encode(source) - ) - /** - * Close controller - */ - this.closeController = new AbortController() + this._streamId = 0 + this.maxMessageSize = init.maxMessageSize ?? MAX_MSG_SIZE + this.maxUnprocessedMessageQueueSize = init.maxUnprocessedMessageQueueSize ?? MAX_MSG_QUEUE_SIZE + this.decoder = new Decoder(this.maxMessageSize, this.maxUnprocessedMessageQueueSize) this.rateLimiter = new RateLimiter({ points: init.disconnectThreshold ?? DISCONNECT_THRESHOLD, @@ -124,207 +54,74 @@ export class MplexStreamMuxer implements StreamMuxer { }) } - /** - * Returns a Map of streams and their ids - */ - get streams (): Stream[] { - // Inbound and Outbound streams may have the same ids, so we need to make those unique - const streams: Stream[] = [] - for (const stream of this._streams.initiators.values()) { - streams.push(stream) - } - - for (const stream of this._streams.receivers.values()) { - streams.push(stream) + onData (data: Uint8Array | Uint8ArrayList): void { + for (const msg of this.decoder.write(data)) { + this.handleMessage(msg) } - return streams } /** * Initiate a new stream with the given name. If no name is * provided, the id of the stream will be used. */ - newStream (name?: string): Stream { - if (this.closeController.signal.aborted) { + onCreateStream (options: CreateStreamOptions): MplexStream { + if (this.status !== 'open') { throw new MuxerClosedError('Muxer already closed') } - const id = this._streamId++ - name = name == null ? 
id.toString() : name.toString() - const registry = this._streams.initiators - return this._newStream({ id, name, type: 'initiator', registry }) - } - - /** - * Close or abort all tracked streams and stop the muxer - */ - async close (options?: AbortOptions): Promise { - if (this.closeController.signal.aborted) { - return - } - - const signal = options?.signal ?? AbortSignal.timeout(this.closeTimeout) - - try { - // try to gracefully close all streams - await Promise.all( - this.streams.map(async s => s.close({ - signal - })) - ) - this._source.end() - - // try to gracefully close the muxer - await this._source.onEmpty({ - signal - }) - - this.closeController.abort() - } catch (err: any) { - this.abort(err) - } - } - - abort (err: Error): void { - if (this.closeController.signal.aborted) { - return - } - - this.streams.forEach(s => { s.abort(err) }) - this.closeController.abort(err) - } + const id = this._streamId++ - /** - * Called whenever an inbound stream is created - */ - _newReceiverStream (options: { id: number, name: string }): MplexStream { - const { id, name } = options - const registry = this._streams.receivers - return this._newStream({ id, name, type: 'receiver', registry }) + return this._newStream(id, 'outbound') } - _newStream (options: { id: number, name: string, type: 'initiator' | 'receiver', registry: Map }): MplexStream { - const { id, name, type, registry } = options - - this.log('new %s stream %s', type, id) - - if (type === 'initiator' && this._streams.initiators.size === (this._init.maxOutboundStreams ?? MAX_STREAMS_OUTBOUND_STREAMS_PER_CONNECTION)) { - throw new TooManyOutboundProtocolStreamsError('Too many outbound streams open') - } + _newStream (id: number, direction: MessageStreamDirection): MplexStream { + this.log('new %s stream %s', direction, id) - if (registry.has(id)) { - throw new Error(`${type} stream ${id} already exists!`) - } - - const send = async (msg: Message): Promise => { - if (this.log.enabled) { - this.log.trace('%s stream %s send', type, id, printMessage(msg)) - } - - this._source.push(msg) - } - - const onEnd = (): void => { - this.log('%s stream with id %s and protocol %s ended', type, id, stream.protocol) - registry.delete(id) - - if (this._init.onStreamEnd != null) { - this._init.onStreamEnd(stream) - } - } + const stream = createStream({ + id, + direction, + maxMsgSize: this.maxMessageSize, + log: this.log, + muxer: this + }) - const stream = createStream({ id, name, send, type, onEnd, maxMsgSize: this._init.maxMsgSize, log: this.log }) - registry.set(id, stream) return stream } - /** - * Creates a sink with an abortable source. Incoming messages will - * also have their size restricted. All messages will be varint decoded. 
- */ - _createSink (): Sink, Promise> { - const sink: Sink, Promise> = async source => { - const abortListener = (): void => { - closeSource(source, this.log) - } - - this.closeController.signal.addEventListener('abort', abortListener) - - try { - const decoder = new Decoder(this._init.maxMsgSize, this._init.maxUnprocessedMessageQueueSize) - - for await (const chunk of source) { - for (const msg of decoder.write(chunk)) { - await this._handleIncoming(msg) - } - } - - this._source.end() - } catch (err: any) { - this.log('error in sink', err) - this._source.end(err) // End the source with an error - } finally { - this.closeController.signal.removeEventListener('abort', abortListener) - } - } - - return sink - } - - async _handleIncoming (message: Message): Promise { - const { id, type } = message - + handleMessage (message: Message): void { if (this.log.enabled) { this.log.trace('incoming message', printMessage(message)) } // Create a new stream? if (message.type === MessageTypes.NEW_STREAM) { - if (this._streams.receivers.size === (this._init.maxInboundStreams ?? MAX_STREAMS_INBOUND_STREAMS_PER_CONNECTION)) { - this.log('too many inbound streams open') - - // not going to allow this stream, send the reset message manually - // instead of setting it up just to tear it down - this._source.push({ - id, - type: MessageTypes.RESET_RECEIVER - }) - - // if we've hit our stream limit, and the remote keeps trying to open - // more new streams, if they are doing this very quickly maybe they - // are attacking us and we should close the connection - try { - await this.rateLimiter.consume('new-stream', 1) - } catch { - this.log('rate limit hit when opening too many new streams over the inbound stream limit - closing remote connection') - // since there's no backpressure in mplex, the only thing we can really do to protect ourselves is close the connection - this.abort(new Error('Too many open streams')) - return - } - + // close the connection if the remote opens too many streams too quickly + try { + this.rateLimiter.consume('new-stream', 1) + } catch { + this.log('rate limit hit when opening too many new streams over the inbound stream limit - closing remote connection') + // since there's no backpressure in mplex, the only thing we can really do to protect ourselves is close the connection + this.abort(new Error('Too many open streams')) return } - const stream = this._newReceiverStream({ id, name: uint8ArrayToString(message.data instanceof Uint8Array ? message.data : message.data.subarray()) }) - - if (this._init.onIncomingStream != null) { - this._init.onIncomingStream(stream) - } + const stream = this._newStream(message.id, 'inbound') + this.onRemoteStream(stream) return } - const list = (type & 1) === 1 ? this._streams.initiators : this._streams.receivers - const stream = list.get(id) + const id = `${(message.type & 1) === 1 ? 
'i' : 'r'}${message.id}` + const stream = this.streams.find(s => s.id === id) if (stream == null) { - this.log('missing stream %s for message type %s', id, MessageTypeNames[type]) + this.log('missing stream %s for message type %s', id, MessageTypeNames[message.type]) // if the remote keeps sending us messages for streams that have been // closed or were never opened they may be attacking us so if they do // this very quickly all we can do is close the connection try { - await this.rateLimiter.consume('missing-stream', 1) + this.rateLimiter.consume('missing-stream', 1) } catch { this.log('rate limit hit when receiving messages for streams that do not exist - closing remote connection') // since there's no backpressure in mplex, the only thing we can really do to protect ourselves is close the connection @@ -335,41 +132,28 @@ export class MplexStreamMuxer implements StreamMuxer { return } - const maxBufferSize = this._init.maxStreamBufferSize ?? MAX_STREAM_BUFFER_SIZE - try { - switch (type) { + switch (message.type) { case MessageTypes.MESSAGE_INITIATOR: case MessageTypes.MESSAGE_RECEIVER: - if (stream.sourceReadableLength() > maxBufferSize) { - // Stream buffer has got too large, reset the stream - this._source.push({ - id: message.id, - type: type === MessageTypes.MESSAGE_INITIATOR ? MessageTypes.RESET_RECEIVER : MessageTypes.RESET_INITIATOR - }) - - // Inform the stream consumer they are not fast enough - throw new StreamInputBufferError('Input buffer full - increase Mplex maxBufferSize to accommodate slow consumers') - } - // We got data from the remote, push it into our local stream - stream.sourcePush(message.data) + stream.onData(message.data) break case MessageTypes.CLOSE_INITIATOR: case MessageTypes.CLOSE_RECEIVER: - // The remote has stopped writing, so we can stop reading - stream.remoteCloseWrite() + // The remote has stopped writing + stream.onRemoteCloseWrite() break case MessageTypes.RESET_INITIATOR: case MessageTypes.RESET_RECEIVER: // The remote has errored, stop reading and writing to the stream immediately - stream.reset() + stream.onRemoteReset() break default: - this.log('unknown message type %s', type) + this.log('unknown message type') } } catch (err: any) { - this.log.error('error while processing message', err) + this.log.error('error while processing message - %e', err) stream.abort(err) } } diff --git a/packages/stream-multiplexer-mplex/src/stream.ts b/packages/stream-multiplexer-mplex/src/stream.ts index 13005ccf17..e4b8aec627 100644 --- a/packages/stream-multiplexer-mplex/src/stream.ts +++ b/packages/stream-multiplexer-mplex/src/stream.ts @@ -1,95 +1,118 @@ -import { AbstractStream } from '@libp2p/utils/abstract-stream' +import { AbstractStream } from '@libp2p/utils' import { Uint8ArrayList } from 'uint8arraylist' import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' import { MAX_MSG_SIZE } from './decode.js' +import { encode } from './encode.ts' import { InitiatorMessageTypes, ReceiverMessageTypes } from './message-types.js' -import type { Message } from './message-types.js' -import type { Logger } from '@libp2p/interface' -import type { AbstractStreamInit } from '@libp2p/utils/abstract-stream' +import type { MplexStreamMuxer } from './mplex.ts' +import type { Logger, MessageStreamDirection } from '@libp2p/interface' +import type { AbstractStreamInit, SendResult } from '@libp2p/utils' +import type { AbortOptions } from 'it-pushable' export interface Options { id: number - send(msg: Message): Promise log: Logger - name?: string - 
onEnd?(err?: Error): void - type?: 'initiator' | 'receiver' + direction: MessageStreamDirection maxMsgSize?: number + muxer: MplexStreamMuxer } interface MplexStreamInit extends AbstractStreamInit { - streamId: number - name: string - send(msg: Message): Promise - - /** - * The maximum allowable data size, any data larger than this will be - * chunked and sent in multiple data messages - */ maxDataSize: number + muxer: MplexStreamMuxer + direction: MessageStreamDirection } export class MplexStream extends AbstractStream { - private readonly name: string - private readonly streamId: number - private readonly send: (msg: Message) => Promise + public readonly streamId: number private readonly types: Record private readonly maxDataSize: number + private readonly muxer: MplexStreamMuxer constructor (init: MplexStreamInit) { super(init) this.types = init.direction === 'outbound' ? InitiatorMessageTypes : ReceiverMessageTypes - this.send = init.send - this.name = init.name - this.streamId = init.streamId this.maxDataSize = init.maxDataSize - } + this.muxer = init.muxer + this.streamId = parseInt(this.id.substring(1)) - async sendNewStream (): Promise { - await this.send({ id: this.streamId, type: InitiatorMessageTypes.NEW_STREAM, data: new Uint8ArrayList(uint8ArrayFromString(this.name)) }) + if (init.direction === 'outbound') { + this.muxer.send( + encode({ + id: this.streamId, + type: InitiatorMessageTypes.NEW_STREAM, + data: new Uint8ArrayList(uint8ArrayFromString(this.id)) + }) + ) + } } - async sendData (data: Uint8ArrayList): Promise { - data = data.sublist() + sendData (data: Uint8ArrayList): SendResult { + const list = new Uint8ArrayList() + const sentBytes = data.byteLength while (data.byteLength > 0) { const toSend = Math.min(data.byteLength, this.maxDataSize) - await this.send({ - id: this.streamId, - type: this.types.MESSAGE, - data: data.sublist(0, toSend) - }) + const slice = data.sublist(0, toSend) + data = data.sublist(toSend) - data.consume(toSend) + list.append( + encode({ + id: this.streamId, + type: this.types.MESSAGE, + data: slice + }) + ) + } + + return { + sentBytes, + canSendMore: this.muxer.send(list) } } - async sendReset (): Promise { - await this.send({ id: this.streamId, type: this.types.RESET }) + sendReset (): boolean { + return this.muxer.send( + encode({ + id: this.streamId, + type: this.types.RESET + }) + ) } - async sendCloseWrite (): Promise { - await this.send({ id: this.streamId, type: this.types.CLOSE }) + async sendCloseWrite (options?: AbortOptions): Promise { + this.muxer.send( + encode({ + id: this.streamId, + type: this.types.CLOSE + }) + ) + options?.signal?.throwIfAborted() } - async sendCloseRead (): Promise { + async sendCloseRead (options?: AbortOptions): Promise { + options?.signal?.throwIfAborted() // mplex does not support close read, only close write } + + sendPause (): void { + // mplex does not support backpressure + } + + sendResume (): void { + // mplex does not support backpressure + } } export function createStream (options: Options): MplexStream { - const { id, name, send, onEnd, type = 'initiator', maxMsgSize = MAX_MSG_SIZE } = options - const direction = type === 'initiator' ? 'outbound' : 'inbound' + const { id, muxer, direction, maxMsgSize = MAX_MSG_SIZE } = options return new MplexStream({ - id: type === 'initiator' ? (`i${id}`) : `r${id}`, - streamId: id, - name: `${name ?? id}`, + id: direction === 'outbound' ? 
(`i${id}`) : `r${id}`, direction, maxDataSize: maxMsgSize, - onEnd, - send, + muxer, log: options.log.newScope(`${direction}:${id}`) }) } diff --git a/packages/stream-multiplexer-mplex/test/coder.spec.ts b/packages/stream-multiplexer-mplex/test/coder.spec.ts index dfbbe809e9..7fd46d448b 100644 --- a/packages/stream-multiplexer-mplex/test/coder.spec.ts +++ b/packages/stream-multiplexer-mplex/test/coder.spec.ts @@ -13,9 +13,9 @@ import type { Message, NewStreamMessage } from '../src/message-types.js' describe('coder', () => { it('should encode header', async () => { - const source: Message[] = [{ id: 17, type: 0, data: new Uint8ArrayList(uint8ArrayFromString('17')) }] + const source: Message = { id: 17, type: 0, data: new Uint8ArrayList(uint8ArrayFromString('17')) } - const data = new Uint8ArrayList(...await all(encode(source))).subarray() + const data = new Uint8ArrayList(...all(encode(source))).subarray() const expectedHeader = uint8ArrayFromString('880102', 'base16') expect(data.slice(0, expectedHeader.length)).to.equalBytes(expectedHeader) @@ -28,35 +28,23 @@ describe('coder', () => { } }) - it('should encode several msgs into buffer', async () => { - const source: Message[] = [ - { id: 17, type: 0, data: new Uint8ArrayList(uint8ArrayFromString('17')) }, - { id: 19, type: 0, data: new Uint8ArrayList(uint8ArrayFromString('19')) }, - { id: 21, type: 0, data: new Uint8ArrayList(uint8ArrayFromString('21')) } - ] - - const data = new Uint8ArrayList(...await all(encode(source))).subarray() - - expect(data).to.equalBytes(uint8ArrayFromString('88010231379801023139a801023231', 'base16')) - }) - it('should encode from Uint8ArrayList', async () => { - const source: NewStreamMessage[] = [{ + const source: NewStreamMessage = { id: 17, type: 0, data: new Uint8ArrayList( uint8ArrayFromString(Math.random().toString()), uint8ArrayFromString(Math.random().toString()) ) - }] + } - const data = new Uint8ArrayList(...await all(encode(source))).subarray() + const data = new Uint8ArrayList(...all(encode(source))).subarray() expect(data).to.equalBytes( uint8ArrayConcat([ uint8ArrayFromString('8801', 'base16'), - Uint8Array.from([source[0].data.length]), - source[0].data instanceof Uint8Array ? 
source[0].data : source[0].data.slice() + Uint8Array.from([source.data.length]), + source.data.subarray() ]) ) }) @@ -77,9 +65,9 @@ describe('coder', () => { }) it('should encode zero length body msg', async () => { - const source: Message[] = [{ id: 17, type: 0, data: new Uint8ArrayList() }] + const source: Message = { id: 17, type: 0, data: new Uint8ArrayList() } - const data = new Uint8ArrayList(...await all(encode(source))).subarray() + const data = new Uint8ArrayList(...all(encode(source))).subarray() expect(data).to.equalBytes(uint8ArrayFromString('880100', 'base16')) }) diff --git a/packages/stream-multiplexer-mplex/test/compliance.spec.ts b/packages/stream-multiplexer-mplex/test/compliance.spec.ts index ffca82d8a9..e62613d596 100644 --- a/packages/stream-multiplexer-mplex/test/compliance.spec.ts +++ b/packages/stream-multiplexer-mplex/test/compliance.spec.ts @@ -1,18 +1,14 @@ /* eslint-env mocha */ import tests from '@libp2p/interface-compliance-tests/stream-muxer' -import { defaultLogger } from '@libp2p/logger' import { mplex } from '../src/index.js' describe('compliance', () => { tests({ async setup () { return mplex({ - maxInboundStreams: Infinity, disconnectThreshold: Infinity - })({ - logger: defaultLogger() - }) + })() }, async teardown () {} }) diff --git a/packages/stream-multiplexer-mplex/test/fixtures/encode.ts b/packages/stream-multiplexer-mplex/test/fixtures/encode.ts new file mode 100644 index 0000000000..7d5189f2bd --- /dev/null +++ b/packages/stream-multiplexer-mplex/test/fixtures/encode.ts @@ -0,0 +1,9 @@ +import { encode as encoder } from '../../src/encode.ts' +import type { Message } from '../../src/message-types.ts' +import type { Uint8ArrayList } from 'uint8arraylist' + +export function * encode (source: Iterable): Generator { + for (const buf of source) { + yield encoder(buf) + } +} diff --git a/packages/stream-multiplexer-mplex/test/fixtures/utils.ts b/packages/stream-multiplexer-mplex/test/fixtures/utils.ts index 53ee127098..be0fb2c512 100644 --- a/packages/stream-multiplexer-mplex/test/fixtures/utils.ts +++ b/packages/stream-multiplexer-mplex/test/fixtures/utils.ts @@ -17,55 +17,3 @@ export function messageWithBytes (msg: Message): Message | MessageWithBytes { return msg } - -export function arrayToGenerator (data: T[]): AsyncGenerator { - let done: Error | boolean = false - let index = -1 - - const generator: AsyncGenerator = { - [Symbol.asyncIterator]: () => { - return generator - }, - async next () { - if (done instanceof Error) { - throw done - } - - index++ - - if (index === data.length) { - done = true - } - - if (done) { - return { - done: true, - value: undefined - } - } - - return { - done: false, - value: data[index] - } - }, - async return (): Promise> { - done = true - - return { - done: true, - value: undefined - } - }, - async throw (err: Error): Promise> { - done = err - - return { - done: true, - value: undefined - } - } - } - - return generator -} diff --git a/packages/stream-multiplexer-mplex/test/mplex.spec.ts b/packages/stream-multiplexer-mplex/test/mplex.spec.ts deleted file mode 100644 index 2f30e89378..0000000000 --- a/packages/stream-multiplexer-mplex/test/mplex.spec.ts +++ /dev/null @@ -1,184 +0,0 @@ -/* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 5] */ - -import { defaultLogger } from '@libp2p/logger' -import { expect } from 'aegir/chai' -import delay from 'delay' -import all from 'it-all' -import { pushable } from 'it-pushable' -import pDefer from 'p-defer' -import { Uint8ArrayList } from 'uint8arraylist' -import 
{ fromString as uint8ArrayFromString } from 'uint8arrays/from-string' -import { encode } from '../src/encode.js' -import { mplex } from '../src/index.js' -import { MessageTypes } from '../src/message-types.js' -import { decode } from './fixtures/decode.js' -import type { CloseInitiatorMessage, Message, MessageInitiatorMessage, NewStreamMessage } from '../src/message-types.js' -import type { Source } from 'it-stream-types' - -describe('mplex', () => { - it('should restrict number of initiator streams per connection', async () => { - const maxOutboundStreams = 10 - const factory = mplex({ - maxOutboundStreams - })({ - logger: defaultLogger() - }) - const muxer = factory.createStreamMuxer() - - // max out the streams for this connection - for (let i = 0; i < maxOutboundStreams; i++) { - await muxer.newStream() - } - - // open one more - await expect((async () => { - await muxer.newStream() - })()).eventually.be.rejected - .with.property('name', 'TooManyOutboundProtocolStreamsError') - }) - - it('should restrict number of recipient streams per connection', async () => { - const maxInboundStreams = 10 - const factory = mplex({ - maxInboundStreams, - disconnectThreshold: Infinity - })({ - logger: defaultLogger() - }) - const muxer = factory.createStreamMuxer() - const stream = pushable() - - // max out the streams for this connection - for (let i = 0; i < maxInboundStreams; i++) { - const source: NewStreamMessage[] = [{ - id: i, - type: 0, - data: new Uint8ArrayList(uint8ArrayFromString('17')) - }] - - const data = new Uint8ArrayList(...(await all(encode(source)))) - - stream.push(data) - } - - // simulate a new incoming stream - const source: NewStreamMessage[] = [{ - id: 11, - type: 0, - data: new Uint8ArrayList(uint8ArrayFromString('17')) - }] - - const data = new Uint8ArrayList(...(await all(encode(source)))) - - stream.push(data) - stream.end() - - const bufs: Array = [] - const sinkDone = pDefer() - - void Promise.resolve().then(async () => { - for await (const buf of muxer.source) { - bufs.push(buf) - } - sinkDone.resolve() - }) - - await muxer.sink(stream) - await sinkDone.promise - - const messages = await all(decode()(bufs)) - - expect(messages).to.have.nested.property('[0].id', 11, 'Did not specify the correct stream id') - expect(messages).to.have.nested.property('[0].type', MessageTypes.RESET_RECEIVER, 'Did not reset the stream that tipped us over the inbound stream limit') - }) - - it('should reset a stream that fills the message buffer', async () => { - let sent = 0 - const streamSourceError = pDefer() - const maxStreamBufferSize = 1024 * 1024 // 1MB - const id = 17 - - // simulate a new incoming stream that sends lots of data - const input: Source = (async function * send () { - const newStreamMessage: NewStreamMessage = { - id, - type: MessageTypes.NEW_STREAM, - data: new Uint8ArrayList(new Uint8Array(1024)) - } - yield newStreamMessage - - await delay(10) - - for (let i = 0; i < 100; i++) { - const dataMessage: MessageInitiatorMessage = { - id, - type: MessageTypes.MESSAGE_INITIATOR, - data: new Uint8ArrayList(new Uint8Array(1024 * 1000)) - } - yield dataMessage - - sent++ - - await delay(10) - } - - await delay(10) - - const closeMessage: CloseInitiatorMessage = { - id, - type: MessageTypes.CLOSE_INITIATOR - } - yield closeMessage - })() - - // create the muxer - const factory = mplex({ - maxStreamBufferSize - })({ - logger: defaultLogger() - }) - const muxer = factory.createStreamMuxer({ - log: defaultLogger().forComponent('libp2p:mplex'), - onIncomingStream () { - // do 
nothing with the stream so the buffer fills up - }, - onStreamEnd (stream) { - void all(stream.source) - .then(() => { - streamSourceError.reject(new Error('Stream source did not error')) - }) - .catch(err => { - // should have errored before all 102 messages were sent - expect(sent).to.be.lessThan(10) - streamSourceError.resolve(err) - }) - } - }) - - // collect outgoing mplex messages - const muxerFinished = pDefer() - const messages: Message[] = [] - void Promise.resolve().then(async () => { - try { - // collect as many messages as possible - for await (const message of decode()(muxer.source)) { - messages.push(message) - } - } catch {} - muxerFinished.resolve() - }) - - // the muxer processes the messages - void muxer.sink(encode(input)) - - // source should have errored with appropriate code - const err = await streamSourceError.promise - expect(err).to.have.property('name', 'StreamInputBufferError') - - // should have sent reset message to peer for this stream - await muxerFinished.promise - expect(messages).to.have.nested.property('[0].id', id) - expect(messages).to.have.nested.property('[0].type', MessageTypes.RESET_RECEIVER) - }) -}) diff --git a/packages/stream-multiplexer-mplex/test/restrict-size.spec.ts b/packages/stream-multiplexer-mplex/test/restrict-size.spec.ts index 9ccae1e951..6586db64f9 100644 --- a/packages/stream-multiplexer-mplex/test/restrict-size.spec.ts +++ b/packages/stream-multiplexer-mplex/test/restrict-size.spec.ts @@ -7,9 +7,9 @@ import drain from 'it-drain' import each from 'it-foreach' import { pipe } from 'it-pipe' import { Uint8ArrayList } from 'uint8arraylist' -import { encode } from '../src/encode.js' import { MessageTypes } from '../src/message-types.js' import { decode } from './fixtures/decode.js' +import { encode } from './fixtures/encode.js' import type { Message } from '../src/message-types.js' describe('restrict size', () => { @@ -84,7 +84,7 @@ describe('restrict size', () => { encode, async function * (source) { // make one big buffer - yield new Uint8ArrayList(...(await all(source))).subarray() + yield new Uint8ArrayList(...(all(source))).subarray() }, decode(maxMessageSize, maxUnprocessedMessageQueueSize), (source) => each(source, chunk => { diff --git a/packages/stream-multiplexer-mplex/test/stream.spec.ts b/packages/stream-multiplexer-mplex/test/stream.spec.ts index fa9bf52920..d2c44332b2 100644 --- a/packages/stream-multiplexer-mplex/test/stream.spec.ts +++ b/packages/stream-multiplexer-mplex/test/stream.spec.ts @@ -1,22 +1,22 @@ /* eslint-env mocha */ -import { defaultLogger } from '@libp2p/logger' +import { StreamCloseEvent } from '@libp2p/interface' +import { multiaddrConnectionPair } from '@libp2p/utils' import { expect } from 'aegir/chai' -import * as cborg from 'cborg' import randomBytes from 'iso-random-stream/src/random.js' -import drain from 'it-drain' -import each from 'it-foreach' -import map from 'it-map' -import { pipe } from 'it-pipe' -import defer from 'p-defer' +import { pushable } from 'it-pushable' +import { raceEvent } from 'race-event' import randomInt from 'random-int' import { Uint8ArrayList } from 'uint8arraylist' import { fromString as uint8ArrayFromString } from 'uint8arrays' +import { MAX_MSG_SIZE } from '../src/decode.ts' +import { mplex } from '../src/index.ts' import { MessageTypes, MessageTypeNames } from '../src/message-types.js' -import { createStream } from '../src/stream.js' -import { arrayToGenerator, messageWithBytes } from './fixtures/utils.js' +import { decode } from './fixtures/decode.ts' +import { 
messageWithBytes } from './fixtures/utils.js' import type { Message } from '../src/message-types.js' import type { MplexStream } from '../src/stream.js' +import type { MessageStream } from '@libp2p/interface' function randomInput (min = 1, max = 100): Uint8ArrayList[] { return Array.from(Array(randomInt(min, max)), () => new Uint8ArrayList(randomBytes(randomInt(1, 128)))) @@ -26,365 +26,321 @@ function expectMsgType (actual: keyof typeof MessageTypeNames, expected: keyof t expect(MessageTypeNames[actual]).to.equal(MessageTypeNames[expected]) } -const msgToBuffer = (msg: Message): Uint8ArrayList => { - const m: any = { - ...msg - } - - if (msg.type === MessageTypes.NEW_STREAM || msg.type === MessageTypes.MESSAGE_INITIATOR || msg.type === MessageTypes.MESSAGE_RECEIVER) { - m.data = msg.data.slice() - } - - return new Uint8ArrayList(cborg.encode(m)) +export interface StreamPair { + initiatorStream: MplexStream + receiverStream: MplexStream, + initiatorMessages(): Promise + receiverMessages(): Promise } -const bufferToMessage = (buf: Uint8Array | Uint8ArrayList): Message => cborg.decode(buf.subarray()) - -interface onMessage { - (msg: Message, initiator: MplexStream, receiver: MplexStream): void -} +async function streamPair (): Promise { + const [outbound, inbound] = multiaddrConnectionPair() + const factory = mplex()() -export interface StreamPair { - initiatorSentMessages: Message[] - receiverSentMessages: Message[] -} + const outboundMuxer = factory.createStreamMuxer(outbound) + const inboundMuxer = factory.createStreamMuxer(inbound) -async function streamPair (n: number, onInitiatorMessage?: onMessage, onReceiverMessage?: onMessage): Promise { - const receiverSentMessages: Message[] = [] - const initiatorSentMessages: Message[] = [] - const id = 5 + const initiatorMessages = observeIncomingMessages(inbound) + const receiverMessages = observeIncomingMessages(outbound) + const receiverStream = Promise.withResolvers() - const mockInitiatorSend = async (msg: Message): Promise => { - initiatorSentMessages.push(msg) + inboundMuxer.addEventListener('stream', (evt) => { + receiverStream.resolve(evt.detail as MplexStream) + }) - if (onInitiatorMessage != null) { - onInitiatorMessage(msg, initiator, receiver) - } + const initiatorStream = await outboundMuxer.createStream() as MplexStream - receiver.sourcePush(msgToBuffer(msg)) + return { + initiatorStream, + receiverStream: await receiverStream.promise, + initiatorMessages, + receiverMessages } - const mockReceiverSend = async (msg: Message): Promise => { - receiverSentMessages.push(msg) +} - if (onReceiverMessage != null) { - onReceiverMessage(msg, initiator, receiver) - } +function observeIncomingMessages (messageStream: MessageStream): () => Promise { + const output: Message[] = [] + const queue = pushable() + const messages = Promise.withResolvers() - initiator.sourcePush(msgToBuffer(msg)) - } - const initiator = createStream({ - id, - send: mockInitiatorSend, - type: 'initiator', - log: defaultLogger().forComponent('initiator') - }) - const receiver = createStream({ - id, - send: mockReceiverSend, - type: 'receiver', - log: defaultLogger().forComponent('receiver') + messageStream.addEventListener('message', (evt) => { + queue.push(evt.data.subarray()) }) - const input = new Array(n).fill(0).map((_, i) => new Uint8ArrayList(Uint8Array.from([i]))) - void pipe( - receiver, - source => each(source, buf => { - const msg = bufferToMessage(buf) + Promise.resolve().then(async () => { + for await (const message of decode()(queue)) { + 
output.push(message) + } - // when the initiator sends a CLOSE message, we call close - if (msg.type === MessageTypes.CLOSE_INITIATOR) { - receiver.remoteCloseWrite() - } + messages.resolve(output) + }) - // when the initiator sends a RESET message, we call close - if (msg.type === MessageTypes.RESET_INITIATOR) { - receiver.reset() - } - }), - receiver - ).catch(() => {}) - - try { - await pipe( - arrayToGenerator(input), - initiator, - (source) => map(source, buf => { - const msg: Message = bufferToMessage(buf) - - // when the receiver sends a CLOSE message, we call close - if (msg.type === MessageTypes.CLOSE_RECEIVER) { - initiator.remoteCloseWrite() - } - - // when the receiver sends a RESET message, we call close - if (msg.type === MessageTypes.RESET_RECEIVER) { - initiator.reset() - } - }), - drain - ) - } catch { + return () => { + queue.end() - } - - return { - receiverSentMessages, - initiatorSentMessages + return messages.promise } } describe('stream', () => { it('should initiate stream with NEW_STREAM message', async () => { - const msgs: Message[] = [] - const mockSend = async (msg: Message): Promise => { msgs.push(msg) } - const id = randomInt(1000) - const stream = createStream({ - id, - send: mockSend, - log: defaultLogger().forComponent('initiator') - }) - const input = randomInput() - - await pipe(input, stream) - - expect(msgs[0].id).to.equal(id) - expectMsgType(msgs[0].type, MessageTypes.NEW_STREAM) - expect(messageWithBytes(msgs[0])).to.have.property('data').that.equalBytes(uint8ArrayFromString(id.toString())) - }) + const pair = await streamPair() - it('should initiate named stream with NEW_STREAM message', async () => { - const msgs: Message[] = [] - const mockSend = async (msg: Message): Promise => { msgs.push(msg) } - const id = randomInt(1000) - const name = `STREAM${Date.now()}` - const stream = createStream({ - id, - name, - send: mockSend, - log: defaultLogger().forComponent('initiator') - }) - const input = randomInput() + pair.initiatorStream.send(Uint8Array.from([0, 1, 2, 3, 4])) - await pipe(input, stream) + await Promise.all([ + pair.receiverStream.close(), + pair.initiatorStream.close() + ]) - expect(msgs[0].id).to.equal(id) + const msgs = await pair.initiatorMessages() + expect(msgs[0].id).to.equal(pair.initiatorStream.streamId) expectMsgType(msgs[0].type, MessageTypes.NEW_STREAM) - expect(messageWithBytes(msgs[0])).to.have.property('data').that.equalBytes(uint8ArrayFromString(name)) + expect(messageWithBytes(msgs[0])).to.have.property('data').that.equalBytes(uint8ArrayFromString(pair.initiatorStream.id.toString())) }) it('should end a stream when it is aborted', async () => { - const msgs: Message[] = [] - const mockSend = async (msg: Message): Promise => { msgs.push(msg) } - const id = randomInt(1000) - const name = `STREAM${Date.now()}` - const deferred = defer() - const stream = createStream({ - id, - name, - onEnd: deferred.resolve, - send: mockSend, - log: defaultLogger().forComponent('initiator') - }) - const error = new Error('boom') - stream.abort(error) - const err = await deferred.promise - expect(err).to.equal(error) + const pair = await streamPair() + + const [evt] = await Promise.all([ + raceEvent(pair.initiatorStream, 'close'), + pair.initiatorStream.abort(error) + ]) + + expect(evt.error).to.equal(error) }) it('should end a stream when it is reset', async () => { - const msgs: Message[] = [] - const mockSend = async (msg: Message): Promise => { msgs.push(msg) } - const id = randomInt(1000) - const name = `STREAM${Date.now()}` - const 
deferred = defer() - const stream = createStream({ - id, - name, - onEnd: deferred.resolve, - send: mockSend, - log: defaultLogger().forComponent('initiator') - }) + const pair = await streamPair() + const evtPromise = raceEvent(pair.initiatorStream, 'close') + pair.initiatorStream.onRemoteReset() - stream.reset() - - const err = await deferred.promise - expect(err).to.exist() - expect(err).to.have.property('name', 'StreamResetError') + await expect(evtPromise).to.eventually.have.nested.property('error.name', 'StreamResetError') }) it('should send data with MESSAGE_INITIATOR messages if stream initiator', async () => { - const msgs: Message[] = [] - const mockSend = async (msg: Message): Promise => { msgs.push(msg) } - const id = randomInt(1000) - const name = id.toString() - const stream = createStream({ - id, - name, - send: mockSend, - type: 'initiator', - log: defaultLogger().forComponent('initiator') - }) + const pair = await streamPair() const input = randomInput() - await pipe(input, stream) + for (const buf of input) { + const sendMore = pair.initiatorStream.send(buf) + + if (!sendMore) { + await raceEvent(pair.initiatorStream, 'drain') + } + } + + await Promise.all([ + raceEvent(pair.receiverStream, 'close'), + raceEvent(pair.initiatorStream, 'close'), + pair.receiverStream.close(), + pair.initiatorStream.close() + ]) // First and last should be NEW_STREAM and CLOSE + const msgs = await pair.initiatorMessages() const dataMsgs = msgs.slice(1, -1) expect(dataMsgs).have.length(input.length) dataMsgs.forEach((msg, i) => { - expect(msg.id).to.equal(id) + expect(msg.id).to.equal(pair.initiatorStream.streamId) expectMsgType(msg.type, MessageTypes.MESSAGE_INITIATOR) expect(messageWithBytes(msg)).to.have.property('data').that.equalBytes(input[i].subarray()) }) }) it('should send data with MESSAGE_RECEIVER messages if stream receiver', async () => { - const msgs: Message[] = [] - const mockSend = async (msg: Message): Promise => { msgs.push(msg) } - const id = randomInt(1000) - const name = id.toString() - const stream = createStream({ - id, - name, - send: mockSend, - type: 'receiver', - log: defaultLogger().forComponent('receiver') - }) - + const pair = await streamPair() const input = randomInput() - await pipe(input, stream) + for (const buf of input) { + const sendMore = pair.receiverStream.send(buf) + + if (!sendMore) { + await raceEvent(pair.receiverStream, 'drain') + } + } + + await Promise.all([ + pair.receiverStream.close(), + pair.initiatorStream.close(), + raceEvent(pair.initiatorStream, 'close') + ]) // Last should be CLOSE + const msgs = await pair.receiverMessages() const dataMsgs = msgs.slice(0, -1) expect(dataMsgs).have.length(input.length) dataMsgs.forEach((msg, i) => { - expect(msg.id).to.equal(id) + expect(msg.id).to.equal(pair.receiverStream.streamId) expectMsgType(msg.type, MessageTypes.MESSAGE_RECEIVER) expect(messageWithBytes(msg)).to.have.property('data').that.equalBytes(input[i].subarray()) }) }) it('should close stream with CLOSE_INITIATOR message if stream initiator', async () => { - const msgs: Message[] = [] - const mockSend = async (msg: Message): Promise => { msgs.push(msg) } - const id = randomInt(1000) - const name = id.toString() - const stream = createStream({ - id, - name, - send: mockSend, - type: 'initiator', - log: defaultLogger().forComponent('initiator') - }) + const pair = await streamPair() const input = randomInput() - await pipe(input, stream) + for (const buf of input) { + const sendMore = pair.initiatorStream.send(buf) - const closeMsg = 
msgs[msgs.length - 1] + if (!sendMore) { + await raceEvent(pair.initiatorStream, 'drain') + } + } + + await Promise.all([ + raceEvent(pair.receiverStream, 'remoteClose'), + pair.initiatorStream.close() + ]) - expect(closeMsg.id).to.equal(id) + const msgs = await pair.initiatorMessages() + const closeMsg = msgs[msgs.length - 1] + expect(closeMsg.id).to.equal(pair.initiatorStream.streamId) expectMsgType(closeMsg.type, MessageTypes.CLOSE_INITIATOR) expect(closeMsg).to.not.have.property('data') }) it('should close stream with CLOSE_RECEIVER message if stream receiver', async () => { - const msgs: Message[] = [] - const mockSend = async (msg: Message): Promise => { msgs.push(msg) } - const id = randomInt(1000) - const name = id.toString() - const stream = createStream({ - id, - name, - send: mockSend, - type: 'receiver', - log: defaultLogger().forComponent('receiver') - }) + const pair = await streamPair() const input = randomInput() - await pipe(input, stream) + for (const buf of input) { + const sendMore = pair.receiverStream.send(buf) - const closeMsg = msgs[msgs.length - 1] + if (!sendMore) { + await raceEvent(pair.receiverStream, 'drain') + } + } + + await Promise.all([ + raceEvent(pair.initiatorStream, 'remoteClose'), + pair.receiverStream.close() + ]) - expect(closeMsg.id).to.equal(id) + const msgs = await pair.receiverMessages() + const closeMsg = msgs[msgs.length - 1] + expect(closeMsg.id).to.equal(pair.receiverStream.streamId) expectMsgType(closeMsg.type, MessageTypes.CLOSE_RECEIVER) expect(closeMsg).to.not.have.property('data') }) it('should reset stream on error with RESET_INITIATOR message if stream initiator', async () => { - const msgs: Message[] = [] - const mockSend = async (msg: Message): Promise => { msgs.push(msg) } - const id = randomInt(1000) - const name = id.toString() - const stream = createStream({ - id, - name, - send: mockSend, - type: 'initiator', - log: defaultLogger().forComponent('initiator') - }) - const error = new Error(`Boom ${Date.now()}`) - const input = { - [Symbol.iterator]: function * () { - for (let i = 0; i < randomInt(1, 10); i++) { - yield new Uint8ArrayList(randomBytes(randomInt(1, 128))) - } - throw error + const pair = await streamPair() + + for (const buf of randomInput()) { + const sendMore = pair.receiverStream.send(buf) + + if (!sendMore) { + await raceEvent(pair.receiverStream, 'drain') } } - await expect(pipe(input, stream)).to.eventually.be - .rejected.with.property('message', error.message) + const error = new Error(`Boom ${Date.now()}`) - const resetMsg = msgs[msgs.length - 1] + await Promise.all([ + raceEvent(pair.receiverStream, 'close'), + pair.initiatorStream.abort(error) + ]) - expect(resetMsg.id).to.equal(id) + const msgs = await pair.initiatorMessages() + const resetMsg = msgs[msgs.length - 1] + expect(resetMsg.id).to.equal(pair.initiatorStream.streamId) expectMsgType(resetMsg.type, MessageTypes.RESET_INITIATOR) expect(resetMsg).to.not.have.property('data') }) it('should reset stream on error with RESET_RECEIVER message if stream receiver', async () => { - const msgs: Message[] = [] - const mockSend = async (msg: Message): Promise => { msgs.push(msg) } - const id = randomInt(1000) - const name = id.toString() - const stream = createStream({ - id, - name, - send: mockSend, - type: 'receiver', - log: defaultLogger().forComponent('receiver') - }) - const error = new Error(`Boom ${Date.now()}`) - const input = { - [Symbol.iterator]: function * () { - for (let i = 0; i < randomInt(1, 10); i++) { - yield new 
Uint8ArrayList(randomBytes(randomInt(1, 128))) - } - throw error + const pair = await streamPair() + + for (const buf of randomInput()) { + const sendMore = pair.receiverStream.send(buf) + + if (!sendMore) { + await raceEvent(pair.receiverStream, 'drain') } } - await expect(pipe(input, stream)).to.eventually.be.rejected - .with.property('message', error.message) + const error = new Error(`Boom ${Date.now()}`) - const resetMsg = msgs[msgs.length - 1] + await Promise.all([ + raceEvent(pair.initiatorStream, 'close'), + pair.receiverStream.abort(error) + ]) - expect(resetMsg.id).to.equal(id) + const msgs = await pair.receiverMessages() + const resetMsg = msgs[msgs.length - 1] + expect(resetMsg.id).to.equal(pair.receiverStream.streamId) expectMsgType(resetMsg.type, MessageTypes.RESET_RECEIVER) expect(resetMsg).to.not.have.property('data') }) + it('should echo messages', async () => { + const dataLength = 1 + const pair = await streamPair() + + pair.receiverStream.addEventListener('message', (evt) => { + pair.receiverStream.send(evt.data) + }) + + for (const buf of randomInput(dataLength, dataLength)) { + const sendMoreInitiator = pair.initiatorStream.send(buf) + + if (!sendMoreInitiator) { + await raceEvent(pair.initiatorStream, 'drain') + } + } + + await Promise.all([ + raceEvent(pair.initiatorStream, 'close'), + pair.initiatorStream.close(), + pair.receiverStream.close() + ]) + + const initiatorSentMessages = await pair.initiatorMessages() + const receiverSentMessages = await pair.receiverMessages() + + expect(initiatorSentMessages.map(m => m.type)).to.deep.equal([ + MessageTypes.NEW_STREAM, + ...new Array(dataLength).fill(0).map(() => MessageTypes.MESSAGE_INITIATOR), + MessageTypes.CLOSE_INITIATOR + ]) + + expect(receiverSentMessages.map(m => m.type)).to.deep.equal([ + ...new Array(dataLength).fill(0).map(() => MessageTypes.MESSAGE_RECEIVER), + MessageTypes.CLOSE_RECEIVER + ]) + }) + it('should close for reading (remote close)', async () => { const dataLength = 5 - const { - initiatorSentMessages, - receiverSentMessages - } = await streamPair(dataLength) + const pair = await streamPair() + + for (const buf of randomInput(dataLength, dataLength)) { + const sendMoreInitiator = pair.initiatorStream.send(buf) + + if (!sendMoreInitiator) { + await raceEvent(pair.initiatorStream, 'drain') + } + + const sendMoreReceiver = pair.receiverStream.send(buf) + + if (!sendMoreReceiver) { + await raceEvent(pair.receiverStream, 'drain') + } + } + + await Promise.all([ + raceEvent(pair.initiatorStream, 'remoteClose'), + pair.receiverStream.close() + ]) + + const initiatorSentMessages = await pair.initiatorMessages() + const receiverSentMessages = await pair.receiverMessages() // 1x NEW_STREAM, dataLength x MESSAGE_INITIATOR 1x CLOSE_INITIATOR expect(initiatorSentMessages.map(m => m.type)).to.deep.equal([ @@ -393,8 +349,7 @@ describe('stream', () => { MessageTypes.MESSAGE_INITIATOR, MessageTypes.MESSAGE_INITIATOR, MessageTypes.MESSAGE_INITIATOR, - MessageTypes.MESSAGE_INITIATOR, - MessageTypes.CLOSE_INITIATOR + MessageTypes.MESSAGE_INITIATOR ]) // echoes the initiator messages back plus CLOSE_RECEIVER @@ -404,32 +359,40 @@ describe('stream', () => { MessageTypes.MESSAGE_RECEIVER, MessageTypes.MESSAGE_RECEIVER, MessageTypes.MESSAGE_RECEIVER, - MessageTypes.MESSAGE_RECEIVER, - MessageTypes.MESSAGE_RECEIVER, MessageTypes.CLOSE_RECEIVER ]) }) it('should close for reading and writing (abort on local error)', async () => { - const maxMsgs = 2 - const error = new Error(`Boom ${Date.now()}`) - let messages = 0 + const 
dataLength = 2 + const pair = await streamPair() - const dataLength = 5 - const { - initiatorSentMessages, - receiverSentMessages - } = await streamPair(dataLength, (initiatorMessage, initiator) => { - messages++ - - if (messages === maxMsgs) { - initiator.abort(error) + for (const buf of randomInput(dataLength, dataLength)) { + const sendMoreInitiator = pair.initiatorStream.send(buf) + + if (!sendMoreInitiator) { + await raceEvent(pair.initiatorStream, 'drain') } - }) + + const sendMoreReceiver = pair.receiverStream.send(buf) + + if (!sendMoreReceiver) { + await raceEvent(pair.receiverStream, 'drain') + } + } + + await Promise.all([ + raceEvent(pair.receiverStream, 'close'), + pair.initiatorStream.abort(new Error('wat')) + ]) + + const initiatorSentMessages = await pair.initiatorMessages() + const receiverSentMessages = await pair.receiverMessages() expect(initiatorSentMessages.map(m => m.type)).to.deep.equal([ MessageTypes.NEW_STREAM, MessageTypes.MESSAGE_INITIATOR, + MessageTypes.MESSAGE_INITIATOR, MessageTypes.RESET_INITIATOR ]) @@ -440,34 +403,37 @@ describe('stream', () => { }) it('should close for reading and writing (abort on remote error)', async () => { - const maxMsgs = 4 - const error = new Error(`Boom ${Date.now()}`) - let messages = 0 + const dataLength = 2 + const pair = await streamPair() - const dataLength = 5 - const { - initiatorSentMessages, - receiverSentMessages - } = await streamPair(dataLength, (initiatorMessage, initiator, recipient) => { - messages++ - - if (messages === maxMsgs) { - recipient.abort(error) + for (const buf of randomInput(dataLength, dataLength)) { + const sendMoreInitiator = pair.initiatorStream.send(buf) + + if (!sendMoreInitiator) { + await raceEvent(pair.initiatorStream, 'drain') } - }) - // All messages sent to recipient + const sendMoreReceiver = pair.receiverStream.send(buf) + + if (!sendMoreReceiver) { + await raceEvent(pair.receiverStream, 'drain') + } + } + + await Promise.all([ + raceEvent(pair.initiatorStream, 'close'), + pair.receiverStream.abort(new Error('wat')) + ]) + + const initiatorSentMessages = await pair.initiatorMessages() + const receiverSentMessages = await pair.receiverMessages() + expect(initiatorSentMessages.map(m => m.type)).to.deep.equal([ MessageTypes.NEW_STREAM, MessageTypes.MESSAGE_INITIATOR, - MessageTypes.MESSAGE_INITIATOR, - MessageTypes.MESSAGE_INITIATOR, - MessageTypes.MESSAGE_INITIATOR, - MessageTypes.MESSAGE_INITIATOR, - MessageTypes.CLOSE_INITIATOR + MessageTypes.MESSAGE_INITIATOR ]) - // Recipient reset after two messages expect(receiverSentMessages.map(m => m.type)).to.deep.equal([ MessageTypes.MESSAGE_RECEIVER, MessageTypes.MESSAGE_RECEIVER, @@ -475,227 +441,32 @@ describe('stream', () => { ]) }) - it('should close immediately for reading and writing (reset on local error)', async () => { - const maxMsgs = 2 - const error = new Error(`Boom ${Date.now()}`) - let messages = 0 + it('should split writes larger than max message size', async () => { + const pair = await streamPair() - const dataLength = 5 - const { - initiatorSentMessages, - receiverSentMessages - } = await streamPair(dataLength, () => { - messages++ - - if (messages === maxMsgs) { - throw error - } - }) + const buf = new Uint8Array(MAX_MSG_SIZE * 2) - expect(initiatorSentMessages.map(m => m.type)).to.deep.equal([ - MessageTypes.NEW_STREAM, - MessageTypes.MESSAGE_INITIATOR, - MessageTypes.RESET_INITIATOR - ]) + if (!pair.initiatorStream.send(buf)) { + await raceEvent(pair.initiatorStream, 'drain') + } - // Reset after two messages - 
expect(receiverSentMessages.map(m => m.type)).to.deep.equal([ - MessageTypes.MESSAGE_RECEIVER, - MessageTypes.MESSAGE_RECEIVER + await Promise.all([ + pair.initiatorStream.close(), + pair.receiverStream.close() ]) - }) - it('should close immediately for reading and writing (reset on remote error)', async () => { - const maxMsgs = 2 - const error = new Error(`Boom ${Date.now()}`) - let messages = 0 - - const dataLength = 5 - const { - initiatorSentMessages, - receiverSentMessages - } = await streamPair(dataLength, () => {}, () => { - messages++ - - if (messages === maxMsgs) { - throw error - } - }) + const initiatorSentMessages = await pair.initiatorMessages() + const receiverSentMessages = await pair.receiverMessages() - // All messages sent to recipient expect(initiatorSentMessages.map(m => m.type)).to.deep.equal([ MessageTypes.NEW_STREAM, MessageTypes.MESSAGE_INITIATOR, MessageTypes.MESSAGE_INITIATOR, - MessageTypes.MESSAGE_INITIATOR, - MessageTypes.MESSAGE_INITIATOR, - MessageTypes.MESSAGE_INITIATOR, MessageTypes.CLOSE_INITIATOR ]) - // Recipient reset after two messages expect(receiverSentMessages.map(m => m.type)).to.deep.equal([ - MessageTypes.MESSAGE_RECEIVER, - MessageTypes.MESSAGE_RECEIVER, - MessageTypes.RESET_RECEIVER + MessageTypes.CLOSE_RECEIVER ]) }) - - it('should call onEnd only when both sides have closed', async () => { - const send = async (msg: Message): Promise => { - if (msg.type === MessageTypes.CLOSE_INITIATOR) { - // simulate remote closing connection - await stream.closeRead() - } else if (msg.type === MessageTypes.MESSAGE_INITIATOR) { - stream.sourcePush(msgToBuffer(msg)) - } - } - const id = randomInt(1000) - const name = id.toString() - const deferred = defer() - const onEnd = (err?: any): void => { err != null ? deferred.reject(err) : deferred.resolve() } - const stream = createStream({ - id, - name, - send, - onEnd, - log: defaultLogger().forComponent('initiator') - }) - const input = randomInput() - - void pipe( - input, - stream, - drain - ) - - await deferred.promise - }) - - it('should call onEnd with error for local error', async () => { - const send = async (): Promise => { - throw new Error(`Local boom ${Date.now()}`) - } - const id = randomInt(1000) - const deferred = defer() - const onEnd = (err?: any): void => { err != null ? 
deferred.reject(err) : deferred.resolve() } - const stream = createStream({ - id, - send, - onEnd, - log: defaultLogger().forComponent('initiator') - }) - const input = randomInput() - - pipe( - input, - stream, - drain - ).catch(() => {}) - - await expect(deferred.promise).to.eventually.be.rejectedWith(/Local boom/) - }) - - it('should split writes larger than max message size', async () => { - const messages: Message[] = [] - - const send = async (msg: Message): Promise => { - if (msg.type === MessageTypes.CLOSE_INITIATOR) { - await stream.closeRead() - } else if (msg.type === MessageTypes.MESSAGE_INITIATOR) { - messages.push(msg) - } - } - const maxMsgSize = 10 - const id = randomInt(1000) - const stream = createStream({ - id, - send, - maxMsgSize, - log: defaultLogger().forComponent('initiator') - }) - - await pipe( - [ - new Uint8ArrayList(new Uint8Array(maxMsgSize * 2)) - ], - stream, - drain - ) - - expect(messages.length).to.equal(2) - expect(messages[0]).to.have.nested.property('data.length', maxMsgSize) - expect(messages[1]).to.have.nested.property('data.length', maxMsgSize) - }) - - it('should error on double sink', async () => { - const send = async (): Promise => {} - const id = randomInt(1000) - const stream = createStream({ - id, - send, - log: defaultLogger().forComponent('initiator') - }) - - // first sink is ok - void stream.sink([]) - - // cannot sink twice - await expect(stream.sink([])) - .to.eventually.be.rejected.with.property('name', 'StreamStateError') - }) - - it('should error on double sink after sink has ended', async () => { - const send = async (): Promise => {} - const id = randomInt(1000) - const stream = createStream({ - id, - send, - log: defaultLogger().forComponent('initiator') - }) - - // first sink is ok - await stream.sink([]) - - // cannot sink twice - await expect(stream.sink([])) - .to.eventually.be.rejected.with.property('name', 'StreamStateError') - }) - - it('should chunk really big messages', async () => { - const msgs: Message[] = [] - const mockSend = async (msg: Message): Promise => { msgs.push(msg) } - const id = randomInt(1000) - const name = `STREAM${Date.now()}` - const maxMsgSize = 10 - const stream = createStream({ - id, - name, - send: mockSend, - maxMsgSize, - log: defaultLogger().forComponent('initiator') - }) - const input = [ - new Uint8Array(1024).map(() => randomInt(0, 255)) - ] - const output = new Uint8ArrayList() - - await pipe(input, stream) - - expect(msgs).to.have.lengthOf(105) - expect(msgs[0].id).to.equal(id) - expectMsgType(msgs[0].type, MessageTypes.NEW_STREAM) - - for (let i = 1; i < msgs.length - 1; i++) { - const msg = msgs[i] - expectMsgType(msg.type, MessageTypes.MESSAGE_INITIATOR) - - if (msg.type === MessageTypes.MESSAGE_INITIATOR) { - output.append(msg.data) - } - } - - expectMsgType(msgs[msgs.length - 1].type, MessageTypes.CLOSE_INITIATOR) - expect(output.subarray()).to.equalBytes(input[0]) - }) }) diff --git a/packages/stream-multiplexer-yamux/.aegir.js b/packages/stream-multiplexer-yamux/.aegir.js new file mode 100644 index 0000000000..7ecc20b05a --- /dev/null +++ b/packages/stream-multiplexer-yamux/.aegir.js @@ -0,0 +1,7 @@ + +/** @type {import('aegir/types').PartialOptions} */ +export default { + build: { + bundlesizeMax: '9.5kB' + } +} diff --git a/packages/stream-multiplexer-yamux/README.md b/packages/stream-multiplexer-yamux/README.md new file mode 100644 index 0000000000..f3b3ba4c8a --- /dev/null +++ b/packages/stream-multiplexer-yamux/README.md @@ -0,0 +1,127 @@ +# @chainsafe/libp2p-yamux + 
+[![codecov](https://img.shields.io/codecov/c/github/ChainSafe/js-libp2p-yamux.svg?style=flat-square)](https://codecov.io/gh/ChainSafe/js-libp2p-yamux) +[![CI](https://img.shields.io/github/actions/workflow/status/ChainSafe/js-libp2p-yamux/js-test-and-release.yml?branch=master\&style=flat-square)](https://github.com/ChainSafe/js-libp2p-yamux/actions/workflows/js-test-and-release.yml?query=branch%3Amaster) + +> Yamux stream multiplexer for libp2p + +# About + + + +This module is a JavaScript implementation of [Yamux from Hashicorp](https://github.com/hashicorp/yamux/blob/master/spec.md) designed to be used with [js-libp2p](https://github.com/libp2p/js-libp2p). + +## Example - Configure libp2p with Yamux + +```typescript +import { createLibp2p } from 'libp2p' +import { yamux } from '@chainsafe/libp2p-yamux' + +const node = await createLibp2p({ + // ... other options + streamMuxers: [ + yamux() + ] +}) +``` + +## Example - Using the low-level API + +```js +import { yamux } from '@chainsafe/libp2p-yamux' +import { pipe } from 'it-pipe' +import { duplexPair } from 'it-pair/duplex' +import all from 'it-all' + +// Connect two yamux muxers to demo basic stream multiplexing functionality + +const clientMuxer = yamux({ + client: true, + onIncomingStream: stream => { + // echo data on incoming streams + pipe(stream, stream) + }, + onStreamEnd: stream => { + // do nothing + } +})() + +const serverMuxer = yamux({ + client: false, + onIncomingStream: stream => { + // echo data on incoming streams + pipe(stream, stream) + }, + onStreamEnd: stream => { + // do nothing + } +})() + +// `p` is our "connections", what we use to connect the two sides +// In a real application, a connection is usually to a remote computer +const p = duplexPair() + +// connect the muxers together +pipe(p[0], clientMuxer, p[0]) +pipe(p[1], serverMuxer, p[1]) + +// now either side can open streams +const stream0 = clientMuxer.newStream() +const stream1 = serverMuxer.newStream() + +// Send some data to the other side +const encoder = new TextEncoder() +const data = [encoder.encode('hello'), encoder.encode('world')] +pipe(data, stream0) + +// Receive data back +const result = await pipe(stream0, all) + +// close a stream +stream1.close() + +// close the muxer +clientMuxer.close() +``` + +# Install + +```console +$ npm i @chainsafe/libp2p-yamux +``` + +## Browser ` +``` + +# API Docs + +- + +# License + +Licensed under either of + +- Apache 2.0, ([LICENSE-APACHE](https://github.com/ChainSafe/js-libp2p-yamux/LICENSE-APACHE) / ) +- MIT ([LICENSE-MIT](https://github.com/ChainSafe/js-libp2p-yamux/LICENSE-MIT) / ) + +# Contribution + +Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions. 
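+## Example - Event-based muxer API
+
+A minimal sketch of driving the muxer directly over a `MultiaddrConnection` pair. It assumes
+yamux exposes the same `createStreamMuxer(maConn)` factory and event-based streams that the
+mplex tests in this changeset exercise; `multiaddrConnectionPair` from `@libp2p/utils` is only
+used here to wire the two muxers together in memory.
+
+```typescript
+import { yamux } from '@chainsafe/libp2p-yamux'
+import { multiaddrConnectionPair } from '@libp2p/utils'
+
+// an in-memory connection pair standing in for a real network connection
+const [outbound, inbound] = multiaddrConnectionPair()
+
+const factory = yamux()()
+const dialer = factory.createStreamMuxer(outbound)
+const listener = factory.createStreamMuxer(inbound)
+
+// remotely opened streams are surfaced as 'stream' events
+listener.addEventListener('stream', (evt) => {
+  const stream = evt.detail
+
+  // echo any received data back to the opener
+  stream.addEventListener('message', (msg) => {
+    stream.send(msg.data)
+  })
+})
+
+// open a stream and write to it
+const stream = await dialer.createStream()
+stream.send(new TextEncoder().encode('hello world'))
+
+// close the write end once done
+await stream.close()
+```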
diff --git a/packages/stream-multiplexer-yamux/package.json b/packages/stream-multiplexer-yamux/package.json new file mode 100644 index 0000000000..5b5ee973ad --- /dev/null +++ b/packages/stream-multiplexer-yamux/package.json @@ -0,0 +1,189 @@ +{ + "name": "@chainsafe/libp2p-yamux", + "version": "7.0.4", + "description": "Yamux stream multiplexer for libp2p", + "license": "Apache-2.0 OR MIT", + "homepage": "https://github.com/ChainSafe/js-libp2p-yamux#readme", + "repository": { + "type": "git", + "url": "git+https://github.com/ChainSafe/js-libp2p-yamux.git" + }, + "bugs": { + "url": "https://github.com/ChainSafe/js-libp2p-yamux/issues" + }, + "publishConfig": { + "access": "public", + "provenance": true + }, + "keywords": [ + "IPFS", + "libp2p", + "multiplexer", + "muxer", + "stream" + ], + "type": "module", + "types": "./dist/src/index.d.ts", + "typesVersions": { + "*": { + "*": [ + "*", + "dist/*", + "dist/src/*", + "dist/src/*/index" + ], + "src/*": [ + "*", + "dist/*", + "dist/src/*", + "dist/src/*/index" + ] + } + }, + "files": [ + "src", + "dist", + "!dist/test", + "!**/*.tsbuildinfo" + ], + "exports": { + ".": { + "types": "./dist/src/index.d.ts", + "import": "./dist/src/index.js" + }, + "./config": { + "types": "./dist/src/config.d.ts", + "import": "./dist/src/config.js" + }, + "./stream": { + "types": "./dist/src/stream.d.ts", + "import": "./dist/src/stream.js" + } + }, + "release": { + "branches": [ + "master" + ], + "plugins": [ + [ + "@semantic-release/commit-analyzer", + { + "preset": "conventionalcommits", + "releaseRules": [ + { + "breaking": true, + "release": "major" + }, + { + "revert": true, + "release": "patch" + }, + { + "type": "feat", + "release": "minor" + }, + { + "type": "fix", + "release": "patch" + }, + { + "type": "docs", + "release": "patch" + }, + { + "type": "test", + "release": "patch" + }, + { + "type": "deps", + "release": "patch" + }, + { + "scope": "no-release", + "release": false + } + ] + } + ], + [ + "@semantic-release/release-notes-generator", + { + "preset": "conventionalcommits", + "presetConfig": { + "types": [ + { + "type": "feat", + "section": "Features" + }, + { + "type": "fix", + "section": "Bug Fixes" + }, + { + "type": "chore", + "section": "Trivial Changes" + }, + { + "type": "docs", + "section": "Documentation" + }, + { + "type": "deps", + "section": "Dependencies" + }, + { + "type": "test", + "section": "Tests" + } + ] + } + } + ], + "@semantic-release/changelog", + "@semantic-release/npm", + "@semantic-release/github", + [ + "@semantic-release/git", + { + "assets": [ + "CHANGELOG.md", + "package.json" + ] + } + ] + ] + }, + "scripts": { + "clean": "aegir clean", + "lint": "aegir lint", + "dep-check": "aegir dep-check", + "doc-check": "aegir doc-check", + "benchmark": "benchmark dist/test/bench/*.bench.js --timeout 400000", + "build": "aegir build", + "test": "aegir test", + "test:chrome": "aegir test -t browser", + "test:chrome-webworker": "aegir test -t webworker", + "test:firefox": "aegir test -t browser -- --browser firefox", + "test:firefox-webworker": "aegir test -t webworker -- --browser firefox", + "test:node": "aegir test -t node --cov", + "test:electron-main": "aegir test -t electron-main", + "release": "aegir release", + "docs": "aegir docs" + }, + "dependencies": { + "@libp2p/interface": "^2.0.0", + "@libp2p/utils": "^6.0.0", + "race-signal": "^1.1.3", + "uint8arraylist": "^2.4.8" + }, + "devDependencies": { + "@dapplion/benchmark": "^1.0.0", + "@libp2p/interface-compliance-tests": "^6.4.12", + "@libp2p/mplex": "^11.0.0", + 
"aegir": "^47.0.17", + "it-all": "^3.0.9", + "it-drain": "^3.0.5", + "it-pushable": "^3.2.3", + "p-event": "^6.0.1" + } +} diff --git a/packages/stream-multiplexer-yamux/src/config.ts b/packages/stream-multiplexer-yamux/src/config.ts new file mode 100644 index 0000000000..0493607027 --- /dev/null +++ b/packages/stream-multiplexer-yamux/src/config.ts @@ -0,0 +1,81 @@ +import { InvalidParametersError } from '@libp2p/interface' +import { INITIAL_STREAM_WINDOW, MAX_STREAM_WINDOW } from './constants.js' + +// TODO use config items or delete them +export interface Config { + /** + * Used to do periodic keep alive messages using a ping. + */ + enableKeepAlive: boolean + + /** + * How often to perform the keep alive + * + * measured in milliseconds + */ + keepAliveInterval: number + + /** + * Maximum number of concurrent inbound streams that we accept. + * If the peer tries to open more streams, those will be reset immediately. + */ + maxInboundStreams: number + + /** + * Maximum number of concurrent outbound streams that we accept. + * If the application tries to open more streams, the call to `newStream` will throw + */ + maxOutboundStreams: number + + /** + * Used to control the initial window size that we allow for a stream. + * + * measured in bytes + */ + initialStreamWindowSize: number + + /** + * Used to control the maximum window size that we allow for a stream. + */ + maxStreamWindowSize: number + + /** + * Maximum size of a message that we'll send on a stream. + * This ensures that a single stream doesn't hog a connection. + */ + maxMessageSize: number +} + +export const defaultConfig: Config = { + enableKeepAlive: true, + keepAliveInterval: 30_000, + maxInboundStreams: 1_000, + maxOutboundStreams: 1_000, + initialStreamWindowSize: INITIAL_STREAM_WINDOW, + maxStreamWindowSize: MAX_STREAM_WINDOW, + maxMessageSize: 64 * 1024 +} + +export function verifyConfig (config: Config): void { + if (config.keepAliveInterval <= 0) { + throw new InvalidParametersError('keep-alive interval must be positive') + } + if (config.maxInboundStreams < 0) { + throw new InvalidParametersError('max inbound streams must be larger or equal 0') + } + if (config.maxOutboundStreams < 0) { + throw new InvalidParametersError('max outbound streams must be larger or equal 0') + } + if (config.initialStreamWindowSize < INITIAL_STREAM_WINDOW) { + throw new InvalidParametersError('InitialStreamWindowSize must be larger or equal 256 kB') + } + if (config.maxStreamWindowSize < config.initialStreamWindowSize) { + throw new InvalidParametersError('MaxStreamWindowSize must be larger than the InitialStreamWindowSize') + } + if (config.maxStreamWindowSize > 2 ** 32 - 1) { + throw new InvalidParametersError('MaxStreamWindowSize must be less than equal MAX_UINT32') + } + if (config.maxMessageSize < 1024) { + throw new InvalidParametersError('MaxMessageSize must be greater than a kilobyte') + } +} diff --git a/packages/stream-multiplexer-yamux/src/constants.ts b/packages/stream-multiplexer-yamux/src/constants.ts new file mode 100644 index 0000000000..0bba202c16 --- /dev/null +++ b/packages/stream-multiplexer-yamux/src/constants.ts @@ -0,0 +1,25 @@ +// Protocol violation errors + +import { BothClientsError, DecodeInvalidVersionError, InvalidFrameError, NotMatchingPingError, ReceiveWindowExceededError, StreamAlreadyExistsError, UnRequestedPingError } from './errors.js' + +export const PROTOCOL_ERRORS = new Set([ + InvalidFrameError.name, + UnRequestedPingError.name, + NotMatchingPingError.name, + StreamAlreadyExistsError.name, + 
DecodeInvalidVersionError.name, + BothClientsError.name, + ReceiveWindowExceededError.name +]) + +/** + * INITIAL_STREAM_WINDOW is the initial stream window size. + * + * Not an implementation choice, this is defined in the specification + */ +export const INITIAL_STREAM_WINDOW = 256 * 1024 + +/** + * Default max stream window + */ +export const MAX_STREAM_WINDOW = 16 * 1024 * 1024 diff --git a/packages/stream-multiplexer-yamux/src/decode.ts b/packages/stream-multiplexer-yamux/src/decode.ts new file mode 100644 index 0000000000..8cf0a652fd --- /dev/null +++ b/packages/stream-multiplexer-yamux/src/decode.ts @@ -0,0 +1,104 @@ +import { Uint8ArrayList } from 'uint8arraylist' +import { InvalidFrameError } from './errors.js' +import { FrameType, HEADER_LENGTH, YAMUX_VERSION } from './frame.js' +import type { FrameHeader } from './frame.js' + +export interface Frame { + header: FrameHeader + data?: Uint8ArrayList +} + +export interface DataFrame { + header: FrameHeader + data: Uint8ArrayList +} + +export function isDataFrame (frame: Frame): frame is DataFrame { + return frame.header.type === FrameType.Data && frame.data !== null +} + +// used to bit shift in decoding +// native bit shift can overflow into a negative number, so we bit shift by +// multiplying by a power of 2 +const twoPow24 = 2 ** 24 + +/** + * Decode a header from the front of a buffer + * + * @param data - Assumed to have enough bytes for a header + */ +export function decodeHeader (data: Uint8Array): FrameHeader { + if (data[0] !== YAMUX_VERSION) { + throw new InvalidFrameError('Invalid frame version') + } + + return { + type: data[1], + flag: (data[2] << 8) + data[3], + streamID: (data[4] * twoPow24) + (data[5] << 16) + (data[6] << 8) + data[7], + length: (data[8] * twoPow24) + (data[9] << 16) + (data[10] << 8) + data[11] + } +} + +/** + * Decodes yamux frames from a source + */ +export class Decoder { + /** Buffer for in-progress frames */ + private readonly buffer: Uint8ArrayList + + constructor () { + this.buffer = new Uint8ArrayList() + } + + /** + * Emits frames from the decoder source. 
+ * + * Note: If `readData` is emitted, it _must_ be called before the next iteration + * Otherwise an error is thrown + */ + * emitFrames (buf: Uint8Array | Uint8ArrayList): Generator { + this.buffer.append(buf) + + // Loop to consume as many bytes from the buffer as possible + // Eg: when a single chunk contains several frames + while (true) { + const frame = this.readFrame() + + if (frame === undefined) { + break + } + + yield frame + } + } + + private readFrame (): Frame | undefined { + let frameSize = HEADER_LENGTH + + if (this.buffer.byteLength < HEADER_LENGTH) { + // not enough data yet + return + } + + const header = decodeHeader(this.buffer.subarray(0, HEADER_LENGTH)) + + if (header.type === FrameType.Data) { + frameSize += header.length + + if (this.buffer.byteLength < frameSize) { + // not enough data yet + return + } + + const data = this.buffer.sublist(HEADER_LENGTH, frameSize) + this.buffer.consume(frameSize) + + return { header, data } + } + + this.buffer.consume(frameSize) + + return { header } + } +} diff --git a/packages/stream-multiplexer-yamux/src/encode.ts b/packages/stream-multiplexer-yamux/src/encode.ts new file mode 100644 index 0000000000..6353c00916 --- /dev/null +++ b/packages/stream-multiplexer-yamux/src/encode.ts @@ -0,0 +1,26 @@ +import { HEADER_LENGTH } from './frame.js' +import type { FrameHeader } from './frame.js' + +export function encodeHeader (header: FrameHeader): Uint8Array { + const frame = new Uint8Array(HEADER_LENGTH) + + // always assume version 0 + // frameView.setUint8(0, header.version) + + frame[1] = header.type + + frame[2] = header.flag >>> 8 + frame[3] = header.flag + + frame[4] = header.streamID >>> 24 + frame[5] = header.streamID >>> 16 + frame[6] = header.streamID >>> 8 + frame[7] = header.streamID + + frame[8] = header.length >>> 24 + frame[9] = header.length >>> 16 + frame[10] = header.length >>> 8 + frame[11] = header.length + + return frame +} diff --git a/packages/stream-multiplexer-yamux/src/errors.ts b/packages/stream-multiplexer-yamux/src/errors.ts new file mode 100644 index 0000000000..e449d3211b --- /dev/null +++ b/packages/stream-multiplexer-yamux/src/errors.ts @@ -0,0 +1,89 @@ +import { GoAwayCode } from './frame.ts' + +export class ProtocolError extends Error { + static name = 'ProtocolError' + + public reason: GoAwayCode + + constructor (message: string, reason: GoAwayCode) { + super(message) + this.name = 'ProtocolError' + this.reason = reason + } +} + +export function isProtocolError (err?: any): err is ProtocolError { + return err?.reason !== null +} + +export class InvalidFrameError extends ProtocolError { + static name = 'InvalidFrameError' + + constructor (message = 'The frame was invalid') { + super(message, GoAwayCode.ProtocolError) + this.name = 'InvalidFrameError' + } +} + +export class UnRequestedPingError extends ProtocolError { + static name = 'UnRequestedPingError' + + constructor (message = 'Un-requested ping error') { + super(message, GoAwayCode.ProtocolError) + this.name = 'UnRequestedPingError' + } +} + +export class NotMatchingPingError extends ProtocolError { + static name = 'NotMatchingPingError' + + constructor (message = 'Not matching ping error') { + super(message, GoAwayCode.ProtocolError) + this.name = 'NotMatchingPingError' + } +} + +export class InvalidStateError extends Error { + static name = 'InvalidStateError' + + constructor (message = 'Invalid state') { + super(message) + this.name = 'InvalidStateError' + } +} + +export class StreamAlreadyExistsError extends ProtocolError { + static name = 
'StreamAlreadyExistsError' + + constructor (message = 'Stream already exists') { + super(message, GoAwayCode.ProtocolError) + this.name = 'StreamAlreadyExistsError' + } +} + +export class DecodeInvalidVersionError extends ProtocolError { + static name = 'DecodeInvalidVersionError' + + constructor (message = 'Decode invalid version') { + super(message, GoAwayCode.ProtocolError) + this.name = 'DecodeInvalidVersionError' + } +} + +export class BothClientsError extends ProtocolError { + static name = 'BothClientsError' + + constructor (message = 'Both clients') { + super(message, GoAwayCode.ProtocolError) + this.name = 'BothClientsError' + } +} + +export class ReceiveWindowExceededError extends ProtocolError { + static name = 'ReceiveWindowExceededError' + + constructor (message = 'Receive window exceeded') { + super(message, GoAwayCode.ProtocolError) + this.name = 'ReceiveWindowExceededError' + } +} diff --git a/packages/stream-multiplexer-yamux/src/frame.ts b/packages/stream-multiplexer-yamux/src/frame.ts new file mode 100644 index 0000000000..d2d814fe35 --- /dev/null +++ b/packages/stream-multiplexer-yamux/src/frame.ts @@ -0,0 +1,64 @@ +export enum FrameType { + /** Used to transmit data. May transmit zero length payloads depending on the flags. */ + Data = 0x0, + /** Used to updated the senders receive window size. This is used to implement per-session flow control. */ + WindowUpdate = 0x1, + /** Used to measure RTT. It can also be used to heart-beat and do keep-alive over TCP. */ + Ping = 0x2, + /** Used to close a session. */ + GoAway = 0x3 +} + +export enum Flag { + /** Signals the start of a new stream. May be sent with a data or window update message. Also sent with a ping to indicate outbound. */ + SYN = 0x1, + /** Acknowledges the start of a new stream. May be sent with a data or window update message. Also sent with a ping to indicate response. */ + ACK = 0x2, + /** Performs a half-close of a stream. May be sent with a data message or window update. */ + FIN = 0x4, + /** Reset a stream immediately. May be sent with a data or window update message. */ + RST = 0x8 +} + +const flagCodes = Object.values(Flag).filter((x) => typeof x !== 'string') as Flag[] + +export const YAMUX_VERSION = 0 + +export enum GoAwayCode { + NormalTermination = 0x0, + ProtocolError = 0x1, + InternalError = 0x2 +} + +export const HEADER_LENGTH = 12 + +export interface FrameHeader { + /** + * The version field is used for future backward compatibility. + * At the current time, the field is always set to 0, to indicate the initial version. + */ + version?: number + /** The type field is used to switch the frame message type. */ + type: FrameType + /** The flags field is used to provide additional information related to the message type. */ + flag: number + /** + * The StreamID field is used to identify the logical stream the frame is addressing. + * The client side should use odd ID's, and the server even. + * This prevents any collisions. Additionally, the 0 ID is reserved to represent the session. 
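+ * For example, in this implementation a dialing (client) muxer allocates IDs 1, 3, 5, … and a listening (server) muxer allocates 2, 4, 6, …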
+ */ + streamID: number + /** + * The meaning of the length field depends on the message type: + * Data - provides the length of bytes following the header + * Window update - provides a delta update to the window size + * Ping - Contains an opaque value, echoed back + * Go Away - Contains an error code + */ + length: number +} + +export function stringifyHeader (header: FrameHeader): string { + const flags = flagCodes.filter(f => (header.flag & f) === f).map(f => Flag[f]).join('|') + return `streamID=${header.streamID} type=${FrameType[header.type]} flag=${flags} length=${header.length}` +} diff --git a/packages/stream-multiplexer-yamux/src/index.ts b/packages/stream-multiplexer-yamux/src/index.ts new file mode 100644 index 0000000000..31298d6a30 --- /dev/null +++ b/packages/stream-multiplexer-yamux/src/index.ts @@ -0,0 +1,90 @@ +/** + * @packageDocumentation + * + * This module is a JavaScript implementation of [Yamux from Hashicorp](https://github.com/hashicorp/yamux/blob/master/spec.md) designed to be used with [js-libp2p](https://github.com/libp2p/js-libp2p). + * + * @example Configure libp2p with Yamux + * + * ```typescript + * import { createLibp2p } from 'libp2p' + * import { yamux } from '@chainsafe/libp2p-yamux' + * + * const node = await createLibp2p({ + * // ... other options + * streamMuxers: [ + * yamux() + * ] + * }) + * ``` + * + * @example Using the low-level API + * + * ```js + * import { yamux } from '@chainsafe/libp2p-yamux' + * import { pipe } from 'it-pipe' + * import { duplexPair } from 'it-pair/duplex' + * import all from 'it-all' + * + * // Connect two yamux muxers to demo basic stream multiplexing functionality + * + * const clientMuxer = yamux({ + * client: true, + * onIncomingStream: stream => { + * // echo data on incoming streams + * pipe(stream, stream) + * }, + * onStreamEnd: stream => { + * // do nothing + * } + * })() + * + * const serverMuxer = yamux({ + * client: false, + * onIncomingStream: stream => { + * // echo data on incoming streams + * pipe(stream, stream) + * }, + * onStreamEnd: stream => { + * // do nothing + * } + * })() + * + * // `p` is our "connections", what we use to connect the two sides + * // In a real application, a connection is usually to a remote computer + * const p = duplexPair() + * + * // connect the muxers together + * pipe(p[0], clientMuxer, p[0]) + * pipe(p[1], serverMuxer, p[1]) + * + * // now either side can open streams + * const stream0 = clientMuxer.newStream() + * const stream1 = serverMuxer.newStream() + * + * // Send some data to the other side + * const encoder = new TextEncoder() + * const data = [encoder.encode('hello'), encoder.encode('world')] + * pipe(data, stream0) + * + * // Receive data back + * const result = await pipe(stream0, all) + * + * // close a stream + * stream1.close() + * + * // close the muxer + * clientMuxer.close() + * ``` + */ + +import { Yamux } from './muxer.js' +import type { YamuxMuxer, YamuxMuxerInit } from './muxer.js' +import type { StreamMuxerFactory } from '@libp2p/interface' + +export { GoAwayCode } from './frame.js' +export type { FrameHeader, FrameType } from './frame.js' +export type { YamuxMuxerInit } + +export function yamux (init: YamuxMuxerInit = {}): () => StreamMuxerFactory { + return () => new Yamux(init) +} diff --git a/packages/stream-multiplexer-yamux/src/muxer.ts b/packages/stream-multiplexer-yamux/src/muxer.ts new file mode 100644 index 0000000000..1b8178483e --- /dev/null +++ b/packages/stream-multiplexer-yamux/src/muxer.ts @@ -0,0 +1,487 @@ +import { 
InvalidParametersError, MuxerClosedError, TooManyOutboundProtocolStreamsError, serviceCapabilities } from '@libp2p/interface' +import { AbstractStreamMuxer, repeatingTask } from '@libp2p/utils' +import { raceSignal } from 'race-signal' +import { Uint8ArrayList } from 'uint8arraylist' +import { defaultConfig, verifyConfig } from './config.js' +import { Decoder } from './decode.js' +import { encodeHeader } from './encode.js' +import { InvalidFrameError, isProtocolError, NotMatchingPingError, UnRequestedPingError } from './errors.js' +import { Flag, FrameType, GoAwayCode } from './frame.js' +import { StreamState, YamuxStream } from './stream.js' +import type { Config } from './config.js' +import type { Frame } from './decode.js' +import type { FrameHeader } from './frame.js' +import type { AbortOptions, MessageStream, StreamMuxerFactory } from '@libp2p/interface' +import type { RepeatingTask } from '@libp2p/utils' + +function debugFrame (header: FrameHeader): any { + return { + type: FrameType[header.type], + flags: [ + (header.flag & Flag.SYN) === Flag.SYN ? 'SYN' : undefined, + (header.flag & Flag.ACK) === Flag.ACK ? 'ACK' : undefined, + (header.flag & Flag.FIN) === Flag.FIN ? 'FIN' : undefined, + (header.flag & Flag.RST) === Flag.RST ? 'RST' : undefined + ].filter(Boolean), + streamID: header.streamID, + length: header.length + } +} + +const YAMUX_PROTOCOL_ID = '/yamux/1.0.0' + +export interface YamuxMuxerInit extends Partial { +} + +export class Yamux implements StreamMuxerFactory { + protocol = YAMUX_PROTOCOL_ID + private readonly _init: Partial + + constructor (init: Partial = {}) { + this._init = init + } + + readonly [Symbol.toStringTag] = '@chainsafe/libp2p-yamux' + + readonly [serviceCapabilities]: string[] = [ + '@libp2p/stream-multiplexing' + ] + + createStreamMuxer (maConn: MessageStream): YamuxMuxer { + return new YamuxMuxer(maConn, { + ...this._init + }) + } +} + +export interface CloseOptions extends AbortOptions { + reason?: GoAwayCode +} + +export interface ActivePing extends PromiseWithResolvers { + id: number + start: number +} + +export class YamuxMuxer extends AbstractStreamMuxer { + private readonly config: Config + + /** The next stream id to be used when initiating a new stream */ + private nextStreamID: number + + /** The next ping id to be used when pinging */ + private nextPingID: number + /** Tracking info for the currently active ping */ + private activePing?: ActivePing + /** Round trip time */ + private rtt: number + + /** True if client, false if server */ + private client: boolean + + private localGoAway?: GoAwayCode + private remoteGoAway?: GoAwayCode + + /** Number of tracked inbound streams */ + private numInboundStreams: number + /** Number of tracked outbound streams */ + private numOutboundStreams: number + + private decoder: Decoder + private keepAlive?: RepeatingTask + + constructor (maConn: MessageStream, init: YamuxMuxerInit = {}) { + super(maConn, { + protocol: YAMUX_PROTOCOL_ID, + name: 'yamux' + }) + + this.client = maConn.direction === 'outbound' + this.config = { ...defaultConfig, ...init } + verifyConfig(this.config) + + this.decoder = new Decoder() + + this.numInboundStreams = 0 + this.numOutboundStreams = 0 + + // client uses odd streamIDs, server uses even streamIDs + this.nextStreamID = this.client ? 
1 : 2 + + this.nextPingID = 0 + this.rtt = -1 + + this.log.trace('muxer created') + + if (this.config.enableKeepAlive) { + this.log.trace('muxer keepalive enabled interval=%s', this.config.keepAliveInterval) + this.keepAlive = repeatingTask(async (options) => { + try { + await this.ping(options) + } catch (err: any) { + // TODO: should abort here? + this.log.error('ping error: %s', err) + } + }, this.config.keepAliveInterval, { + // send an initial ping to establish RTT + runImmediately: true + }) + this.keepAlive.start() + } + } + + onData (buf: Uint8Array | Uint8ArrayList): void { + for (const frame of this.decoder.emitFrames(buf)) { + this.handleFrame(frame) + } + } + + onCreateStream (): YamuxStream { + if (this.remoteGoAway !== undefined) { + throw new MuxerClosedError('Muxer closed remotely') + } + if (this.localGoAway !== undefined) { + throw new MuxerClosedError('Muxer closed locally') + } + + const id = this.nextStreamID + this.nextStreamID += 2 + + // check against our configured maximum number of outbound streams + if (this.numOutboundStreams >= this.config.maxOutboundStreams) { + throw new TooManyOutboundProtocolStreamsError('max outbound streams exceeded') + } + + this.log.trace('new outgoing stream id=%s', id) + + const stream = this._newStream(id, StreamState.Init, 'outbound') + + this.numOutboundStreams++ + + // send a window update to open the stream on the receiver end. do this in a + // microtask so the stream gets added to the streams array by the superclass + // before we send the SYN flag, otherwise we create a race condition whereby + // we can receive the ACK before the stream is added to the streams list + queueMicrotask(() => { + stream.sendWindowUpdate() + }) + + return stream + } + + /** + * Initiate a ping and wait for a response + * + * Note: only a single ping will be initiated at a time. + * If a ping is already in progress, a new ping will not be initiated. + * + * @returns the round-trip-time in milliseconds + */ + async ping (options?: AbortOptions): Promise { + if (this.remoteGoAway !== undefined) { + throw new MuxerClosedError('Muxer closed remotely') + } + if (this.localGoAway !== undefined) { + throw new MuxerClosedError('Muxer closed locally') + } + + if (this.activePing != null) { + // an active ping is already in progress, piggyback off that + return raceSignal(this.activePing.promise, options?.signal) + } + + // An active ping does not yet exist, handle the process here + // create active ping + this.activePing = Object.assign(Promise.withResolvers(), { + id: this.nextPingID++, + start: Date.now() + }) + // send ping + this.sendPing(this.activePing.id) + // await pong + try { + this.rtt = await raceSignal(this.activePing.promise, options?.signal) + } finally { + // clean-up active ping + this.activePing = undefined + } + + return this.rtt + } + + /** + * Get the ping round trip time + * + * Note: Will return 0 if no successful ping has yet been completed + * + * @returns the round-trip-time in milliseconds + */ + getRTT (): number { + return this.rtt + } + + /** + * Close the muxer + */ + async close (options: CloseOptions = {}): Promise { + if (this.status !== 'open') { + // already closed + return + } + + try { + const reason = options?.reason ?? 
GoAwayCode.NormalTermination + + this.log.trace('muxer close reason=%s', GoAwayCode[reason]) + + await super.close(options) + + // send reason to the other side, allow the other side to close gracefully + this.sendGoAway(reason) + } finally { + this.keepAlive?.stop() + } + } + + abort (err: Error): void { + if (this.status !== 'open') { + // already closed + return + } + + try { + super.abort(err) + + let reason = GoAwayCode.InternalError + + if (isProtocolError(err)) { + reason = err.reason + } + + // If reason was provided, use that, otherwise use the presence of `err` to determine the reason + this.log.error('muxer abort reason=%s error=%s', reason, err) + + // send reason to the other side, allow the other side to close gracefully + this.sendGoAway(reason) + } finally { + this.keepAlive?.stop() + } + } + + onTransportClosed (): void { + try { + super.onTransportClosed() + } finally { + this.keepAlive?.stop() + } + } + + /** Create a new stream */ + private _newStream (streamId: number, state: StreamState, direction: 'inbound' | 'outbound'): YamuxStream { + if (this.streams.find(s => s.streamId === streamId) != null) { + throw new InvalidParametersError('Stream already exists with that id') + } + + const stream = new YamuxStream({ + id: `${streamId}`, + streamId, + state, + direction, + sendFrame: this.sendFrame.bind(this), + log: this.log.newScope(`${direction}:${streamId}`), + config: this.config, + getRTT: this.getRTT.bind(this) + }) + + stream.addEventListener('close', () => { + this.closeStream(streamId) + }, { + once: true + }) + + return stream + } + + /** + * closeStream is used to close a stream once both sides have + * issued a close. + */ + private closeStream (id: number): void { + if (this.client === (id % 2 === 0)) { + this.numInboundStreams-- + } else { + this.numOutboundStreams-- + } + } + + private handleFrame (frame: Frame): void { + const { + streamID, + type, + length + } = frame.header + + this.log.trace('received frame %o', debugFrame(frame.header)) + + if (streamID === 0) { + switch (type) { + case FrameType.Ping: + { this.handlePing(frame.header); return } + case FrameType.GoAway: + { this.handleGoAway(length); return } + default: + // Invalid state + throw new InvalidFrameError('Invalid frame type') + } + } else { + switch (frame.header.type) { + case FrameType.Data: + case FrameType.WindowUpdate: + { this.handleStreamMessage(frame); return } + default: + // Invalid state + throw new InvalidFrameError('Invalid frame type') + } + } + } + + private handlePing (header: FrameHeader): void { + // If the ping is initiated by the sender, send a response + if (header.flag === Flag.SYN) { + this.log.trace('received ping request pingId=%s', header.length) + this.sendPing(header.length, Flag.ACK) + } else if (header.flag === Flag.ACK) { + this.log.trace('received ping response pingId=%s', header.length) + this.handlePingResponse(header.length) + } else { + // Invalid state + throw new InvalidFrameError('Invalid frame flag') + } + } + + private handlePingResponse (pingId: number): void { + if (this.activePing === undefined) { + // this ping was not requested + throw new UnRequestedPingError('ping not requested') + } + if (this.activePing.id !== pingId) { + // this ping doesn't match our active ping request + throw new NotMatchingPingError('ping doesn\'t match our id') + } + + // valid ping response + this.activePing.resolve(Date.now() - this.activePing.start) + } + + private handleGoAway (reason: GoAwayCode): void { + this.log.trace('received GoAway reason=%s', 
GoAwayCode[reason] ?? 'unknown') + this.remoteGoAway = reason + + // reset any streams that are still open and close the muxer + this.abort(new Error('Remote sent GoAway')) + } + + private handleStreamMessage (frame: Frame): void { + const { streamID, flag, type } = frame.header + + if ((flag & Flag.SYN) === Flag.SYN) { + this.incomingStream(streamID) + } + + const stream = this.streams.find(s => s.streamId === streamID) + if (stream === undefined) { + this.log.trace('frame for missing stream id=%s', streamID) + + return + } + + switch (type) { + case FrameType.WindowUpdate: { + stream.handleWindowUpdate(frame); return + } + case FrameType.Data: { + stream.handleData(frame); return + } + default: + throw new Error('unreachable') + } + } + + private incomingStream (id: number): void { + if (this.client !== (id % 2 === 0)) { + throw new InvalidParametersError('Both endpoints are clients') + } + if (this.streams.find(s => s.streamId === id)) { + return + } + + this.log.trace('new incoming stream id=%s', id) + + if (this.localGoAway !== undefined) { + // reject (reset) immediately if we are doing a go away + this.sendFrame({ + type: FrameType.WindowUpdate, + flag: Flag.RST, + streamID: id, + length: 0 + }) + return + } + + // check against our configured maximum number of inbound streams + if (this.numInboundStreams >= this.config.maxInboundStreams) { + this.log('maxIncomingStreams exceeded, forcing stream reset') + this.sendFrame({ + type: FrameType.WindowUpdate, + flag: Flag.RST, + streamID: id, + length: 0 + }); return + } + + // allocate a new stream + const stream = this._newStream(id, StreamState.SYNReceived, 'inbound') + this.onRemoteStream(stream) + + this.numInboundStreams++ + // the stream should now be tracked + } + + private sendFrame (header: FrameHeader, data?: Uint8ArrayList): boolean { + this.log.trace('sending frame %o', debugFrame(header)) + if (header.type === FrameType.Data) { + if (data === undefined) { + throw new InvalidFrameError('Invalid frame') + } + + return this.send(new Uint8ArrayList(encodeHeader(header), data)) + } else { + return this.send(encodeHeader(header)) + } + } + + private sendPing (pingId: number, flag: Flag = Flag.SYN): void { + if (flag === Flag.SYN) { + this.log.trace('sending ping request pingId=%s', pingId) + } else { + this.log.trace('sending ping response pingId=%s', pingId) + } + this.sendFrame({ + type: FrameType.Ping, + flag, + streamID: 0, + length: pingId + }) + } + + private sendGoAway (reason: GoAwayCode = GoAwayCode.NormalTermination): void { + this.log('sending GoAway reason=%s', GoAwayCode[reason]) + this.localGoAway = reason + this.sendFrame({ + type: FrameType.GoAway, + flag: 0, + streamID: 0, + length: reason + }) + } +} diff --git a/packages/stream-multiplexer-yamux/src/stream.ts b/packages/stream-multiplexer-yamux/src/stream.ts new file mode 100644 index 0000000000..cd710af3a8 --- /dev/null +++ b/packages/stream-multiplexer-yamux/src/stream.ts @@ -0,0 +1,290 @@ +import { AbstractStream } from '@libp2p/utils' +import { Uint8ArrayList } from 'uint8arraylist' +import { INITIAL_STREAM_WINDOW } from './constants.js' +import { isDataFrame } from './decode.ts' +import { InvalidFrameError, ReceiveWindowExceededError } from './errors.js' +import { Flag, FrameType, HEADER_LENGTH } from './frame.js' +import type { Config } from './config.js' +import type { Frame } from './decode.ts' +import type { FrameHeader } from './frame.js' +import type { AbortOptions } from '@libp2p/interface' +import type { AbstractStreamInit, SendResult } from 
'@libp2p/utils' + +export enum StreamState { + Init, + SYNSent, + SYNReceived, + Established, + Finished, + Paused +} + +export interface YamuxStreamInit extends AbstractStreamInit { + streamId: number + sendFrame(header: FrameHeader, body?: Uint8ArrayList): boolean + getRTT(): number + config: Config + state: StreamState +} + +/** YamuxStream is used to represent a logical stream within a session */ +export class YamuxStream extends AbstractStream { + streamId: number + state: StreamState + + private readonly config: Config + + /** The number of available bytes to send */ + private sendWindowCapacity: number + /** The number of bytes available to receive in a full window */ + private recvWindow: number + /** The number of available bytes to receive */ + private recvWindowCapacity: number + + /** + * An 'epoch' is the time it takes to process and read data + * + * Used in conjunction with RTT to determine whether to increase the recvWindow + */ + private epochStart: number + private readonly getRTT: () => number + + private readonly sendFrame: (header: FrameHeader, body?: Uint8ArrayList) => boolean + + constructor (init: YamuxStreamInit) { + super(init) + + this.config = init.config + this.streamId = init.streamId + this.state = init.state + this.sendWindowCapacity = INITIAL_STREAM_WINDOW + this.recvWindow = this.config.initialStreamWindowSize + this.recvWindowCapacity = this.recvWindow + this.epochStart = Date.now() + this.getRTT = init.getRTT + this.sendFrame = init.sendFrame + + this.addEventListener('message', () => { + this.sendWindowUpdate() + }) + + this.addEventListener('close', () => { + this.state = StreamState.Finished + }) + } + + /** + * Send a data message to the remote muxer + */ + sendData (buf: Uint8ArrayList): SendResult { + const totalBytes = buf.byteLength + let sentBytes = 0 + let canSendMore = true + + // send in chunks, waiting for window updates + while (buf.byteLength !== 0) { + // we exhausted the send window, sending will resume later + if (this.sendWindowCapacity === 0) { + canSendMore = false + this.log?.trace('sent %d/%d bytes, wait for send window update, status %s', sentBytes, totalBytes, this.status) + break + } + + // send as much as we can + const toSend = Math.min(this.sendWindowCapacity, this.config.maxMessageSize - HEADER_LENGTH, buf.length) + const flags = this.getSendFlags() + + const muxerSendMore = this.sendFrame({ + type: FrameType.Data, + flag: flags, + streamID: this.streamId, + length: toSend + }, buf.sublist(0, toSend)) + + this.sendWindowCapacity -= toSend + + sentBytes += toSend + buf.consume(toSend) + + if (!muxerSendMore) { + canSendMore = muxerSendMore + break + } + } + + return { + sentBytes, + canSendMore + } + } + + /** + * Send a reset message to the remote muxer + */ + async sendReset (): Promise { + this.sendFrame({ + type: FrameType.WindowUpdate, + flag: Flag.RST, + streamID: this.streamId, + length: 0 + }) + } + + /** + * Send a message to the remote muxer, informing them no more data messages + * will be sent by this end of the stream + */ + async sendCloseWrite (): Promise { + const flags = this.getSendFlags() | Flag.FIN + this.sendFrame({ + type: FrameType.WindowUpdate, + flag: flags, + streamID: this.streamId, + length: 0 + }) + } + + /** + * Send a message to the remote muxer, informing them no more data messages + * will be read by this end of the stream - this is a no-op on Yamux streams + */ + async sendCloseRead (options?: AbortOptions): Promise { + options?.signal?.throwIfAborted() + } + + /** + * Stop sending window 
updates temporarily - in the interim the the remote + * send window will exhaust and the remote will stop sending data + */ + sendPause (): void { + this.state = StreamState.Paused + } + + /** + * Start sending window updates as normal + */ + sendResume (): void { + this.state = StreamState.Established + this.sendWindowUpdate() + } + + /** + * handleWindowUpdate is called when the stream receives a window update frame + */ + handleWindowUpdate (frame: Frame): void { + this.log?.trace('stream received window update') + this.processFlags(frame.header.flag) + + // increase send window + const available = this.sendWindowCapacity + this.sendWindowCapacity += frame.header.length + + // if the update increments a 0 availability, notify the stream that sending can resume + if (available === 0 && frame.header.length > 0) { + this.safeDispatchEvent('drain') + } + } + + /** + * handleData is called when the stream receives a data frame + */ + handleData (frame: Frame): void { + if (!isDataFrame(frame)) { + throw new InvalidFrameError('Frame was not data frame') + } + + this.log?.trace('stream received data') + this.processFlags(frame.header.flag) + + // check that our recv window is not exceeded + if (this.recvWindowCapacity < frame.header.length) { + throw new ReceiveWindowExceededError('Receive window exceeded') + } + + this.recvWindowCapacity -= frame.header.length + + this.onData(frame.data) + } + + /** + * processFlags is used to update the state of the stream based on set flags, if any. + */ + private processFlags (flags: number): void { + if ((flags & Flag.ACK) === Flag.ACK) { + if (this.state === StreamState.SYNSent) { + this.state = StreamState.Established + } + } + + if ((flags & Flag.FIN) === Flag.FIN) { + this.onRemoteCloseWrite() + } + + if ((flags & Flag.RST) === Flag.RST) { + this.onRemoteReset() + } + } + + /** + * getSendFlags determines any flags that are appropriate + * based on the current stream state. + * + * The state is updated as a side-effect. + */ + private getSendFlags (): number { + switch (this.state) { + case StreamState.Init: + this.state = StreamState.SYNSent + return Flag.SYN + case StreamState.SYNReceived: + this.state = StreamState.Established + return Flag.ACK + default: + return 0 + } + } + + /** + * Potentially sends a window update enabling further remote writes to take + * place. 
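+ *
+ * No update is sent while the stream is paused, or while the receive capacity
+ * still equals the full window size. When incoming data has been consumed within
+ * roughly four round trips, the window itself is (up to) doubled, capped at
+ * `maxStreamWindowSize`.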
+ */ + sendWindowUpdate (): void { + // determine the flags if any + const flags = this.getSendFlags() + + // If the stream has already been established + // and we've processed data within the time it takes for 4 round trips + // then we (up to) double the recvWindow + const now = Date.now() + const rtt = this.getRTT() + if (flags === 0 && rtt > -1 && now - this.epochStart < rtt * 4) { + // we've already validated that maxStreamWindowSize can't be more than MAX_UINT32 + this.recvWindow = Math.min(this.recvWindow * 2, this.config.maxStreamWindowSize) + } + + if (this.recvWindowCapacity >= this.recvWindow && flags === 0) { + // a window update isn't needed + return + } + + if (this.state === StreamState.Paused) { + // we don't want any more data from the remote right now + return + } + + // update the receive window + const delta = this.recvWindow - this.recvWindowCapacity + this.recvWindowCapacity = this.recvWindow + + // update the epoch start + this.epochStart = now + + // send window update + this.sendFrame({ + type: FrameType.WindowUpdate, + flag: flags, + streamID: this.streamId, + length: delta + }) + } +} diff --git a/packages/stream-multiplexer-yamux/test/bench/codec.bench.ts b/packages/stream-multiplexer-yamux/test/bench/codec.bench.ts new file mode 100644 index 0000000000..fcbf9efd1c --- /dev/null +++ b/packages/stream-multiplexer-yamux/test/bench/codec.bench.ts @@ -0,0 +1,48 @@ +import { itBench } from '@dapplion/benchmark' +import { decodeHeader } from '../../src/decode.js' +import { encodeHeader } from '../../src/encode.js' +import { Flag, FrameType } from '../../src/frame.js' +import { decodeHeaderNaive, encodeHeaderNaive } from '../codec.util.js' +import type { FrameHeader } from '../../src/frame.js' + +describe('codec benchmark', () => { + for (const { encode, name } of [ + { encode: encodeHeader, name: 'encodeFrameHeader' }, + { encode: encodeHeaderNaive, name: 'encodeFrameHeaderNaive' } + ]) { + itBench({ + id: `frame header - ${name}`, + timeoutBench: 100000000, + beforeEach: () => { + return { + type: FrameType.WindowUpdate, + flag: Flag.ACK, + streamID: 0xffffffff, + length: 0xffffffff + } + }, + fn: (header) => { + encode(header) + } + }) + } + + for (const { decode, name } of [ + { decode: decodeHeader, name: 'decodeHeader' }, + { decode: decodeHeaderNaive, name: 'decodeHeaderNaive' } + ]) { + itBench({ + id: `frame header ${name}`, + beforeEach: () => { + const header = new Uint8Array(12) + for (let i = 1; i < 12; i++) { + header[i] = 255 + } + return header + }, + fn: (header) => { + decode(header) + } + }) + } +}) diff --git a/packages/stream-multiplexer-yamux/test/bench/comparison.bench.ts b/packages/stream-multiplexer-yamux/test/bench/comparison.bench.ts new file mode 100644 index 0000000000..6c57d4821b --- /dev/null +++ b/packages/stream-multiplexer-yamux/test/bench/comparison.bench.ts @@ -0,0 +1,54 @@ +import { itBench } from '@dapplion/benchmark' +import { mplex } from '@libp2p/mplex' +import { multiaddrConnectionPair } from '@libp2p/utils' +import { pEvent } from 'p-event' +import { yamux } from '../../src/index.ts' +import type { StreamMuxer } from '@libp2p/interface' + +interface Fixture { + client: StreamMuxer + server: StreamMuxer +} + +describe('comparison benchmark', () => { + for (const { impl, name } of [ + { impl: yamux()(), name: 'yamux' }, + { impl: mplex()(), name: 'mplex' } + ]) { + for (const { numMessages, msgSize } of [ + { numMessages: 1, msgSize: 2 ** 6 }, + { numMessages: 1, msgSize: 2 ** 10 }, + { numMessages: 1, msgSize: 2 ** 16 }, + { 
numMessages: 1, msgSize: 2 ** 20 }, + { numMessages: 1000, msgSize: 2 ** 6 }, + { numMessages: 1000, msgSize: 2 ** 10 }, + { numMessages: 1000, msgSize: 2 ** 16 }, + { numMessages: 1000, msgSize: 2 ** 20 } + ]) { + itBench({ + id: `${name} send and receive ${numMessages} ${msgSize / 1024}KB chunks`, + beforeEach: () => { + const [outboundConnection, inboundConnection] = multiaddrConnectionPair() + + return { + client: impl.createStreamMuxer(outboundConnection), + server: impl.createStreamMuxer(inboundConnection) + } + }, + fn: async ({ client, server }) => { + const stream = await client.createStream() + + for (let i = 0; i < numMessages; i++) { + const sendMore = stream.send(new Uint8Array(msgSize)) + + if (!sendMore) { + await pEvent(stream, 'drain') + } + } + + await stream.closeWrite() + } + }) + } + } +}) diff --git a/packages/stream-multiplexer-yamux/test/codec.spec.ts b/packages/stream-multiplexer-yamux/test/codec.spec.ts new file mode 100644 index 0000000000..1bb0bf58d2 --- /dev/null +++ b/packages/stream-multiplexer-yamux/test/codec.spec.ts @@ -0,0 +1,31 @@ +import { expect } from 'aegir/chai' +import { decodeHeader } from '../src/decode.js' +import { encodeHeader } from '../src/encode.js' +import { Flag, FrameType, GoAwayCode, stringifyHeader } from '../src/frame.js' +import { decodeHeaderNaive, encodeHeaderNaive } from './codec.util.js' +import type { FrameHeader } from '../src/frame.js' + +const frames: Array<{ header: FrameHeader, data?: Uint8Array }> = [ + { header: { type: FrameType.Ping, flag: Flag.SYN, streamID: 0, length: 1 } }, + { header: { type: FrameType.WindowUpdate, flag: Flag.SYN, streamID: 1, length: 1 } }, + { header: { type: FrameType.GoAway, flag: 0, streamID: 0, length: GoAwayCode.NormalTermination } }, + { header: { type: FrameType.Ping, flag: Flag.ACK, streamID: 0, length: 100 } }, + { header: { type: FrameType.WindowUpdate, flag: 0, streamID: 99, length: 1000 } }, + { header: { type: FrameType.WindowUpdate, flag: 0, streamID: 0xffffffff, length: 0xffffffff } }, + { header: { type: FrameType.GoAway, flag: 0, streamID: 0, length: GoAwayCode.ProtocolError } } +] + +describe('codec', () => { + for (const { header } of frames) { + it(`should round trip encode/decode header ${stringifyHeader(header)}`, () => { + expect(decodeHeader(encodeHeader(header))).to.deep.equal(header) + }) + } + + for (const { header } of frames) { + it(`should match naive implementations of encode/decode for header ${stringifyHeader(header)}`, () => { + expect(encodeHeader(header)).to.deep.equal(encodeHeaderNaive(header)) + expect(decodeHeader(encodeHeader(header))).to.deep.equal(decodeHeaderNaive(encodeHeaderNaive(header))) + }) + } +}) diff --git a/packages/stream-multiplexer-yamux/test/codec.util.ts b/packages/stream-multiplexer-yamux/test/codec.util.ts new file mode 100644 index 0000000000..1478e70e0d --- /dev/null +++ b/packages/stream-multiplexer-yamux/test/codec.util.ts @@ -0,0 +1,35 @@ +import { InvalidFrameError } from '../src/errors.js' +import { HEADER_LENGTH, YAMUX_VERSION } from '../src/frame.js' +import type { FrameHeader } from '../src/frame.js' + +// Slower encode / decode functions that use dataview + +export function decodeHeaderNaive (data: Uint8Array): FrameHeader { + const view = new DataView(data.buffer, data.byteOffset, data.byteLength) + + if (view.getUint8(0) !== YAMUX_VERSION) { + throw new InvalidFrameError('Invalid frame version') + } + return { + type: view.getUint8(1), + flag: view.getUint16(2, false), + streamID: view.getUint32(4, false), + length: 
view.getUint32(8, false) + } +} + +export function encodeHeaderNaive (header: FrameHeader): Uint8Array { + const frame = new Uint8Array(HEADER_LENGTH) + + const frameView = new DataView(frame.buffer, frame.byteOffset, frame.byteLength) + + // always assume version 0 + // frameView.setUint8(0, header.version) + + frameView.setUint8(1, header.type) + frameView.setUint16(2, header.flag, false) + frameView.setUint32(4, header.streamID, false) + frameView.setUint32(8, header.length, false) + + return frame +} diff --git a/packages/stream-multiplexer-yamux/test/compliance.spec.ts b/packages/stream-multiplexer-yamux/test/compliance.spec.ts new file mode 100644 index 0000000000..60fbfd7142 --- /dev/null +++ b/packages/stream-multiplexer-yamux/test/compliance.spec.ts @@ -0,0 +1,13 @@ +/* eslint-env mocha */ + +import tests from '@libp2p/interface-compliance-tests/stream-muxer' +import { yamux } from '../src/index.ts' + +describe('compliance', () => { + tests({ + async setup () { + return yamux()() + }, + async teardown () {} + }) +}) diff --git a/packages/stream-multiplexer-yamux/test/decode.spec.ts b/packages/stream-multiplexer-yamux/test/decode.spec.ts new file mode 100644 index 0000000000..d6da7987a9 --- /dev/null +++ b/packages/stream-multiplexer-yamux/test/decode.spec.ts @@ -0,0 +1,228 @@ +import { expect } from 'aegir/chai' +import all from 'it-all' +import { Uint8ArrayList } from 'uint8arraylist' +import { Decoder } from '../src/decode.js' +import { encodeHeader } from '../src/encode.js' +import { Flag, FrameType, GoAwayCode } from '../src/frame.js' +import type { FrameHeader } from '../src/frame.js' + +const frames: Array<{ header: FrameHeader, data?: Uint8Array }> = [ + { header: { type: FrameType.Ping, flag: Flag.SYN, streamID: 0, length: 1 } }, + { header: { type: FrameType.WindowUpdate, flag: Flag.SYN, streamID: 1, length: 1 } }, + { header: { type: FrameType.GoAway, flag: 0, streamID: 0, length: GoAwayCode.NormalTermination } }, + { header: { type: FrameType.Ping, flag: Flag.ACK, streamID: 0, length: 100 } }, + { header: { type: FrameType.WindowUpdate, flag: 0, streamID: 99, length: 1000 } }, + { header: { type: FrameType.GoAway, flag: 0, streamID: 0, length: GoAwayCode.ProtocolError } } +] + +const data = (length: number): Uint8Array => Uint8Array.from(Array.from({ length }), (_, i) => i) + +const expectEqualBytes = (actual: Uint8Array | Uint8ArrayList, expected: Uint8Array | Uint8ArrayList, reason?: string): void => { + expect(actual instanceof Uint8Array ? actual : actual.subarray(), reason).to.deep.equal(expected instanceof Uint8Array ? 
expected : expected.subarray()) +} + +const expectEqualDataFrame = (actual: { header: FrameHeader, data?: Uint8Array | Uint8ArrayList }, expected: { header: FrameHeader, data?: Uint8Array | Uint8ArrayList }, reason = ''): void => { + expect(actual.header, reason + ' header').to.deep.equal(expected.header) + if (actual.data == null && expected.data != null) { + expect.fail('actual has no data but expected does') + } + if (actual.data != null && expected.data == null) { + expect.fail('actual has data but expected does not') + } + if (actual.data != null && expected.data != null) { + expectEqualBytes(actual.data, expected.data, reason + ' data?: string') + } +} + +const expectEqualDataFrames = (actual: Array<{ header: FrameHeader, data?: Uint8Array | Uint8ArrayList }>, expected: Array<{ header: FrameHeader, data?: Uint8Array | Uint8ArrayList }>): void => { + if (actual.length !== expected.length) { + expect.fail('actual') + } + for (let i = 0; i < actual.length; i++) { + expectEqualDataFrame(actual[i], expected[i], String(i)) + } +} + +const dataFrame = (length: number): { header: FrameHeader, data: Uint8Array } => ({ + header: { type: FrameType.Data, flag: 0, streamID: 1, length }, + data: data(length) +}) + +export const randomRanges = (length: number): number[][] => { + const indices = [] + let i = 0 + let j = 0 + while (i < length) { + j = i + i += Math.floor(Math.random() * length) + indices.push([j, i]) + } + return indices +} + +describe('Decoder internals', () => { + describe('readHeader', () => { + const frame = frames[0] + const d = new Decoder() + + afterEach(() => { + d['buffer'].consume(d['buffer'].length) + }) + + it('should handle an empty buffer', async () => { + expect(d['buffer'].length, 'a freshly created decoder should have an empty buffer').to.equal(0) + expect(all(d.emitFrames(new Uint8Array()))).to.be.empty('an empty buffer should read no header') + }) + + it('should handle buffer length == header length', async () => { + expect(all(d.emitFrames(encodeHeader(frame.header)))).to.deep.equal([frame]) + expect(d['buffer'].length, 'the buffer should be fully drained').to.equal(0) + }) + + it('should handle buffer length < header length', async () => { + const upTo = 2 + + const buf = encodeHeader(frame.header) + + expect(all(d.emitFrames(buf.slice(0, upTo)))).to.be.empty('an buffer that has insufficient bytes should read no header') + expect(d['buffer'].length, 'a buffer that has insufficient bytes should not be consumed').to.equal(upTo) + + expect(all(d.emitFrames(buf.slice(upTo)))).to.deep.equal([frame], 'the decoded header should match the input') + expect(d['buffer'].length, 'the buffer should be fully drained').to.equal(0) + }) + + it('should handle buffer length > header length', async () => { + const more = 10 + + const buf = new Uint8ArrayList( + encodeHeader(frame.header), + new Uint8Array(more) + ) + + expect(all(d.emitFrames(buf.subarray()))).to.deep.equal([frame], 'the decoded header should match the input') + expect(d['buffer'].length, 'the buffer should be partially drained').to.equal(more) + }) + }) +}) + +describe('Decoder', () => { + describe('emitFrames', () => { + let d: Decoder + + beforeEach(() => { + d = new Decoder() + }) + + it('should emit frames from source chunked by frame', async () => { + const input = new Uint8ArrayList() + const expected = [] + for (const [i, frame] of frames.entries()) { + input.append(encodeHeader(frame.header)) + expected.push(frame) + + // sprinkle in more data frames + if (i % 2 === 1) { + const df = dataFrame(i * 100) + 
input.append(encodeHeader(df.header)) + input.append(df.data) + expected.push(df) + } + } + + const actual = all(d.emitFrames(input.subarray())) + + expectEqualDataFrames(actual, expected) + }) + + it('should emit frames from source chunked by partial frame', async () => { + const chunkSize = 5 + const input = new Uint8ArrayList() + const expected = [] + for (const [i, frame] of frames.entries()) { + const encoded = encodeHeader(frame.header) + for (let i = 0; i < encoded.length; i += chunkSize) { + input.append(encoded.slice(i, i + chunkSize)) + } + expected.push(frame) + + // sprinkle in more data frames + if (i % 2 === 1) { + const df = dataFrame(i * 100) + const encoded = Uint8Array.from([...encodeHeader(df.header), ...df.data]) + for (let i = 0; i < encoded.length; i += chunkSize) { + input.append(encoded.slice(i, i + chunkSize)) + } + expected.push(df) + } + } + + const actual = all(d.emitFrames(input.subarray())) + + expectEqualDataFrames(actual, expected) + }) + + it('should emit frames from source chunked by multiple frames', async () => { + const input = new Uint8ArrayList() + const expected = [] + for (let i = 0; i < frames.length; i++) { + const encoded1 = encodeHeader(frames[i].header) + expected.push(frames[i]) + + i++ + const encoded2 = encodeHeader(frames[i].header) + expected.push(frames[i]) + + // sprinkle in more data frames + const df = dataFrame(i * 100) + const encoded3 = Uint8Array.from([...encodeHeader(df.header), ...df.data]) + expected.push(df) + + const encodedChunk = new Uint8Array(encoded1.length + encoded2.length + encoded3.length) + encodedChunk.set(encoded1, 0) + encodedChunk.set(encoded2, encoded1.length) + encodedChunk.set(encoded3, encoded1.length + encoded2.length) + + input.append(encodedChunk) + } + + const actual = all(d.emitFrames(input.subarray())) + + expectEqualDataFrames(actual, expected) + }) + + it('should emit frames from source chunked chaotically', async () => { + const input = new Uint8ArrayList() + const expected = [] + const encodedFrames = [] + for (const [i, frame] of frames.entries()) { + encodedFrames.push(encodeHeader(frame.header)) + expected.push(frame) + + // sprinkle in more data frames + if (i % 2 === 1) { + const df = dataFrame(i * 100) + encodedFrames.push(encodeHeader(df.header)) + encodedFrames.push(df.data) + expected.push(df) + } + } + + // create a single byte array of all frames to send + // so that we can chunk them chaotically + const encoded = new Uint8Array(encodedFrames.reduce((a, b) => a + b.length, 0)) + let i = 0 + for (const e of encodedFrames) { + encoded.set(e, i) + i += e.length + } + + for (const [i, j] of randomRanges(encoded.length)) { + input.append(encoded.slice(i, j)) + } + + const actual = all(d.emitFrames(input.subarray())) + + expectEqualDataFrames(actual, expected) + }) + }) +}) diff --git a/packages/stream-multiplexer-yamux/test/muxer.spec.ts b/packages/stream-multiplexer-yamux/test/muxer.spec.ts new file mode 100644 index 0000000000..010ff7f8b3 --- /dev/null +++ b/packages/stream-multiplexer-yamux/test/muxer.spec.ts @@ -0,0 +1,136 @@ +/* eslint-env mocha */ + +import { multiaddrConnectionPair } from '@libp2p/utils' +import { expect } from 'aegir/chai' +import { YamuxMuxer } from '../src/muxer.ts' +import { sleep } from './util.js' +import type { MultiaddrConnection } from '@libp2p/interface' + +describe('muxer', () => { + let client: YamuxMuxer + let server: YamuxMuxer + let outboundConnection: MultiaddrConnection + let inboundConnection: MultiaddrConnection + + beforeEach(() => { + 
([outboundConnection, inboundConnection] = multiaddrConnectionPair()) + client = new YamuxMuxer(outboundConnection) + server = new YamuxMuxer(inboundConnection) + }) + + afterEach(async () => { + if (client != null) { + await client.close() + } + + if (server != null) { + await server.close() + } + }) + + it('test repeated close', async () => { + // inspect logs to ensure its only closed once + await client.close() + await client.close() + await client.close() + }) + + it('test client<->client', async () => { + server['client'] = true + + await client.createStream().catch(() => {}) + await server.createStream().catch(() => {}) + + await sleep(20) + + expect(client).to.have.property('status', 'closed') + expect(server).to.have.property('status', 'closed') + }) + + it('test server<->server', async () => { + client['client'] = false + + await client.createStream().catch(() => {}) + await server.createStream().catch(() => {}) + + await sleep(20) + + expect(client).to.have.property('status', 'closed') + expect(server).to.have.property('status', 'closed') + }) + + it('test ping', async () => { + inboundConnection.pause() + const clientRTT = client.ping() + await sleep(10) + inboundConnection.resume() + await expect(clientRTT).to.eventually.not.equal(0) + + outboundConnection.pause() + const serverRTT = server.ping() + await sleep(10) + outboundConnection.resume() + expect(await serverRTT).to.not.equal(0) + }) + + it('test multiple simultaneous pings', async () => { + inboundConnection.pause() + const promise = [ + client.ping(), + client.ping(), + client.ping() + ] + await sleep(10) + inboundConnection.resume() + + const clientRTTs = await Promise.all(promise) + expect(clientRTTs[0]).to.not.equal(0) + expect(clientRTTs[0]).to.equal(clientRTTs[1]) + expect(clientRTTs[1]).to.equal(clientRTTs[2]) + + expect(client['nextPingID']).to.equal(1) + + await client.close() + }) + + it('test go away', async () => { + await client.close() + + await expect(client.createStream()).to.eventually.be.rejected() + .with.property('name', 'MuxerClosedError', 'should not be able to open a stream after close') + }) + + it('test keep alive', async () => { + client['keepAlive']?.setInterval(10) + + await sleep(1000) + + expect(client['nextPingID']).to.be.gt(2) + }) + + it('test max inbound streams', async () => { + server['config']['maxInboundStreams'] = 1 + + await client.createStream() + await client.createStream() + await sleep(10) + + expect(server.streams.length).to.equal(1) + expect(client.streams.length).to.equal(1) + }) + + it('test max outbound streams', async () => { + client['config']['maxOutboundStreams'] = 1 + + await client.createStream() + await sleep(10) + + try { + await client.createStream() + expect.fail('stream creation should fail if exceeding maxOutboundStreams') + } catch (e) { + expect(server.streams.length).to.equal(1) + expect(client.streams.length).to.equal(1) + } + }) +}) diff --git a/packages/stream-multiplexer-yamux/test/stream.spec.ts b/packages/stream-multiplexer-yamux/test/stream.spec.ts new file mode 100644 index 0000000000..03986cca9a --- /dev/null +++ b/packages/stream-multiplexer-yamux/test/stream.spec.ts @@ -0,0 +1,258 @@ +/* eslint-env mocha */ + +import { multiaddrConnectionPair, pipe } from '@libp2p/utils' +import { expect } from 'aegir/chai' +import drain from 'it-drain' +import { pushable } from 'it-pushable' +import { pEvent } from 'p-event' +import { defaultConfig } from '../src/config.js' +import { GoAwayCode } from '../src/frame.js' +import { YamuxMuxer } from 
'../src/muxer.ts' +import { StreamState } from '../src/stream.js' +import { sleep } from './util.js' +import type { MultiaddrConnection } from '@libp2p/interface' +import type { Pushable } from 'it-pushable' + +describe('stream', () => { + let inboundConnection: MultiaddrConnection + let outboundConnection: MultiaddrConnection + let client: YamuxMuxer + let server: YamuxMuxer + + beforeEach(() => { + ([inboundConnection, outboundConnection] = multiaddrConnectionPair()) + client = new YamuxMuxer(inboundConnection) + server = new YamuxMuxer(outboundConnection) + }) + + afterEach(async () => { + await client?.close() + await server?.close() + }) + + it('test send data - small', async () => { + const [ + s1, c1 + ] = await Promise.all([ + pEvent(server, 'stream').then(evt => evt.detail), + client.createStream() + ]) + + await Promise.all([ + Promise.resolve().then(async () => { + for (let i = 0; i < 10; i++) { + const sendMore = c1.send(new Uint8Array(256)) + + if (!sendMore) { + await pEvent(c1, 'drain') + } + } + + await c1.close() + }), + drain(s1) + ]) + + // the window capacities should have refilled via window updates as received data was consumed + expect(c1['sendWindowCapacity']).to.be.gte(defaultConfig.initialStreamWindowSize) + expect(s1['recvWindowCapacity']).to.be.gte(defaultConfig.initialStreamWindowSize) + }) + + it('test send data - large', async () => { + const [ + s1, c1 + ] = await Promise.all([ + pEvent(server, 'stream').then(evt => evt.detail), + client.createStream() + ]) + + await Promise.all([ + Promise.resolve().then(async () => { + // amount of data is greater than initial window size + // and each payload is also greater than the max message size + // this will payload chunking and also waiting for window updates before + // continuing to send + for (let i = 0; i < 10; i++) { + const sendMore = c1.send(new Uint8Array(defaultConfig.initialStreamWindowSize)) + + if (!sendMore) { + await pEvent(c1, 'drain') + } + } + + await c1.close() + }), + drain(s1) + ]) + + // the window capacities should have refilled via window updates as received data was consumed + expect(c1['sendWindowCapacity']).to.be.gte(defaultConfig.initialStreamWindowSize) + expect(s1['recvWindowCapacity']).to.be.gte(defaultConfig.initialStreamWindowSize) + }) + + it('test send data - large with increasing recv window size', async () => { + const [ + s1, c1 + ] = await Promise.all([ + pEvent(server, 'stream').then(evt => evt.detail), + client.createStream(), + server.ping() + ]) + + await Promise.all([ + Promise.resolve().then(async () => { + // amount of data is greater than initial window size + // and each payload is also greater than the max message size + // this will payload chunking and also waiting for window updates before + // continuing to send + for (let i = 0; i < 10; i++) { + const sendMore = c1.send(new Uint8Array(defaultConfig.initialStreamWindowSize)) + + if (!sendMore) { + await pEvent(c1, 'drain') + } + } + await c1.close() + }), + drain(s1) + ]) + + // the window capacities should have refilled via window updates as received data was consumed + expect(c1['sendWindowCapacity']).to.be.gte(defaultConfig.initialStreamWindowSize) + expect(s1['recvWindowCapacity']).to.be.gte(defaultConfig.initialStreamWindowSize) + }) + + it('test many streams', async () => { + for (let i = 0; i < 1000; i++) { + client.createStream() + } + await sleep(100) + + expect(client.streams.length).to.equal(1000) + expect(server.streams.length).to.equal(1000) + }) + + it('test many streams - ping pong', async () => { 
+ server.addEventListener('stream', (evt) => { + // echo on incoming streams + pipe(evt.detail, evt.detail) + }) + + const numStreams = 10 + + const p: Array> = [] + for (let i = 0; i < numStreams; i++) { + client.createStream() + p.push(pushable()) + } + await sleep(100) + + for (let i = 0; i < numStreams; i++) { + const s = client.streams[i] + void pipe(p[i], s) + p[i].push(new Uint8Array(16)) + } + await sleep(100) + + expect(client.streams.length).to.equal(numStreams) + expect(server.streams.length).to.equal(numStreams) + + await client.close() + }) + + it('test stream close', async () => { + const c1 = await client.createStream() + await c1.close() + await sleep(5) + + expect(c1.state).to.equal(StreamState.Finished) + + const s1 = server.streams[0] + expect(s1).to.not.be.undefined() + expect(s1.state).to.equal(StreamState.SYNReceived) + }) + + it('test stream close read', async () => { + const c1 = await client.createStream() + await c1.closeRead() + await sleep(5) + + const s1 = server.streams[0] + expect(s1).to.not.be.undefined() + expect(s1.readStatus).to.equal('readable') + expect(s1.writeStatus).to.equal('writable') + }) + + it('test stream close write', async () => { + const c1 = await client.createStream() + await c1.close() + await sleep(5) + + expect(c1.readStatus).to.equal('closed') + expect(c1.writeStatus).to.equal('closed') + + const s1 = server.streams[0] + expect(s1).to.not.be.undefined() + expect(s1.readStatus).to.equal('closed') + expect(s1.writeStatus).to.equal('writable') + }) + + it('test window overflow', async () => { + const [ + s1, c1 + ] = await Promise.all([ + pEvent(server, 'stream').then(evt => evt.detail), + client.createStream() + ]) + + await expect( + Promise.all([ + (async () => { + const data = new Array(10).fill(new Uint8Array(s1['recvWindowCapacity'] * 2)) + + for (const buf of data) { + c1['config']['maxMessageSize'] = s1['recvWindowCapacity'] * 2 + c1['sendWindowCapacity'] = s1['recvWindowCapacity'] * 2 + const sendMore = c1.send(buf) + + if (!sendMore) { + await pEvent(c1, 'drain') + } + } + + await c1.close() + })(), + drain(s1) + ]) + ).to.eventually.be.rejected() + .with.property('name', 'ReceiveWindowExceededError') + + expect(client).to.have.property('remoteGoAway', GoAwayCode.ProtocolError) + expect(server).to.have.property('localGoAway', GoAwayCode.ProtocolError) + }) + + it('test stream sink error', async () => { + // don't let the server respond + inboundConnection.pause() + + const p = pushable() + const c1 = await client.createStream() + + pipe(p, c1) + + // send more data than the window size, will trigger a wait + p.push(new Uint8Array(defaultConfig.initialStreamWindowSize)) + p.push(new Uint8Array(defaultConfig.initialStreamWindowSize)) + + await sleep(10) + + // the client should fail to close gracefully because there is unsent data + // that will never be sent + await expect(client.close({ + signal: AbortSignal.timeout(10) + })).to.eventually.be.rejected() + + p.end() + inboundConnection.resume() + }) +}) diff --git a/packages/stream-multiplexer-yamux/test/util.ts b/packages/stream-multiplexer-yamux/test/util.ts new file mode 100644 index 0000000000..3eb3182e48 --- /dev/null +++ b/packages/stream-multiplexer-yamux/test/util.ts @@ -0,0 +1,3 @@ +export async function sleep (ms: number): Promise { + return new Promise(resolve => setTimeout(() => { resolve(ms) }, ms)) +} diff --git a/packages/stream-multiplexer-yamux/tsconfig.json b/packages/stream-multiplexer-yamux/tsconfig.json new file mode 100644 index 0000000000..13a3599639 --- 
/dev/null +++ b/packages/stream-multiplexer-yamux/tsconfig.json @@ -0,0 +1,10 @@ +{ + "extends": "aegir/src/config/tsconfig.aegir.json", + "compilerOptions": { + "outDir": "dist" + }, + "include": [ + "src", + "test" + ] +} diff --git a/packages/stream-multiplexer-yamux/typedoc.json b/packages/stream-multiplexer-yamux/typedoc.json new file mode 100644 index 0000000000..1c46781ee6 --- /dev/null +++ b/packages/stream-multiplexer-yamux/typedoc.json @@ -0,0 +1,8 @@ +{ + "readme": "none", + "entryPoints": [ + "./src/index.ts", + "./src/config.ts", + "./src/stream.ts" + ] +} diff --git a/packages/transport-circuit-relay-v2/package.json b/packages/transport-circuit-relay-v2/package.json index a4ec64c068..a5722b6c1f 100644 --- a/packages/transport-circuit-relay-v2/package.json +++ b/packages/transport-circuit-relay-v2/package.json @@ -55,8 +55,6 @@ "@multiformats/multiaddr": "^12.4.4", "@multiformats/multiaddr-matcher": "^2.0.0", "any-signal": "^4.1.1", - "it-protobuf-stream": "^2.0.2", - "it-stream-types": "^2.0.2", "main-event": "^1.0.1", "multiformats": "^13.3.6", "nanoid": "^5.1.5", @@ -67,17 +65,16 @@ "uint8arrays": "^5.1.0" }, "devDependencies": { - "@libp2p/interface-compliance-tests": "^6.4.16", "@libp2p/logger": "^5.1.21", "aegir": "^47.0.14", "delay": "^6.0.0", - "it-drain": "^3.0.9", + "it-all": "^3.0.9", + "it-merge": "^3.0.12", "it-pair": "^2.0.6", - "it-pushable": "^3.2.3", - "it-to-buffer": "^4.0.9", + "it-protobuf-stream": "^2.0.2", + "it-stream-types": "^2.0.2", "p-wait-for": "^5.0.2", "protons": "^7.6.1", - "race-signal": "^1.1.3", "sinon": "^20.0.0", "sinon-ts": "^2.0.0" }, diff --git a/packages/transport-circuit-relay-v2/src/server/index.ts b/packages/transport-circuit-relay-v2/src/server/index.ts index 1df2ed553b..8b25dd7caf 100644 --- a/packages/transport-circuit-relay-v2/src/server/index.ts +++ b/packages/transport-circuit-relay-v2/src/server/index.ts @@ -1,8 +1,8 @@ import { publicKeyToProtobuf } from '@libp2p/crypto/keys' import { peerIdFromMultihash } from '@libp2p/peer-id' import { RecordEnvelope } from '@libp2p/peer-record' +import { pbStream } from '@libp2p/utils' import { multiaddr } from '@multiformats/multiaddr' -import { pbStream } from 'it-protobuf-stream' import { TypedEventEmitter, setMaxListeners } from 'main-event' import * as Digest from 'multiformats/hashes/digest' import { @@ -21,11 +21,11 @@ import { ReservationVoucherRecord } from './reservation-voucher.js' import type { ReservationStoreInit } from './reservation-store.js' import type { CircuitRelayService, RelayReservation } from '../index.js' import type { Reservation } from '../pb/index.js' -import type { ComponentLogger, Logger, Connection, Stream, ConnectionGater, PeerId, PeerStore, Startable, PrivateKey, Metrics, AbortOptions, IncomingStreamData } from '@libp2p/interface' +import type { ComponentLogger, Logger, Connection, Stream, ConnectionGater, PeerId, PeerStore, Startable, PrivateKey, Metrics, AbortOptions } from '@libp2p/interface' import type { AddressManager, ConnectionManager, Registrar } from '@libp2p/interface-internal' import type { PeerMap } from '@libp2p/peer-collections' +import type { ProtobufStream } from '@libp2p/utils' import type { Multiaddr } from '@multiformats/multiaddr' -import type { ProtobufStream } from 'it-protobuf-stream' const isRelayAddr = (ma: Multiaddr): boolean => ma.protoCodes().includes(CIRCUIT_PROTO_CODE) @@ -135,6 +135,8 @@ class CircuitRelayServer extends TypedEventEmitter implements this.shutdownController = new AbortController() setMaxListeners(Infinity, 
this.shutdownController.signal) + + this.onHop = this.onHop.bind(this) } readonly [Symbol.toStringTag] = '@libp2p/circuit-relay-v2-server' @@ -151,11 +153,7 @@ class CircuitRelayServer extends TypedEventEmitter implements return } - await this.registrar.handle(RELAY_V2_HOP_CODEC, (data) => { - void this.onHop(data).catch(err => { - this.log.error(err) - }) - }, { + await this.registrar.handle(RELAY_V2_HOP_CODEC, this.onHop, { maxInboundStreams: this.maxInboundHopStreams, maxOutboundStreams: this.maxOutboundHopStreams, runOnLimitedConnection: true @@ -175,11 +173,14 @@ class CircuitRelayServer extends TypedEventEmitter implements this.started = false } - async onHop ({ connection, stream }: IncomingStreamData): Promise { + async onHop (stream: Stream, connection: Connection): Promise { this.log('received circuit v2 hop protocol stream from %p', connection.remotePeer) + const signal = AbortSignal.timeout(this.hopTimeout) + setMaxListeners(Infinity, signal) + const options = { - signal: AbortSignal.timeout(this.hopTimeout) + signal } const pbstr = pbStream(stream) @@ -422,7 +423,7 @@ class CircuitRelayServer extends TypedEventEmitter implements if (response == null) { this.log.error('could not read response from %p', connection.remotePeer) - await stream.close(options) + await stream.closeWrite(options) return } @@ -432,7 +433,7 @@ class CircuitRelayServer extends TypedEventEmitter implements } this.log('stop request failed with code %d', response.status) - await stream.close(options) + await stream.closeWrite(options) } get reservations (): PeerMap { diff --git a/packages/transport-circuit-relay-v2/src/transport/discovery.ts b/packages/transport-circuit-relay-v2/src/transport/discovery.ts index 302a172c68..5c4b1d03f3 100644 --- a/packages/transport-circuit-relay-v2/src/transport/discovery.ts +++ b/packages/transport-circuit-relay-v2/src/transport/discovery.ts @@ -1,4 +1,4 @@ -import { PeerQueue } from '@libp2p/utils/peer-queue' +import { PeerQueue } from '@libp2p/utils' import { anySignal } from 'any-signal' import { TypedEventEmitter, setMaxListeners } from 'main-event' import { toString as uint8ArrayToString } from 'uint8arrays/to-string' diff --git a/packages/transport-circuit-relay-v2/src/transport/reservation-store.ts b/packages/transport-circuit-relay-v2/src/transport/reservation-store.ts index 28a8198135..07c8ec7bba 100644 --- a/packages/transport-circuit-relay-v2/src/transport/reservation-store.ts +++ b/packages/transport-circuit-relay-v2/src/transport/reservation-store.ts @@ -1,10 +1,8 @@ import { ListenError } from '@libp2p/interface' import { PeerMap } from '@libp2p/peer-collections' -import { createScalableCuckooFilter } from '@libp2p/utils/filters' -import { PeerQueue } from '@libp2p/utils/peer-queue' +import { createScalableCuckooFilter, PeerQueue, pbStream } from '@libp2p/utils' import { multiaddr } from '@multiformats/multiaddr' import { Circuit } from '@multiformats/multiaddr-matcher' -import { pbStream } from 'it-protobuf-stream' import { TypedEventEmitter, setMaxListeners } from 'main-event' import { nanoid } from 'nanoid' import { DEFAULT_MAX_RESERVATION_QUEUE_LENGTH, DEFAULT_RESERVATION_COMPLETION_TIMEOUT, DEFAULT_RESERVATION_CONCURRENCY, KEEP_ALIVE_TAG, RELAY_V2_HOP_CODEC } from '../constants.js' @@ -14,7 +12,7 @@ import { getExpirationMilliseconds } from '../utils.js' import type { Reservation } from '../pb/index.js' import type { AbortOptions, Libp2pEvents, ComponentLogger, Logger, PeerId, PeerStore, Startable, Metrics, Peer, Connection } from '@libp2p/interface' 
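The changes in this area swap `it-protobuf-stream` for the `pbStream` exported from `@libp2p/utils`, pass handlers to the registrar with the new `(stream, connection)` signature, and close only the write side of a stream once a response has been sent. A rough sketch of a handler following those conventions, assuming a protons-generated message codec; the `codec` parameter and `HOP_TIMEOUT` value are placeholders for illustration, not values from this diff.

```ts
import { pbStream } from '@libp2p/utils'
import type { Connection, Stream } from '@libp2p/interface'

const HOP_TIMEOUT = 30_000 // placeholder timeout, not taken from this diff

// `codec` stands in for a protons-generated codec such as the HopMessage used above
async function handleProtocolStream (stream: Stream, connection: Connection, codec: any): Promise<void> {
  const signal = AbortSignal.timeout(HOP_TIMEOUT)

  const messages = pbStream(stream).pb(codec)

  // read the request from the remote peer
  const request = await messages.read({ signal })

  // ... process the request, then send a reply (echoed here for brevity) ...
  await messages.write(request, { signal })

  // close only our write side, as the updated circuit relay code does,
  // leaving the remote free to finish sending
  await stream.closeWrite({ signal })
}
```

A handler shaped like this can be registered directly, matching the new style used above: `registrar.handle(PROTOCOL, (stream, connection) => { void handleProtocolStream(stream, connection, codec) })`.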
import type { ConnectionManager } from '@libp2p/interface-internal' -import type { Filter } from '@libp2p/utils/filters' +import type { Filter } from '@libp2p/utils' import type { TypedEventTarget } from 'main-event' // allow refreshing a relay reservation if it will expire in the next 10 minutes @@ -449,7 +447,7 @@ export class ReservationStore extends TypedEventEmitter throw err } finally { if (stream.status !== 'closed') { - await stream.close(options) + await stream.closeWrite(options) } } diff --git a/packages/transport-circuit-relay-v2/src/transport/stream-to-conn.ts b/packages/transport-circuit-relay-v2/src/transport/stream-to-conn.ts new file mode 100644 index 0000000000..9ac4771acd --- /dev/null +++ b/packages/transport-circuit-relay-v2/src/transport/stream-to-conn.ts @@ -0,0 +1,116 @@ +import { AbstractMultiaddrConnection } from '@libp2p/utils' +import { Uint8ArrayList } from 'uint8arraylist' +import type { AbortOptions, MultiaddrConnection, Stream } from '@libp2p/interface' +import type { AbstractMultiaddrConnectionInit, SendResult } from '@libp2p/utils' + +export interface StreamMultiaddrConnectionInit extends Omit { + stream: Stream + + /** + * A callback invoked when data is read from the stream + */ + onDataRead?(buf: Uint8ArrayList | Uint8Array): void + + /** + * A callback invoked when data is written to the stream + */ + onDataWrite?(buf: Uint8ArrayList | Uint8Array): void +} + +class StreamMultiaddrConnection extends AbstractMultiaddrConnection { + private stream: Stream + private init: StreamMultiaddrConnectionInit + + constructor (init: StreamMultiaddrConnectionInit) { + let closedWrite = false + + super({ + ...init, + direction: init.stream.direction, + log: init.log.newScope('stream-to-maconn') + }) + + this.init = init + this.stream = init.stream + + this.stream.addEventListener('close', (evt) => { + if (evt.error) { + close(true) + } else { + close() + } + }) + + // count incoming bytes + this.stream.addEventListener('message', (evt) => { + init.onDataRead?.(evt.data) + this.onData(evt.data) + }) + + this.stream.addEventListener('closeWrite', () => { + closedWrite = true + }) + + // piggyback on data send to count outgoing bytes + const send = this.stream.send.bind(this.stream) + this.stream.send = (buf: Uint8Array): boolean => { + return send(buf) + } + + const self = this + + function close (force?: boolean): void { + if (force === true) { + closedWrite = true + } + + if (closedWrite && self.timeline.close == null) { + self.close() + .catch(err => { + self.abort(err) + }) + } + } + } + + async sendClose (options?: AbortOptions): Promise { + await this.stream.closeWrite(options) + } + + sendData (data: Uint8ArrayList): SendResult { + this.init.onDataWrite?.(data) + + return { + sentBytes: data.byteLength, + canSendMore: this.stream.send(data) + } + } + + sendReset (): void { + this.stream.abort(new Error('An error occurred')) + } + + sendCloseWrite (options?: AbortOptions): Promise { + return this.stream.closeWrite(options) + } + + sendCloseRead (options?: AbortOptions): Promise { + return this.stream.closeRead(options) + } + + sendPause (): void { + this.stream.pause() + } + + sendResume (): void { + this.stream.resume() + } +} + +/** + * Convert a duplex iterable into a MultiaddrConnection. 
+ * https://github.com/libp2p/interface-transport#multiaddrconnection + */ +export function streamToMaConnection (init: StreamMultiaddrConnectionInit): MultiaddrConnection { + return new StreamMultiaddrConnection(init) +} diff --git a/packages/transport-circuit-relay-v2/src/transport/transport.ts b/packages/transport-circuit-relay-v2/src/transport/transport.ts index 4f68693b8d..8eb7995c3c 100644 --- a/packages/transport-circuit-relay-v2/src/transport/transport.ts +++ b/packages/transport-circuit-relay-v2/src/transport/transport.ts @@ -1,10 +1,9 @@ import { DialError, InvalidMessageError, serviceCapabilities, serviceDependencies, start, stop, transportSymbol } from '@libp2p/interface' import { peerFilter } from '@libp2p/peer-collections' import { peerIdFromMultihash, peerIdFromString } from '@libp2p/peer-id' -import { streamToMaConnection } from '@libp2p/utils/stream-to-ma-conn' +import { pbStream } from '@libp2p/utils' import { multiaddr } from '@multiformats/multiaddr' import { Circuit } from '@multiformats/multiaddr-matcher' -import { pbStream } from 'it-protobuf-stream' import { setMaxListeners } from 'main-event' import * as Digest from 'multiformats/hashes/digest' import { CustomProgressEvent } from 'progress-events' @@ -14,9 +13,9 @@ import { CircuitListen, CircuitSearch, LimitTracker } from '../utils.js' import { RelayDiscovery } from './discovery.js' import { createListener } from './listener.js' import { ReservationStore } from './reservation-store.js' +import { streamToMaConnection } from './stream-to-conn.js' import type { CircuitRelayTransportComponents, CircuitRelayTransportInit } from './index.js' -import type { Transport, CreateListenerOptions, Listener, Upgrader, ComponentLogger, Logger, Connection, Stream, ConnectionGater, PeerId, PeerStore, OutboundConnectionUpgradeEvents, DialTransportOptions, OpenConnectionProgressEvents, IncomingStreamData } from '@libp2p/interface' -import type { AddressManager, ConnectionManager, Registrar, TransportManager } from '@libp2p/interface-internal' +import type { Transport, CreateListenerOptions, Listener, Logger, Connection, Stream, OutboundConnectionUpgradeEvents, DialTransportOptions, OpenConnectionProgressEvents } from '@libp2p/interface' import type { Multiaddr } from '@multiformats/multiaddr' import type { ProgressEvent } from 'progress-events' @@ -50,17 +49,9 @@ export type CircuitRelayDialEvents = ProgressEvent<'circuit-relay:read-connect-response'> export class CircuitRelayTransport implements Transport { + private readonly components: CircuitRelayTransportComponents private readonly discovery?: RelayDiscovery - private readonly registrar: Registrar - private readonly peerStore: PeerStore - private readonly connectionManager: ConnectionManager - private readonly transportManager: TransportManager - private readonly peerId: PeerId - private readonly upgrader: Upgrader - private readonly addressManager: AddressManager - private readonly connectionGater: ConnectionGater public readonly reservationStore: ReservationStore - private readonly logger: ComponentLogger private readonly maxInboundStopStreams: number private readonly maxOutboundStopStreams?: number private started: boolean @@ -68,16 +59,8 @@ export class CircuitRelayTransport implements Transport private shutdownController: AbortController constructor (components: CircuitRelayTransportComponents, init: CircuitRelayTransportInit = {}) { + this.components = components this.log = components.logger.forComponent('libp2p:circuit-relay:transport') - this.registrar = 
components.registrar - this.peerStore = components.peerStore - this.connectionManager = components.connectionManager - this.transportManager = components.transportManager - this.logger = components.logger - this.peerId = components.peerId - this.upgrader = components.upgrader - this.addressManager = components.addressManager - this.connectionGater = components.connectionGater this.maxInboundStopStreams = init.maxInboundStopStreams ?? defaults.maxInboundStopStreams this.maxOutboundStopStreams = init.maxOutboundStopStreams ?? defaults.maxOutboundStopStreams this.shutdownController = new AbortController() @@ -132,13 +115,13 @@ export class CircuitRelayTransport implements Transport this.shutdownController = new AbortController() setMaxListeners(Infinity, this.shutdownController.signal) - await this.registrar.handle(RELAY_V2_STOP_CODEC, (data) => { - const signal = this.upgrader.createInboundAbortSignal(this.shutdownController.signal) + await this.components.registrar.handle(RELAY_V2_STOP_CODEC, (stream, connection) => { + const signal = this.components.upgrader.createInboundAbortSignal(this.shutdownController.signal) - void this.onStop(data, signal) + void this.onStop(stream, connection, signal) .catch(err => { this.log.error('error while handling STOP protocol', err) - data.stream.abort(err) + stream.abort(err) }) .finally(() => { signal.clear() @@ -157,7 +140,7 @@ export class CircuitRelayTransport implements Transport async stop (): Promise { this.shutdownController.abort() await stop(this.discovery, this.reservationStore) - await this.registrar.unhandle(RELAY_V2_STOP_CODEC) + await this.components.registrar.unhandle(RELAY_V2_STOP_CODEC) this.started = false } @@ -188,16 +171,16 @@ export class CircuitRelayTransport implements Transport const relayPeer = peerIdFromString(relayId) const destinationPeer = peerIdFromString(destinationId) - const relayConnections = this.connectionManager.getConnections(relayPeer) + const relayConnections = this.components.connectionManager.getConnections(relayPeer) let relayConnection = relayConnections[0] if (relayConnection == null) { - await this.peerStore.merge(relayPeer, { + await this.components.peerStore.merge(relayPeer, { multiaddrs: [relayAddr] }) options.onProgress?.(new CustomProgressEvent('circuit-relay:open-connection')) - relayConnection = await this.connectionManager.openConnection(relayPeer, options) + relayConnection = await this.components.connectionManager.openConnection(relayPeer, options) } else { options.onProgress?.(new CustomProgressEvent('circuit-relay:reuse-connection')) } @@ -232,13 +215,13 @@ export class CircuitRelayTransport implements Transport const maConn = streamToMaConnection({ stream: pbstr.unwrap(), remoteAddr: ma, - localAddr: relayAddr.encapsulate(`/p2p-circuit/p2p/${this.peerId.toString()}`), - log: this.log, + localAddr: relayAddr.encapsulate(`/p2p-circuit/p2p/${this.components.peerId.toString()}`), onDataRead: limits.onData, - onDataWrite: limits.onData + onDataWrite: limits.onData, + log: stream.log.newScope('circuit-relay:relayed') }) - const conn = await this.upgrader.upgradeOutbound(maConn, { + const conn = await this.components.upgrader.upgradeOutbound(maConn, { ...options, limits: limits.getLimits() }) @@ -259,11 +242,11 @@ export class CircuitRelayTransport implements Transport */ createListener (options: CreateListenerOptions): Listener { return createListener({ - peerId: this.peerId, - connectionManager: this.connectionManager, - addressManager: this.addressManager, + peerId: this.components.peerId, + 
connectionManager: this.components.connectionManager, + addressManager: this.components.addressManager, reservationStore: this.reservationStore, - logger: this.logger + logger: this.components.logger }) } @@ -292,11 +275,11 @@ export class CircuitRelayTransport implements Transport /** * An incoming STOP request means a remote peer wants to dial us via a relay */ - async onStop ({ connection, stream }: IncomingStreamData, signal: AbortSignal): Promise { + async onStop (stream: Stream, connection: Connection, signal: AbortSignal): Promise { if (!this.reservationStore.hasReservation(connection.remotePeer)) { try { this.log('dialed via relay we did not have a reservation on, start listening on that relay address') - await this.transportManager.listen([connection.remoteAddr.encapsulate('/p2p-circuit')]) + await this.components.transportManager.listen([connection.remoteAddr.encapsulate('/p2p-circuit')]) } catch (err: any) { // failed to refresh our hitherto unknown relay reservation but allow the connection attempt anyway this.log.error('failed to listen on a relay peer we were dialed via but did not have a reservation on', err) @@ -315,7 +298,7 @@ export class CircuitRelayTransport implements Transport await pbstr.write({ type: StopMessage.Type.STATUS, status: Status.MALFORMED_MESSAGE }, { signal }) - await stream.close() + await stream.closeWrite() return } @@ -325,7 +308,7 @@ export class CircuitRelayTransport implements Transport await pbstr.write({ type: StopMessage.Type.STATUS, status: Status.UNEXPECTED_MESSAGE }, { signal }) - await stream.close() + await stream.closeWrite() return } @@ -334,7 +317,7 @@ export class CircuitRelayTransport implements Transport await pbstr.write({ type: StopMessage.Type.STATUS, status: Status.MALFORMED_MESSAGE }, { signal }) - await stream.close({ + await stream.closeWrite({ signal }) return @@ -342,12 +325,12 @@ export class CircuitRelayTransport implements Transport const remotePeerId = peerIdFromMultihash(Digest.decode(request.peer.id)) - if ((await this.connectionGater.denyInboundRelayedConnection?.(connection.remotePeer, remotePeerId)) === true) { + if ((await this.components.connectionGater.denyInboundRelayedConnection?.(connection.remotePeer, remotePeerId)) === true) { this.log.error('connection gater denied inbound relayed connection from %p', connection.remotePeer) await pbstr.write({ type: StopMessage.Type.STATUS, status: Status.PERMISSION_DENIED }, { signal }) - await stream.close({ + await stream.closeWrite({ signal }) return @@ -360,17 +343,17 @@ export class CircuitRelayTransport implements Transport const limits = new LimitTracker(request.limit) const remoteAddr = connection.remoteAddr.encapsulate(`/p2p-circuit/p2p/${remotePeerId.toString()}`) - const localAddr = this.addressManager.getAddresses()[0] + const localAddr = this.components.addressManager.getAddresses()[0] const maConn = streamToMaConnection({ stream: pbstr.unwrap().unwrap(), remoteAddr, localAddr, - log: this.log, onDataRead: limits.onData, - onDataWrite: limits.onData + onDataWrite: limits.onData, + log: stream.log.newScope('circuit-relay:relayed') }) - await this.upgrader.upgradeInbound(maConn, { + await this.components.upgrader.upgradeInbound(maConn, { limits: limits.getLimits(), signal }) diff --git a/packages/transport-circuit-relay-v2/src/utils.ts b/packages/transport-circuit-relay-v2/src/utils.ts index 8ac31b0d09..93cbc2d927 100644 --- a/packages/transport-circuit-relay-v2/src/utils.ts +++ b/packages/transport-circuit-relay-v2/src/utils.ts @@ -1,3 +1,4 @@ +import { 
setMaxListeners } from '@libp2p/interface' import { CODE_P2P_CIRCUIT } from '@multiformats/multiaddr' import { P2P } from '@multiformats/multiaddr-matcher' import { fmt, code, and } from '@multiformats/multiaddr-matcher/utils' @@ -7,35 +8,20 @@ import { sha256 } from 'multiformats/hashes/sha2' import { DurationLimitError, TransferLimitError } from './errors.js' import type { RelayReservation } from './index.js' import type { Limit } from './pb/index.js' -import type { ConnectionLimits, LoggerOptions, Stream } from '@libp2p/interface' -import type { Source } from 'it-stream-types' +import type { ConnectionLimits, LoggerOptions, Stream, MessageStream } from '@libp2p/interface' import type { Uint8ArrayList } from 'uint8arraylist' -async function * countStreamBytes (source: Source, limit: { remaining: bigint }, options: LoggerOptions): AsyncGenerator { +function countStreamBytes (source: MessageStream, limit: { remaining: bigint }, options: LoggerOptions): void { const limitBytes = limit.remaining - for await (const buf of source) { - const len = BigInt(buf.byteLength) - - if ((limit.remaining - len) < 0) { - // this is a safe downcast since len is guarantee to be in the range for a number - const remaining = Number(limit.remaining) - limit.remaining = 0n - - try { - if (remaining !== 0) { - yield buf.subarray(0, remaining) - } - } catch (err: any) { - options.log.error(err) - } + source.addEventListener('message', (evt) => { + const len = BigInt(evt.data.byteLength) + limit.remaining -= len - throw new TransferLimitError(`data limit of ${limitBytes} bytes exceeded`) + if (limit.remaining < 0) { + source.abort(new TransferLimitError(`data limit of ${limitBytes} bytes exceeded`)) } - - limit.remaining -= len - yield buf - } + }) } export function createLimitedRelay (src: Stream, dst: Stream, abortSignal: AbortSignal, reservation: RelayReservation, options: LoggerOptions): void { @@ -49,7 +35,9 @@ export function createLimitedRelay (src: Stream, dst: Stream, abortSignal: Abort if (reservation.limit?.duration != null) { options.log('limiting relayed connection duration to %dms', reservation.limit.duration) - signals.push(AbortSignal.timeout(reservation.limit.duration)) + const durationSignal = AbortSignal.timeout(reservation.limit.duration) + setMaxListeners(Infinity, durationSignal) + signals.push(durationSignal) } const signal = anySignal(signals) @@ -65,50 +53,44 @@ export function createLimitedRelay (src: Stream, dst: Stream, abortSignal: Abort } } - queueMicrotask(() => { - const onAbort = (): void => { - options.log('relayed connection reached time limit') - dst.abort(new DurationLimitError(`duration limit of ${reservation.limit?.duration} ms exceeded`)) - } + const onAbort = (): void => { + const err = new DurationLimitError(`duration limit of ${reservation.limit?.duration} ms exceeded`) + dst.abort(err) + src.abort(err) + } + signal.addEventListener('abort', onAbort, { once: true }) - signal.addEventListener('abort', onAbort, { once: true }) + if (dataLimit != null) { + countStreamBytes(dst, dataLimit, options) + countStreamBytes(src, dataLimit, options) + } - void dst.sink(dataLimit == null ? 
src.source : countStreamBytes(src.source, dataLimit, options)) - .catch(err => { - options.log.error('error while relaying streams src -> dst', err) - abortStreams(err) - }) - .finally(() => { - srcDstFinished = true + src.addEventListener('close', (evt) => { + if (evt.error != null) { + options.log.error('error while relaying streams src -> dst - %e', evt.error) + abortStreams(evt.error) + } else { + srcDstFinished = true + } - if (dstSrcFinished) { - signal.removeEventListener('abort', onAbort) - signal.clear() - } - }) + if (dstSrcFinished) { + signal.removeEventListener('abort', onAbort) + signal.clear() + } }) - queueMicrotask(() => { - const onAbort = (): void => { - options.log('relayed connection reached time limit') - src.abort(new DurationLimitError(`duration limit of ${reservation.limit?.duration} ms exceeded`)) + dst.addEventListener('close', (evt) => { + if (evt.error != null) { + options.log.error('error while relaying streams dst -> src - %e', evt.error) + abortStreams(evt.error) + } else { + dstSrcFinished = true } - signal.addEventListener('abort', onAbort, { once: true }) - - void src.sink(dataLimit == null ? dst.source : countStreamBytes(dst.source, dataLimit, options)) - .catch(err => { - options.log.error('error while relaying streams dst -> src', err) - abortStreams(err) - }) - .finally(() => { - dstSrcFinished = true - - if (srcDstFinished) { - signal.removeEventListener('abort', onAbort) - signal.clear() - } - }) + if (srcDstFinished) { + signal.removeEventListener('abort', onAbort) + signal.clear() + } }) } diff --git a/packages/transport-circuit-relay-v2/test/hop.spec.ts b/packages/transport-circuit-relay-v2/test/hop.spec.ts index d02bf036a9..a9e8f8ac0a 100644 --- a/packages/transport-circuit-relay-v2/test/hop.spec.ts +++ b/packages/transport-circuit-relay-v2/test/hop.spec.ts @@ -1,14 +1,11 @@ -/* eslint-disable max-nested-callbacks */ - import { generateKeyPair } from '@libp2p/crypto/keys' import { isStartable } from '@libp2p/interface' -import { mockRegistrar, mockUpgrader, mockNetwork, mockConnectionManager } from '@libp2p/interface-compliance-tests/mocks' import { defaultLogger } from '@libp2p/logger' import { PeerMap } from '@libp2p/peer-collections' import { peerIdFromPrivateKey } from '@libp2p/peer-id' +import { pbStream } from '@libp2p/utils' import { multiaddr } from '@multiformats/multiaddr' import { expect } from 'aegir/chai' -import { pbStream } from 'it-protobuf-stream' import { TypedEventEmitter } from 'main-event' import Sinon from 'sinon' import { stubInterface } from 'sinon-ts' @@ -52,7 +49,7 @@ describe('circuit-relay hop protocol', function () { const privateKey = await generateKeyPair('Ed25519') const peerId = peerIdFromPrivateKey(privateKey) - const registrar = mockRegistrar() + const registrar = stubInterface() const connections = new PeerMap() const octet = peerIndex + 100 @@ -63,7 +60,9 @@ describe('circuit-relay hop protocol', function () { addressManager.getAddresses.returns([ ma ]) - const peerStore = stubInterface() + const peerStore = stubInterface({ + all: async () => [] + }) const events = new TypedEventEmitter() events.addEventListener('connection:open', (evt) => { @@ -75,16 +74,9 @@ describe('circuit-relay hop protocol', function () { connections.delete(conn.remotePeer) }) - const connectionManager = mockConnectionManager({ - peerId, - registrar, - events - }) + const connectionManager = stubInterface() - const upgrader = mockUpgrader({ - registrar, - events - }) + const upgrader = stubInterface() const connectionGater = {} @@ 
-136,7 +128,6 @@ describe('circuit-relay hop protocol', function () { logger: defaultLogger() } - mockNetwork.addNode(node) nodes.push(node) return node @@ -200,8 +191,6 @@ describe('circuit-relay hop protocol', function () { await node.circuitRelayTransport.stop() } } - - mockNetwork.reset() }) describe('reserve', function () { diff --git a/packages/transport-circuit-relay-v2/test/stop.spec.ts b/packages/transport-circuit-relay-v2/test/stop.spec.ts index f6596076c7..a04d02ca4d 100644 --- a/packages/transport-circuit-relay-v2/test/stop.spec.ts +++ b/packages/transport-circuit-relay-v2/test/stop.spec.ts @@ -2,15 +2,13 @@ import { generateKeyPair } from '@libp2p/crypto/keys' import { isStartable } from '@libp2p/interface' -import { mockStream } from '@libp2p/interface-compliance-tests/mocks' import { defaultLogger } from '@libp2p/logger' import { peerIdFromPrivateKey } from '@libp2p/peer-id' +import { streamPair, pbStream } from '@libp2p/utils' import { multiaddr } from '@multiformats/multiaddr' import { expect } from 'aegir/chai' import { anySignal } from 'any-signal' import delay from 'delay' -import { duplexPair } from 'it-pair/duplex' -import { pbStream } from 'it-protobuf-stream' import { TypedEventEmitter } from 'main-event' import Sinon from 'sinon' import { stubInterface } from 'sinon-ts' @@ -81,15 +79,9 @@ describe('circuit-relay stop protocol', function () { handler = components.registrar.handle.getCall(0).args[1] - const [localDuplex, remoteDuplex] = duplexPair() + ;[localStream, remoteStream] = await streamPair() - localStream = mockStream(localDuplex) - remoteStream = mockStream(remoteDuplex) - - handler({ - stream: remoteStream, - connection: stubInterface() - }) + handler(remoteStream, stubInterface()) pbStr = pbStream(localStream).pb(StopMessage) }) @@ -187,10 +179,7 @@ describe('circuit-relay stop protocol', function () { components.transportManager.listen.returns(Promise.resolve()) - void transport.onStop({ - connection, - stream: remoteStream - }, AbortSignal.timeout(5_000)) + void transport.onStop(remoteStream, connection, AbortSignal.timeout(5_000)) await pbStr.write({ type: StopMessage.Type.CONNECT, diff --git a/packages/utils/test/stream-to-ma-conn.spec.ts b/packages/transport-circuit-relay-v2/test/transport/stream-to-conn.spec.ts similarity index 71% rename from packages/utils/test/stream-to-ma-conn.spec.ts rename to packages/transport-circuit-relay-v2/test/transport/stream-to-conn.spec.ts index 8c58331b2e..c871609338 100644 --- a/packages/utils/test/stream-to-ma-conn.spec.ts +++ b/packages/transport-circuit-relay-v2/test/transport/stream-to-conn.spec.ts @@ -3,33 +3,29 @@ import { defaultLogger, logger } from '@libp2p/logger' import { multiaddr } from '@multiformats/multiaddr' import { expect } from 'aegir/chai' -import all from 'it-all' import { pair } from 'it-pair' -import { pipe } from 'it-pipe' +import { stubInterface } from 'sinon-ts' +import { Uint8ArrayList } from 'uint8arraylist' import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' -import { streamToMaConnection } from '../src/stream-to-ma-conn.js' +import { streamToMaConnection } from '../../src/transport/stream-to-conn.js' import type { Stream } from '@libp2p/interface' import type { Duplex, Source } from 'it-stream-types' -import type { Uint8ArrayList } from 'uint8arraylist' function toMuxedStream (stream: Duplex, Source, Promise>): Stream { - const muxedStream: Stream = { + const muxedStream: Stream = stubInterface({ ...stream, - close: async () => {}, - closeRead: async () => {}, 
closeWrite: async () => {}, abort: () => {}, direction: 'outbound', timeline: { open: Date.now() }, - metadata: {}, id: `muxed-stream-${Math.random()}`, status: 'open', - readStatus: 'ready', - writeStatus: 'ready', + readStatus: 'readable', + writeStatus: 'writable', log: logger('muxed-stream') - } + }) return muxedStream } @@ -45,18 +41,17 @@ describe('Convert stream into a multiaddr connection', () => { stream: toMuxedStream(stream), localAddr, remoteAddr, - log: defaultLogger().forComponent('test') + log: defaultLogger().forComponent('stream-to-maconn') }) expect(maConn).to.exist() - expect(maConn.sink).to.exist() - expect(maConn.source).to.exist() + expect(maConn.send).to.exist() expect(maConn.remoteAddr).to.eql(remoteAddr) expect(maConn.timeline).to.exist() expect(maConn.timeline.open).to.exist() expect(maConn.timeline.close).to.not.exist() - await maConn.close() + await maConn.closeWrite() expect(maConn.timeline.close).to.exist() }) @@ -66,19 +61,19 @@ describe('Convert stream into a multiaddr connection', () => { stream: toMuxedStream(stream), localAddr, remoteAddr, - log: defaultLogger().forComponent('test') + log: defaultLogger().forComponent('stream-to-maconn') + }) + + const streamData: Array = [] + maConn.addEventListener('message', (evt) => { + streamData.push(evt.data) }) const data = uint8ArrayFromString('hey') - const streamData = await pipe( - async function * () { - yield data - }, - maConn, - async (source) => all(source) - ) + maConn.send(data) + await maConn.closeWrite() - expect(streamData).to.eql([data]) + expect(streamData).to.deep.equal([new Uint8ArrayList(data)]) // underlying stream end closes the connection expect(maConn.timeline.close).to.exist() }) diff --git a/packages/transport-circuit-relay-v2/test/utils.spec.ts b/packages/transport-circuit-relay-v2/test/utils.spec.ts index e10955b762..0592dda835 100644 --- a/packages/transport-circuit-relay-v2/test/utils.spec.ts +++ b/packages/transport-circuit-relay-v2/test/utils.spec.ts @@ -1,14 +1,12 @@ /* eslint-env mocha */ -import { mockStream } from '@libp2p/interface-compliance-tests/mocks' import { defaultLogger } from '@libp2p/logger' +import { streamPair } from '@libp2p/utils' import { multiaddr } from '@multiformats/multiaddr' import { expect } from 'aegir/chai' import delay from 'delay' -import drain from 'it-drain' -import { pushable } from 'it-pushable' -import toBuffer from 'it-to-buffer' -import { raceSignal } from 'race-signal' +import all from 'it-all' +import merge from 'it-merge' import { retimeableSignal } from 'retimeable-signal' import Sinon from 'sinon' import { stubInterface } from 'sinon-ts' @@ -17,7 +15,6 @@ import { fromString as uint8arrayFromString } from 'uint8arrays/from-string' import { createLimitedRelay, getExpirationMilliseconds, LimitTracker, namespaceToCid } from '../src/utils.js' import type { Limit, RelayReservation } from '../src/index.js' import type { Logger } from '@libp2p/interface' -import type { Duplex, Source } from 'it-stream-types' describe('circuit-relay utils', () => { function createReservation (limit?: Limit): RelayReservation { @@ -30,38 +27,10 @@ describe('circuit-relay utils', () => { } it('should create relay', async () => { - const received = pushable() - - const local: Duplex, any> = { - source: (async function * () { - await delay(10) - yield uint8arrayFromString('0123') - await delay(10) - yield uint8arrayFromString('4567') - await delay(10) - yield uint8arrayFromString('8912') - }()), - sink: async (source) => { - await drain(source) - } - } - const remote: 
Duplex, any> = { - source: [], - sink: async (source) => { - try { - for await (const buf of source) { - received.push(buf.subarray()) - } - } finally { - received.end() - } - } - } - const controller = new AbortController() - const localStream = mockStream(local) - const remoteStream = mockStream(remote) - + const [localStream, remoteStream] = await streamPair({ + delay: 10 + }) const localStreamAbortSpy = Sinon.spy(localStream, 'abort') const remoteStreamAbortSpy = Sinon.spy(remoteStream, 'abort') @@ -69,47 +38,27 @@ describe('circuit-relay utils', () => { log: stubInterface() }) - expect(await toBuffer(received)).to.have.property('byteLength', 12) + localStream.send(uint8arrayFromString('0123')) + localStream.send(uint8arrayFromString('4567')) + localStream.send(uint8arrayFromString('8912')) + await localStream.closeWrite() + + const received = await all(remoteStream) + + expect(new Uint8ArrayList(...received)).to.have.property('byteLength', 12) expect(localStreamAbortSpy).to.have.property('called', false) expect(remoteStreamAbortSpy).to.have.property('called', false) }) it('should create data limited relay', async () => { - const received = pushable() - - const local: Duplex, any> = { - source: (async function * () { - await delay(10) - yield uint8arrayFromString('0123') - await delay(10) - yield uint8arrayFromString('4567') - await delay(10) - }()), - sink: async (source) => { - await drain(source) - } - } - const remote: Duplex, any> = { - source: [], - sink: async (source) => { - try { - for await (const buf of source) { - received.push(buf.subarray()) - } - } finally { - received.end() - } - } - } - + const [localStream, remoteStream] = await streamPair({ + delay: 10 + }) const controller = new AbortController() const limit = { data: 5n } - const localStream = mockStream(local) - const remoteStream = mockStream(remote) - const localStreamAbortSpy = Sinon.spy(localStream, 'abort') const remoteStreamAbortSpy = Sinon.spy(remoteStream, 'abort') @@ -117,59 +66,26 @@ describe('circuit-relay utils', () => { log: stubInterface() }) - expect(await toBuffer(received)).to.have.property('byteLength', 5) + localStream.send(uint8arrayFromString('0123')) + localStream.send(uint8arrayFromString('4567')) + await localStream.closeWrite() + + const received = await all(remoteStream) + + expect(new Uint8ArrayList(...received)).to.have.property('byteLength', 5) expect(localStreamAbortSpy).to.have.property('called', true) expect(remoteStreamAbortSpy).to.have.property('called', true) }) it('should create data limited relay that limits data in both directions', async () => { - const received = pushable() - - const local: Duplex, any> = { - source: (async function * () { - await delay(10) - yield uint8arrayFromString('0123') - await delay(10) - yield uint8arrayFromString('4567') - await delay(10) - }()), - sink: async (source) => { - try { - for await (const buf of source) { - received.push(buf.subarray()) - } - } finally { - received.end() - } - } - } - const remote: Duplex, any> = { - source: (async function * () { - await delay(10) - yield uint8arrayFromString('8912') - await delay(10) - yield uint8arrayFromString('3456') - await delay(10) - }()), - sink: async (source) => { - try { - for await (const buf of source) { - received.push(buf.subarray()) - } - } finally { - received.end() - } - } - } - + const [localStream, remoteStream] = await streamPair({ + delay: 10 + }) const controller = new AbortController() const limit = { data: 5n } - const localStream = mockStream(local) - const remoteStream = 
mockStream(remote) - const localStreamAbortSpy = Sinon.spy(localStream, 'abort') const remoteStreamAbortSpy = Sinon.spy(remoteStream, 'abort') @@ -177,50 +93,41 @@ describe('circuit-relay utils', () => { log: stubInterface() }) - expect(await toBuffer(received)).to.have.property('byteLength', 5) + localStream.send(uint8arrayFromString('0123')) + localStream.send(uint8arrayFromString('4567')) + await localStream.closeWrite() + + remoteStream.send(uint8arrayFromString('8912')) + remoteStream.send(uint8arrayFromString('3456')) + await localStream.closeWrite() + + const received = await all(merge(localStream, remoteStream)) + + expect(new Uint8ArrayList(...received)).to.have.property('byteLength', 5) expect(localStreamAbortSpy).to.have.property('called', true) expect(remoteStreamAbortSpy).to.have.property('called', true) }) it('should create time limited relay', async () => { - const received = pushable() const abortController = new AbortController() - - const local = { - source: (async function * () { - await raceSignal(delay(10), abortController.signal) - yield new Uint8ArrayList(Uint8Array.from([0, 1, 2, 3])) - await raceSignal(delay(5000), abortController.signal) - yield new Uint8ArrayList(Uint8Array.from([4, 5, 6, 7])) - }()), - sink: async (source: Source) => { - await drain(source) - } - } - const remote = { - source: (async function * () {}()), - sink: async (source: Source) => { - try { - for await (const buf of source) { - received.push(buf.subarray()) - } - } finally { - received.end() - } - } - } + const [localStream, remoteStream] = await streamPair({ + delay: 5_000 + }) const controller = new AbortController() const limit = { duration: 100 } - const localStream = mockStream(local) localStream.abort = () => { abortController.abort() } - const remoteStream = mockStream(remote) + localStream.send(uint8arrayFromString('0123')) + localStream.send(uint8arrayFromString('4567')) + await localStream.closeWrite() + + const received = await all(remoteStream) const localStreamAbortSpy = Sinon.spy(localStream, 'abort') const remoteStreamAbortSpy = Sinon.spy(remoteStream, 'abort') @@ -229,7 +136,7 @@ describe('circuit-relay utils', () => { log: defaultLogger().forComponent('test') }) - expect(await toBuffer(received)).to.have.property('byteLength', 4) + expect(new Uint8ArrayList(...received)).to.have.property('byteLength', 4) expect(localStreamAbortSpy).to.have.property('called', true) expect(remoteStreamAbortSpy).to.have.property('called', true) }) diff --git a/packages/transport-memory/package.json b/packages/transport-memory/package.json index f664b207b0..6e948013a7 100644 --- a/packages/transport-memory/package.json +++ b/packages/transport-memory/package.json @@ -45,6 +45,7 @@ }, "dependencies": { "@libp2p/interface": "^2.10.5", + "@libp2p/utils": "^6.7.1", "@multiformats/multiaddr": "^12.4.4", "@multiformats/multiaddr-matcher": "^2.0.0", "@types/sinon": "^17.0.4", diff --git a/packages/transport-memory/src/connections.ts b/packages/transport-memory/src/connections.ts index 67fcefa539..7692d64c1a 100644 --- a/packages/transport-memory/src/connections.ts +++ b/packages/transport-memory/src/connections.ts @@ -45,11 +45,11 @@ import { ConnectionFailedError } from '@libp2p/interface' import { multiaddr } from '@multiformats/multiaddr' import delay from 'delay' -import map from 'it-map' import { pushable } from 'it-pushable' import { raceSignal } from 'race-signal' +import { pushableToMaConn } from './pushable-to-conn.ts' import type { MemoryTransportComponents, MemoryTransportInit } from 
'./index.js' -import type { MultiaddrConnection, PeerId } from '@libp2p/interface' +import type { Logger, MultiaddrConnection, PeerId } from '@libp2p/interface' import type { Uint8ArrayList } from 'uint8arraylist' export const connections = new Map() @@ -63,106 +63,72 @@ interface MemoryConnectionInit extends MemoryTransportInit { address: string } +let connectionId = 0 + export class MemoryConnection { + public readonly latency: number + private readonly components: MemoryTransportComponents private readonly init: MemoryConnectionInit private readonly connections: Set - private readonly latency: number + private readonly log: Logger constructor (components: MemoryTransportComponents, init: MemoryConnectionInit) { this.components = components this.init = init this.connections = new Set() this.latency = init.latency ?? 0 + this.log = components.logger.forComponent('libp2p:memory') } async dial (dialingPeerId: PeerId, signal: AbortSignal): Promise { - const dialerPushable = pushable() - const listenerPushable = pushable() const self = this - const dialer: MultiaddrConnection = { - source: (async function * () { - yield * map(listenerPushable, async buf => { - if (self.latency > 0) { - await delay(self.latency) - } - - return buf - }) - })(), - sink: async (source) => { - for await (const buf of source) { - dialerPushable.push(buf) + let dialerEnded = false + let listenerEnded = false + + const dialerPushable = pushable({ + onEnd (err) { + dialerEnded = true + self.connections.delete(dialer) + + if (!listenerEnded) { + listenerPushable.end(err) } - }, - close: async () => { - dialerPushable.end() - this.connections.delete(dialer) - dialer.timeline.close = Date.now() - - listenerPushable.end() - this.connections.delete(listener) - listener.timeline.close = Date.now() - }, - abort: (err) => { - dialerPushable.end(err) - this.connections.delete(dialer) - dialer.timeline.close = Date.now() - - listenerPushable.end(err) - this.connections.delete(listener) - listener.timeline.close = Date.now() - }, - timeline: { - open: Date.now() - }, - remoteAddr: multiaddr(`${this.init.address}/p2p/${this.components.peerId}`), - log: this.components.logger.forComponent('libp2p:memory') - } - - const listener: MultiaddrConnection = { - source: (async function * () { - yield * map(dialerPushable, async buf => { - if (self.latency > 0) { - await delay(self.latency) - } - - return buf - }) - })(), - sink: async (source) => { - for await (const buf of source) { - listenerPushable.push(buf) + } + }) + const listenerPushable = pushable({ + onEnd (err) { + listenerEnded = true + self.connections.delete(listener) + + if (!dialerEnded) { + dialerPushable.end(err) } - }, - close: async () => { - listenerPushable.end() - this.connections.delete(listener) - listener.timeline.close = Date.now() - - dialerPushable.end() - this.connections.delete(dialer) - dialer.timeline.close = Date.now() - }, - abort: (err) => { - listenerPushable.end(err) - this.connections.delete(listener) - listener.timeline.close = Date.now() - - dialerPushable.end(err) - this.connections.delete(dialer) - dialer.timeline.close = Date.now() - }, - timeline: { - open: Date.now() - }, + } + }) + + const dialer = pushableToMaConn({ + connection: this, + remoteAddr: multiaddr(`${this.init.address}/p2p/${this.components.peerId}`), + direction: 'outbound', + localPushable: dialerPushable, + remotePushable: listenerPushable, + log: this.log.newScope(`${connectionId}:outbound`) + }) + + const listener = pushableToMaConn({ + connection: this, remoteAddr: 
multiaddr(`${this.init.address}-outgoing/p2p/${dialingPeerId}`), - log: this.components.logger.forComponent('libp2p:memory') - } + direction: 'inbound', + localPushable: listenerPushable, + remotePushable: dialerPushable, + log: this.log.newScope(`${connectionId}:inbound`) + }) this.connections.add(dialer) this.connections.add(listener) + connectionId++ await raceSignal(delay(this.latency), signal) diff --git a/packages/transport-memory/src/pushable-to-conn.ts b/packages/transport-memory/src/pushable-to-conn.ts new file mode 100644 index 0000000000..2dab69aad9 --- /dev/null +++ b/packages/transport-memory/src/pushable-to-conn.ts @@ -0,0 +1,80 @@ +import { StreamResetError } from '@libp2p/interface' +import { AbstractMultiaddrConnection } from '@libp2p/utils' +import delay from 'delay' +import map from 'it-map' +import { Uint8ArrayList } from 'uint8arraylist' +import type { MemoryConnection } from './connections.ts' +import type { MessageStreamDirection, MultiaddrConnection, AbortOptions } from '@libp2p/interface' +import type { AbstractMultiaddrConnectionInit, SendResult } from '@libp2p/utils' +import type { Multiaddr } from '@multiformats/multiaddr' +import type { Pushable } from 'it-pushable' + +export interface MemoryMultiaddrConnectionInit extends Omit { + localPushable: Pushable + remotePushable: Pushable + inactivityTimeout?: number + closeTimeout?: number + listeningAddr?: Multiaddr + connection: MemoryConnection + direction: MessageStreamDirection +} + +class MemoryMultiaddrConnection extends AbstractMultiaddrConnection { + private localPushable: Pushable + + constructor (init: MemoryMultiaddrConnectionInit) { + super(init) + + this.localPushable = init.localPushable + + Promise.resolve() + .then(async () => { + for await (const buf of map(init.remotePushable, async buf => { + if (init.connection.latency > 0) { + await delay(init.connection.latency) + } + + return buf + })) { + this.onData(buf) + } + }) + .catch(err => { + this.abort(err) + }) + } + + sendReset (): void { + this.localPushable.end(new StreamResetError()) + } + + sendData (data: Uint8ArrayList): SendResult { + this.localPushable.push(data) + + return { + sentBytes: data.byteLength, + canSendMore: true + } + } + + async sendCloseWrite (options?: AbortOptions): Promise { + this.localPushable.end() + options?.signal?.throwIfAborted() + } + + async sendCloseRead (options?: AbortOptions): Promise { + options?.signal?.throwIfAborted() + } + + sendPause (): void { + // read backpressure is not supported + } + + sendResume (): void { + // read backpressure is not supported + } +} + +export function pushableToMaConn (init: MemoryMultiaddrConnectionInit): MultiaddrConnection { + return new MemoryMultiaddrConnection(init) +} diff --git a/packages/transport-tcp/package.json b/packages/transport-tcp/package.json index da799c4fa8..e1023b993c 100644 --- a/packages/transport-tcp/package.json +++ b/packages/transport-tcp/package.json @@ -59,15 +59,16 @@ "@multiformats/multiaddr-matcher": "^2.0.0", "@types/sinon": "^17.0.4", "main-event": "^1.0.1", - "p-defer": "^4.0.1", "p-event": "^6.0.1", "progress-events": "^1.0.1", - "race-event": "^1.3.0", - "stream-to-it": "^1.0.1" + "race-event": "^1.6.0", + "uint8arraylist": "^2.4.8" }, "devDependencies": { "@libp2p/logger": "^5.1.21", "aegir": "^47.0.14", + "delay": "^6.0.0", + "p-defer": "^4.0.1", "p-wait-for": "^5.0.2", "sinon": "^20.0.0", "sinon-ts": "^2.0.0", diff --git a/packages/transport-tcp/src/index.ts b/packages/transport-tcp/src/index.ts index 596789a51e..94d386e39c 100644 --- 
a/packages/transport-tcp/src/index.ts +++ b/packages/transport-tcp/src/index.ts @@ -59,11 +59,6 @@ export interface TCPOptions { */ outboundSocketInactivityTimeout?: number - /** - * When closing a socket, wait this long for it to close gracefully before it is closed more forcibly - */ - socketCloseTimeout?: number - /** * Set this property to reject connections when the server's connection count gets high. * https://nodejs.org/api/net.html#servermaxconnections diff --git a/packages/transport-tcp/src/listener.ts b/packages/transport-tcp/src/listener.ts index 2114455204..121053ff78 100644 --- a/packages/transport-tcp/src/listener.ts +++ b/packages/transport-tcp/src/listener.ts @@ -1,6 +1,6 @@ import net from 'net' import { AlreadyStartedError, InvalidParametersError, NotStartedError } from '@libp2p/interface' -import { getThinWaistAddresses } from '@libp2p/utils/get-thin-waist-addresses' +import { getThinWaistAddresses } from '@libp2p/utils' import { multiaddr } from '@multiformats/multiaddr' import { TypedEventEmitter, setMaxListeners } from 'main-event' import { pEvent } from 'p-event' @@ -13,8 +13,7 @@ import type { Multiaddr } from '@multiformats/multiaddr' interface Context extends TCPCreateListenerOptions { upgrader: Upgrader - socketInactivityTimeout?: number - socketCloseTimeout?: number + inactivityTimeout?: number maxConnections?: number backlog?: number metrics?: Metrics @@ -161,14 +160,14 @@ export class TCPListener extends TypedEventEmitter implements Li let maConn: MultiaddrConnection try { - maConn = toMultiaddrConnection(socket, { - listeningAddr: this.status.listeningAddr, - socketInactivityTimeout: this.context.socketInactivityTimeout, - socketCloseTimeout: this.context.socketCloseTimeout, + maConn = toMultiaddrConnection({ + socket, + inactivityTimeout: this.context.inactivityTimeout, metrics: this.metrics?.events, metricPrefix: `${this.addr} `, - logger: this.context.logger, - direction: 'inbound' + direction: 'inbound', + localAddr: this.status.listeningAddr, + log: this.context.logger.forComponent('libp2p:tcp:connection:inbound') }) } catch (err: any) { this.log.error('inbound connection failed', err) diff --git a/packages/transport-tcp/src/socket-to-conn.ts b/packages/transport-tcp/src/socket-to-conn.ts index 267c2cca16..abc0d8ec88 100644 --- a/packages/transport-tcp/src/socket-to-conn.ts +++ b/packages/transport-tcp/src/socket-to-conn.ts @@ -1,230 +1,139 @@ import { InvalidParametersError, TimeoutError } from '@libp2p/interface' -import { ipPortToMultiaddr as toMultiaddr } from '@libp2p/utils/ip-port-to-multiaddr' -import pDefer from 'p-defer' +import { AbstractMultiaddrConnection, socketWriter, ipPortToMultiaddr } from '@libp2p/utils' +import { Unix } from '@multiformats/multiaddr-matcher' import { raceEvent } from 'race-event' -import { duplex } from 'stream-to-it' -import { CLOSE_TIMEOUT, SOCKET_TIMEOUT } from './constants.js' -import { multiaddrToNetConfig } from './utils.js' -import type { ComponentLogger, MultiaddrConnection, CounterGroup } from '@libp2p/interface' -import type { AbortOptions, Multiaddr } from '@multiformats/multiaddr' +import type { AbortOptions, MultiaddrConnection } from '@libp2p/interface' +import type { AbstractMultiaddrConnectionInit, SendResult, SocketWriter } from '@libp2p/utils' +import type { Multiaddr } from '@multiformats/multiaddr' import type { Socket } from 'net' -import type { DeferredPromise } from 'p-defer' +import type { Uint8ArrayList } from 'uint8arraylist' -interface ToConnectionOptions { - listeningAddr?: Multiaddr 
+export interface TCPSocketMultiaddrConnectionInit extends Omit { + socket: Socket remoteAddr?: Multiaddr - localAddr?: Multiaddr - socketInactivityTimeout?: number - socketCloseTimeout?: number - metrics?: CounterGroup - metricPrefix?: string - logger: ComponentLogger - direction: 'inbound' | 'outbound' } -/** - * Convert a socket into a MultiaddrConnection - * https://github.com/libp2p/interface-transport#multiaddrconnection - */ -export const toMultiaddrConnection = (socket: Socket, options: ToConnectionOptions): MultiaddrConnection => { - let closePromise: DeferredPromise - const direction = options.direction - const metrics = options.metrics - const metricPrefix = options.metricPrefix ?? '' - const inactivityTimeout = options.socketInactivityTimeout ?? SOCKET_TIMEOUT - const closeTimeout = options.socketCloseTimeout ?? CLOSE_TIMEOUT - let timedOut = false - let errored = false - - // Check if we are connected on a unix path - if (options.listeningAddr?.getPath() != null) { - options.remoteAddr = options.listeningAddr - } - - if (options.remoteAddr?.getPath() != null) { - options.localAddr = options.remoteAddr - } - - // handle socket errors - socket.on('error', err => { - errored = true - - if (!timedOut) { - maConn.log.error('%s socket error - %e', direction, err) - metrics?.increment({ [`${metricPrefix}error`]: true }) - } - - socket.destroy() - maConn.timeline.close = Date.now() - }) - - let remoteAddr: Multiaddr - - if (options.remoteAddr != null) { - remoteAddr = options.remoteAddr - } else { - if (socket.remoteAddress == null || socket.remotePort == null) { - // this can be undefined if the socket is destroyed (for example, if the client disconnected) - // https://nodejs.org/dist/latest-v16.x/docs/api/net.html#socketremoteaddress - throw new InvalidParametersError('Could not determine remote address or port') - } - - remoteAddr = toMultiaddr(socket.remoteAddress, socket.remotePort) - } +class TCPSocketMultiaddrConnection extends AbstractMultiaddrConnection { + private socket: Socket + private writer: SocketWriter + + constructor (init: TCPSocketMultiaddrConnectionInit) { + let remoteAddr = init.remoteAddr + + // check if we are connected on a unix path + if (init.localAddr != null && Unix.matches(init.localAddr)) { + remoteAddr = init.localAddr + } else if (remoteAddr == null) { + if (init.socket.remoteAddress == null || init.socket.remotePort == null) { + // this can be undefined if the socket is destroyed (for example, if the client disconnected) + // https://nodejs.org/dist/latest-v16.x/docs/api/net.html#socketremoteaddress + throw new InvalidParametersError('Could not determine remote address or port') + } - const lOpts = multiaddrToNetConfig(remoteAddr) - const lOptsStr = lOpts.path ?? `${lOpts.host ?? ''}:${lOpts.port ?? 
''}` - const { sink, source } = duplex(socket) - - // by default there is no timeout - // https://nodejs.org/dist/latest-v16.x/docs/api/net.html#socketsettimeouttimeout-callback - socket.setTimeout(inactivityTimeout) - - socket.once('timeout', () => { - timedOut = true - maConn.log('%s %s socket read timeout', direction, lOptsStr) - metrics?.increment({ [`${metricPrefix}timeout`]: true }) - - // if the socket times out due to inactivity we must manually close the connection - // https://nodejs.org/dist/latest-v16.x/docs/api/net.html#event-timeout - socket.destroy(new TimeoutError()) - maConn.timeline.close = Date.now() - }) - - socket.once('close', () => { - // record metric for clean exit - if (!timedOut && !errored) { - maConn.log('%s %s socket close', direction, lOptsStr) - metrics?.increment({ [`${metricPrefix}close`]: true }) + remoteAddr = ipPortToMultiaddr(init.socket.remoteAddress, init.socket.remotePort) } - // In instances where `close` was not explicitly called, - // such as an iterable stream ending, ensure we have set the close - // timeline - socket.destroy() - maConn.timeline.close = Date.now() - }) - - socket.once('end', () => { - // the remote sent a FIN packet which means no more data will be sent - // https://nodejs.org/dist/latest-v16.x/docs/api/net.html#event-end - maConn.log('%s %s socket end', direction, lOptsStr) - metrics?.increment({ [`${metricPrefix}end`]: true }) - }) - - const maConn: MultiaddrConnection = { - async sink (source) { - try { - await sink((async function * () { - for await (const buf of source) { - if (buf instanceof Uint8Array) { - yield buf - } else { - yield buf.subarray() - } - } - })()) - } catch (err: any) { - // If aborted we can safely ignore - if (err.type !== 'aborted') { - // If the source errored the socket will already have been destroyed by - // duplex(). If the socket errored it will already be - // destroyed. There's nothing to do here except log the error & return. - maConn.log.error('%s %s error in sink - %e', direction, lOptsStr, err) - } + super({ + ...init, + remoteAddr + }) + + this.socket = init.socket + + // handle incoming data + this.socket.on('data', buf => { + this.onData(buf) + }) + + // handle socket errors + this.socket.on('error', err => { + this.abort(err) + }) + + // by default there is no timeout + // https://nodejs.org/dist/latest-v16.x/docs/api/net.html#socketsettimeouttimeout-callback + this.socket.setTimeout(init.inactivityTimeout ?? 
(2 * 60 * 1_000)) + + this.socket.once('timeout', () => { + // if the socket times out due to inactivity we must manually close the connection + // https://nodejs.org/dist/latest-v16.x/docs/api/net.html#event-timeout + this.abort(new TimeoutError()) + }) + + this.socket.once('end', () => { + // the remote sent a FIN packet which means no more data will be sent + // https://nodejs.org/dist/latest-v16.x/docs/api/net.html#event-end + this.onRemoteCloseWrite() + }) + + this.socket.once('close', hadError => { + if (hadError) { + this.abort(new Error('TCP transmission error')) + return } - // we have finished writing, send the FIN message - socket.end() - }, + this.onRemoteCloseWrite() + this.onClosed() + }) - source, + // the socket can accept more data + this.socket.on('drain', () => { + this.safeDispatchEvent('drain') + }) - // If the remote address was passed, use it - it may have the peer ID encapsulated - remoteAddr, + this.writer = socketWriter(this.socket) + } - timeline: { open: Date.now() }, + sendData (data: Uint8ArrayList): SendResult { + let sentBytes = 0 + let canSendMore = true - async close (options: AbortOptions = {}) { - if (socket.closed) { - maConn.log('the %s %s socket is already closed', direction, lOptsStr) - return - } + for (const buf of data) { + sentBytes += buf.byteLength + canSendMore = this.writer.write(buf) - if (socket.destroyed) { - maConn.log('the %s %s socket is already destroyed', direction, lOptsStr) - return + if (!canSendMore) { + break } + } - if (closePromise != null) { - return closePromise.promise - } + return { + sentBytes, + canSendMore + } + } - try { - closePromise = pDefer() - - // close writable end of socket - socket.end() - - // convert EventEmitter to EventTarget - const eventTarget = socketToEventTarget(socket) - - // don't wait forever to close - const signal = options.signal ?? AbortSignal.timeout(closeTimeout) - - // wait for any unsent data to be sent - if (socket.writableLength > 0) { - maConn.log('%s %s draining socket', direction, lOptsStr) - await raceEvent(eventTarget, 'drain', signal, { - errorEvent: 'error' - }) - maConn.log('%s %s socket drained', direction, lOptsStr) - } - - await Promise.all([ - raceEvent(eventTarget, 'close', signal, { - errorEvent: 'error' - }), - - // all bytes have been sent we can destroy the socket - socket.destroy() - ]) - } catch (err: any) { - this.abort(err) - } finally { - closePromise.resolve() - } - }, + async sendCloseWrite (options?: AbortOptions): Promise { + if (this.socket.destroyed) { + return + } - abort: (err: Error) => { - maConn.log('%s %s socket abort due to error - %e', direction, lOptsStr, err) + this.socket.end() - // the abortSignalListener may already destroyed the socket with an error - socket.destroy() + await raceEvent(this.socket, 'close', options?.signal) + } - // closing a socket is always asynchronous (must wait for "close" event) - // but the tests expect this to be a synchronous operation so we have to - // set the close time here. the tests should be refactored to reflect - // reality. 
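The TCP connection now reports write backpressure through the `SendResult` returned by `sendData` and resumes on the socket's `'drain'` event, instead of waiting on a configurable close timeout. A minimal sketch of that contract against a plain Node.js socket (the address and chunking below are illustrative assumptions, not part of this change):

```ts
import { once } from 'node:events'
import { createConnection } from 'node:net'

// illustrative only: write chunks to a raw socket, pausing whenever the
// kernel buffer is full - socket.write() returning false corresponds to
// SendResult.canSendMore === false, and 'drain' signals it is safe to resume
async function writeWithBackpressure (chunks: Uint8Array[]): Promise<void> {
  const socket = createConnection({ host: '127.0.0.1', port: 4001 })
  await once(socket, 'connect')

  for (const chunk of chunks) {
    const canSendMore = socket.write(chunk)

    if (!canSendMore) {
      await once(socket, 'drain')
    }
  }

  // half-close the writable end and wait for the socket to fully close,
  // similar to what sendCloseWrite does above
  socket.end()
  await once(socket, 'close')
}
```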
- maConn.timeline.close = Date.now() - }, + async sendCloseRead (options?: AbortOptions): Promise { + options?.signal?.throwIfAborted() + } - log: options.logger.forComponent('libp2p:tcp:connection') + sendReset (): void { + this.socket.resetAndDestroy() } - return maConn -} + sendPause (): void { + this.socket.pause() + } -function socketToEventTarget (obj?: any): EventTarget { - const eventTarget = { - addEventListener: (type: any, cb: any) => { - obj.addListener(type, cb) - }, - removeEventListener: (type: any, cb: any) => { - obj.removeListener(type, cb) - } + sendResume (): void { + this.socket.resume() } +} - // @ts-expect-error partial implementation - return eventTarget +/** + * Convert a socket into a MultiaddrConnection + * https://github.com/libp2p/interface-transport#multiaddrconnection + */ +export const toMultiaddrConnection = (init: TCPSocketMultiaddrConnectionInit): MultiaddrConnection => { + return new TCPSocketMultiaddrConnection(init) } diff --git a/packages/transport-tcp/src/tcp.ts b/packages/transport-tcp/src/tcp.ts index 9856d1fad7..51e08cec38 100644 --- a/packages/transport-tcp/src/tcp.ts +++ b/packages/transport-tcp/src/tcp.ts @@ -82,13 +82,13 @@ export class TCP implements Transport { let maConn: MultiaddrConnection try { - maConn = toMultiaddrConnection(socket, { - remoteAddr: ma, - socketInactivityTimeout: this.opts.outboundSocketInactivityTimeout, - socketCloseTimeout: this.opts.socketCloseTimeout, + maConn = toMultiaddrConnection({ + socket, + inactivityTimeout: this.opts.outboundSocketInactivityTimeout, metrics: this.metrics?.events, - logger: this.components.logger, - direction: 'outbound' + direction: 'outbound', + remoteAddr: ma, + log: this.components.logger.forComponent('libp2p:tcp:connection:outbound') }) } catch (err: any) { this.metrics?.errors.increment({ outbound_to_connection: true }) @@ -192,8 +192,7 @@ export class TCP implements Transport { maxConnections: this.opts.maxConnections, backlog: this.opts.backlog, closeServerOnMaxConnections: this.opts.closeServerOnMaxConnections, - socketInactivityTimeout: this.opts.inboundSocketInactivityTimeout, - socketCloseTimeout: this.opts.socketCloseTimeout, + inactivityTimeout: this.opts.inboundSocketInactivityTimeout, metrics: this.components.metrics, logger: this.components.logger }) diff --git a/packages/transport-tcp/test/socket-to-conn.spec.ts b/packages/transport-tcp/test/socket-to-conn.spec.ts index 879186bdad..a97586b050 100644 --- a/packages/transport-tcp/test/socket-to-conn.spec.ts +++ b/packages/transport-tcp/test/socket-to-conn.spec.ts @@ -1,44 +1,47 @@ import { createServer, Socket } from 'net' import { defaultLogger } from '@libp2p/logger' +import { multiaddr } from '@multiformats/multiaddr' import { expect } from 'aegir/chai' -import defer from 'p-defer' -import Sinon from 'sinon' +import delay from 'delay' +import { pEvent } from 'p-event' import { toMultiaddrConnection } from '../src/socket-to-conn.js' -import type { Server, ServerOpts, SocketConstructorOpts } from 'net' +import type { Server, ServerOpts, SocketConstructorOpts } from 'node:net' -async function setup (opts?: { server?: ServerOpts, client?: SocketConstructorOpts }): Promise<{ server: Server, serverSocket: Socket, clientSocket: Socket }> { - const serverListening = defer() - - const server = createServer(opts?.server) - server.listen(0, () => { - serverListening.resolve() - }) - - await serverListening.promise +interface TestOptions { + server?: ServerOpts + client?: SocketConstructorOpts +} - const serverSocket = defer() - 
const clientSocket = defer() +interface TestFixture { + server: Server + serverSocket: Socket + clientSocket: Socket +} - server.once('connection', (socket) => { - serverSocket.resolve(socket) - }) +async function setup (opts?: TestOptions): Promise { + const server = createServer(opts?.server) + server.listen(0) + await pEvent(server, 'listening') const address = server.address() - if (address == null || typeof address === 'string') { throw new Error('Wrong socket type') } const client = new Socket(opts?.client) - client.once('connect', () => { - clientSocket.resolve(client) - }) client.connect(address.port, address.address) + const [ + serverSocket + ] = await Promise.all([ + pEvent(server, 'connection'), + pEvent(client, 'connect') + ]) + return { server, - serverSocket: await serverSocket.promise, - clientSocket: await clientSocket.promise + serverSocket, + clientSocket: client } } @@ -65,21 +68,23 @@ describe('socket-to-conn', () => { ({ server, clientSocket, serverSocket } = await setup()) // promise that is resolved when client socket is closed - const clientClosed = defer() + const clientClosed = Promise.withResolvers() // promise that is resolved when client socket errors - const clientErrored = defer() + const clientErrored = Promise.withResolvers() // promise that is resolved when our outgoing socket is closed - const serverClosed = defer() + const serverClosed = Promise.withResolvers() // promise that is resolved when our outgoing socket errors - const serverErrored = defer() - - const inboundMaConn = toMultiaddrConnection(serverSocket, { - socketInactivityTimeout: 100, - logger: defaultLogger(), - direction: 'inbound' + const serverErrored = Promise.withResolvers() + + const inboundMaConn = toMultiaddrConnection({ + socket: serverSocket, + inactivityTimeout: 100, + direction: 'inbound', + remoteAddr: multiaddr('/ip4/123.123.123.123/tcp/1234'), + log: defaultLogger().forComponent('libp2p:test-maconn') }) expect(inboundMaConn.timeline.open).to.be.ok() expect(inboundMaConn.timeline.close).to.not.be.ok() @@ -108,7 +113,11 @@ describe('socket-to-conn', () => { // server socket was closed for reading and writing await expect(serverClosed.promise).to.eventually.be.true() - // the connection closing was recorded + // the remote writable end closing was recorded + expect(inboundMaConn.timeline.remoteCloseWrite).to.be.a('number') + + // by default our tcp connections are not allowed to be half-open so we + // should have closed expect(inboundMaConn.timeline.close).to.be.a('number') // server socket is destroyed @@ -119,21 +128,26 @@ describe('socket-to-conn', () => { ({ server, clientSocket, serverSocket } = await setup()) // promise that is resolved when our outgoing socket is closed - const serverClosed = defer() + const serverClosed = Promise.withResolvers() // promise that is resolved when our outgoing socket errors - const serverErrored = defer() + const serverErrored = Promise.withResolvers() - const inboundMaConn = toMultiaddrConnection(serverSocket, { - socketInactivityTimeout: 100, - logger: defaultLogger(), - direction: 'inbound' + const inboundMaConn = toMultiaddrConnection({ + socket: serverSocket, + inactivityTimeout: 100, + direction: 'inbound', + log: defaultLogger().forComponent('libp2p:test-maconn') }) expect(inboundMaConn.timeline.open).to.be.ok() expect(inboundMaConn.timeline.close).to.not.be.ok() serverSocket.once('close', () => { serverClosed.resolve(true) + + // it's possible for the server socket to close cleanly in response to the + // client destroy if no data 
was being sent/read at the time + serverErrored.resolve(undefined) }) serverSocket.once('error', err => { serverErrored.resolve(err) @@ -148,16 +162,20 @@ describe('socket-to-conn', () => { const error = await serverErrored.promise - // the error can be of either type - if (error.name !== 'TimeoutError' && error.code !== 'ECONNRESET') { - expect.fail('promise rejected with unknown error type') + // if the error occurred, it can be of either type + if (error != null && (error.name !== 'TimeoutError' && error.code !== 'ECONNRESET')) { + expect.fail(`Promise rejected with unknown error type - ${error}`) } // server socket was closed for reading and writing await expect(serverClosed.promise).to.eventually.be.true() - // the connection closing was recorded - expect(inboundMaConn.timeline.close).to.be.a('number') + // the connection closing or aborting was recorded + if (error == null) { + expect(inboundMaConn.timeline.close).to.be.a('number') + } else { + expect(inboundMaConn.timeline.abort).to.be.a('number') + } // server socket is destroyed expect(serverSocket.destroyed).to.be.true() @@ -173,15 +191,16 @@ describe('socket-to-conn', () => { clientSocket.setTimeout(100) // promise that is resolved when our outgoing socket is closed - const serverClosed = defer() + const serverClosed = Promise.withResolvers() // promise that is resolved when the incoming socket is closed - const clientClosed = defer() + const clientClosed = Promise.withResolvers() - const inboundMaConn = toMultiaddrConnection(serverSocket, { - socketInactivityTimeout: 100, - logger: defaultLogger(), - direction: 'inbound' + const inboundMaConn = toMultiaddrConnection({ + socket: serverSocket, + inactivityTimeout: 100, + direction: 'inbound', + log: defaultLogger().forComponent('libp2p:test-maconn:inbound') }) expect(inboundMaConn.timeline.open).to.be.ok() expect(inboundMaConn.timeline.close).to.not.be.ok() @@ -219,16 +238,16 @@ describe('socket-to-conn', () => { expect(serverSocket.destroyed).to.be.true() }) - it('should destroy a socket after sinking', async () => { + it('should destroy a socket after closing', async () => { ({ server, clientSocket, serverSocket } = await setup()) // promise that is resolved when our outgoing socket is closed - const serverClosed = defer() + const serverClosed = Promise.withResolvers() - const inboundMaConn = toMultiaddrConnection(serverSocket, { - socketInactivityTimeout: 100, - logger: defaultLogger(), - direction: 'inbound' + const inboundMaConn = toMultiaddrConnection({ + socket: serverSocket, + direction: 'inbound', + log: defaultLogger().forComponent('libp2p:test-maconn:inbound') }) expect(inboundMaConn.timeline.open).to.be.ok() expect(inboundMaConn.timeline.close).to.not.be.ok() @@ -237,10 +256,12 @@ describe('socket-to-conn', () => { serverClosed.resolve(true) }) + // close the client writable end + clientSocket.end() + // send some data between the client and server - await inboundMaConn.sink(async function * () { - yield Uint8Array.from([0, 1, 2, 3]) - }()) + inboundMaConn.send(Uint8Array.from([0, 1, 2, 3])) + await inboundMaConn.closeWrite() // server socket should no longer be writable expect(serverSocket.writable).to.be.false() @@ -259,13 +280,12 @@ describe('socket-to-conn', () => { ({ server, clientSocket, serverSocket } = await setup()) // promise that is resolved when our outgoing socket is closed - const serverClosed = defer() + const serverClosed = Promise.withResolvers() - const inboundMaConn = toMultiaddrConnection(serverSocket, { - socketInactivityTimeout: 100, - 
socketCloseTimeout: 10, - logger: defaultLogger(), - direction: 'inbound' + const inboundMaConn = toMultiaddrConnection({ + socket: serverSocket, + direction: 'inbound', + log: defaultLogger().forComponent('libp2p:test-maconn') }) expect(inboundMaConn.timeline.open).to.be.ok() expect(inboundMaConn.timeline.close).to.not.be.ok() @@ -280,7 +300,10 @@ describe('socket-to-conn', () => { clientSocket.write('hello') serverSocket.write('goodbye') - await inboundMaConn.close() + // close the client writable end + clientSocket.end() + + await inboundMaConn.closeWrite() // server socket was closed for reading and writing await expect(serverClosed.promise).to.eventually.be.true() @@ -305,14 +328,12 @@ describe('socket-to-conn', () => { }) // promise that is resolved when our outgoing socket is closed - const serverClosed = defer() - const socketCloseTimeout = 10 - - const inboundMaConn = toMultiaddrConnection(proxyServerSocket, { - socketInactivityTimeout: 100, - socketCloseTimeout, - logger: defaultLogger(), - direction: 'inbound' + const serverClosed = Promise.withResolvers() + + const inboundMaConn = toMultiaddrConnection({ + socket: proxyServerSocket, + direction: 'inbound', + log: defaultLogger().forComponent('libp2p:test-maconn') }) expect(inboundMaConn.timeline.open).to.be.ok() expect(inboundMaConn.timeline.close).to.not.be.ok() @@ -327,15 +348,13 @@ describe('socket-to-conn', () => { clientSocket.write('hello') serverSocket.write('goodbye') - const signal = AbortSignal.timeout(socketCloseTimeout) - const addEventListenerSpy = Sinon.spy(signal, 'addEventListener') + // close the client writable end + clientSocket.end() // the 2nd and 3rd call should return immediately - await Promise.all([ - inboundMaConn.close({ signal }), - inboundMaConn.close({ signal }), - inboundMaConn.close({ signal }) - ]) + inboundMaConn.closeWrite() + inboundMaConn.closeWrite() + inboundMaConn.closeWrite() // server socket was closed for reading and writing await expect(serverClosed.promise).to.eventually.be.true() @@ -345,9 +364,6 @@ describe('socket-to-conn', () => { // server socket is destroyed expect(serverSocket.destroyed).to.be.true() - - // the server socket was only closed once - expect(addEventListenerSpy.callCount).to.equal(1) }) it('should destroy a socket when incoming MultiaddrConnection is closed', async () => { @@ -358,13 +374,12 @@ describe('socket-to-conn', () => { })) // promise that is resolved when our outgoing socket is closed - const serverClosed = defer() + const serverClosed = Promise.withResolvers() - const inboundMaConn = toMultiaddrConnection(serverSocket, { - socketInactivityTimeout: 100, - socketCloseTimeout: 10, - logger: defaultLogger(), - direction: 'inbound' + const inboundMaConn = toMultiaddrConnection({ + socket: serverSocket, + direction: 'inbound', + log: defaultLogger().forComponent('libp2p:test-maconn') }) expect(inboundMaConn.timeline.open).to.be.ok() expect(inboundMaConn.timeline.close).to.not.be.ok() @@ -379,7 +394,10 @@ describe('socket-to-conn', () => { clientSocket.write('hello') serverSocket.write('goodbye') - await inboundMaConn.close() + // close the client writable end + clientSocket.end() + + await inboundMaConn.closeWrite() // server socket was closed for reading and writing await expect(serverClosed.promise).to.eventually.be.true() @@ -391,7 +409,7 @@ describe('socket-to-conn', () => { expect(serverSocket.destroyed).to.be.true() }) - it('should destroy a socket when incoming MultiaddrConnection is closed but remote keeps sending data', async () => { + it('should 
destroy a socket when incoming MultiaddrConnection is aborted but remote keeps sending data', async () => { ({ server, clientSocket, serverSocket } = await setup({ server: { allowHalfOpen: true @@ -399,13 +417,13 @@ describe('socket-to-conn', () => { })) // promise that is resolved when our outgoing socket is closed - const serverClosed = defer() + const serverClosed = Promise.withResolvers() - const inboundMaConn = toMultiaddrConnection(serverSocket, { - socketInactivityTimeout: 500, - socketCloseTimeout: 100, - logger: defaultLogger(), - direction: 'inbound' + const inboundMaConn = toMultiaddrConnection({ + socket: serverSocket, + inactivityTimeout: 500, + direction: 'inbound', + log: defaultLogger().forComponent('libp2p:test-maconn') }) expect(inboundMaConn.timeline.open).to.be.ok() expect(inboundMaConn.timeline.close).to.not.be.ok() @@ -420,38 +438,43 @@ describe('socket-to-conn', () => { clientSocket.write('hello') serverSocket.write('goodbye') - setInterval(() => { + const interval = setInterval(() => { clientSocket.write(`some data ${Date.now()}`) - }, 10).unref() + }, 10) + + // ensure the sockets are open fully + await delay(1_000) - await inboundMaConn.close() + inboundMaConn.abort(new Error('Not interested')) // server socket was closed for reading and writing await expect(serverClosed.promise).to.eventually.be.true() // the connection closing was recorded - expect(inboundMaConn.timeline.close).to.be.a('number') + expect(inboundMaConn.timeline.abort).to.be.a('number') // server socket is destroyed expect(serverSocket.destroyed).to.be.true() + + clearInterval(interval) }) it('should destroy a socket by inactivity timeout', async () => { ({ server, clientSocket, serverSocket } = await setup()) // promise that is resolved when our outgoing socket is closed - const serverClosed = defer() + const serverClosed = Promise.withResolvers() // promise that resolves when reading from the outgoing socket times out - const serverTimedOut = defer() + const serverTimedOut = Promise.withResolvers() - const clientError = defer() + const clientError = Promise.withResolvers() - const inboundMaConn = toMultiaddrConnection(serverSocket, { - socketInactivityTimeout: 100, - socketCloseTimeout: 100, - logger: defaultLogger(), - direction: 'inbound' + const inboundMaConn = toMultiaddrConnection({ + socket: serverSocket, + inactivityTimeout: 100, + direction: 'inbound', + log: defaultLogger().forComponent('libp2p:test-maconn') }) expect(inboundMaConn.timeline.open).to.be.ok() expect(inboundMaConn.timeline.close).to.not.be.ok() diff --git a/packages/transport-webrtc/README.md b/packages/transport-webrtc/README.md index 6d90030e64..2a738c1ad7 100644 --- a/packages/transport-webrtc/README.md +++ b/packages/transport-webrtc/README.md @@ -58,7 +58,6 @@ import { webRTC } from '@libp2p/webrtc' import { webSockets } from '@libp2p/websockets' import { WebRTC } from '@multiformats/multiaddr-matcher' import delay from 'delay' -import { pipe } from 'it-pipe' import { createLibp2p } from 'libp2p' import type { Multiaddr } from '@multiformats/multiaddr' @@ -157,15 +156,11 @@ const stream = await dialer.dialProtocol(webRTCMultiaddr, dialer.services.echo.p await relay.stop() // send/receive some data from the remote peer via a direct connection -await pipe( - [new TextEncoder().encode('hello world')], - stream, - async source => { - for await (const buf of source) { - console.info(new TextDecoder().decode(buf.subarray())) - } - } -) +stream.send(new TextEncoder().encode('hello world')) + 
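+// data from the remote peer arrives as 'message' events on the stream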
+stream.addEventListener('message', (evt) => { + console.info(new TextDecoder().decode(evt.data.subarray())) +}) ``` ## Example - WebRTC Direct @@ -190,7 +185,6 @@ Node.js/go and rust-libp2p can listen on and dial WebRTC Direct addresses. ```TypeScript import { createLibp2p } from 'libp2p' import { multiaddr } from '@multiformats/multiaddr' -import { pipe } from 'it-pipe' import { fromString, toString } from 'uint8arrays' import { webRTCDirect } from '@libp2p/webrtc' @@ -219,15 +213,11 @@ const stream = await dialer.dialProtocol(listener.getMultiaddrs(), '/my-protocol signal: AbortSignal.timeout(10_000) }) -await pipe( - [fromString(`Hello js-libp2p-webrtc\n`)], - stream, - async function (source) { - for await (const buf of source) { - console.info(toString(buf.subarray())) - } - } -) +stream.send(new TextEncoder().encode('hello world')) + +stream.addEventListener('message', (evt) => { + console.info(new TextDecoder().decode(evt.data.subarray())) +}) ``` ## WebRTC Direct certificate hashes diff --git a/packages/transport-webrtc/package.json b/packages/transport-webrtc/package.json index 007a317c74..e1282a1dd7 100644 --- a/packages/transport-webrtc/package.json +++ b/packages/transport-webrtc/package.json @@ -57,7 +57,6 @@ "@multiformats/multiaddr-matcher": "^2.0.0", "@peculiar/webcrypto": "^1.5.0", "@peculiar/x509": "^1.12.3", - "any-signal": "^4.1.1", "detect-browser": "^5.3.0", "get-port": "^7.1.0", "interface-datastore": "^8.3.1", @@ -72,22 +71,19 @@ "p-wait-for": "^5.0.2", "progress-events": "^1.0.1", "protons-runtime": "^5.5.0", - "race-event": "^1.3.0", - "race-signal": "^1.1.3", + "race-event": "^1.6.0", "react-native-webrtc": "^124.0.5", "uint8-varint": "^2.0.4", "uint8arraylist": "^2.4.8", "uint8arrays": "^5.1.0" }, "devDependencies": { - "@libp2p/interface-compliance-tests": "^6.4.16", "@libp2p/logger": "^5.1.21", "@types/sinon": "^17.0.4", "aegir": "^47.0.14", + "any-signal": "^4.1.1", "datastore-core": "^10.0.2", "delay": "^6.0.0", - "it-length": "^3.0.8", - "it-pair": "^2.0.6", "p-retry": "^6.2.1", "protons": "^7.6.1", "sinon": "^20.0.0", diff --git a/packages/transport-webrtc/src/constants.ts b/packages/transport-webrtc/src/constants.ts index 7d2f8d04a6..fe20876aa3 100644 --- a/packages/transport-webrtc/src/constants.ts +++ b/packages/transport-webrtc/src/constants.ts @@ -41,11 +41,6 @@ export const CODEC_CERTHASH = 0x01d2 */ export const MAX_BUFFERED_AMOUNT = 2 * 1024 * 1024 -/** - * How long time we wait for the 'bufferedamountlow' event to be emitted - */ -export const BUFFERED_AMOUNT_LOW_TIMEOUT = 30 * 1000 - /** * Max message size that can be sent to the DataChannel. In browsers this is * 256KiB but go-libp2p and rust-libp2p only support 16KiB at the time of @@ -82,19 +77,6 @@ function calculateProtobufOverhead (maxMessageSize = MAX_MESSAGE_SIZE): number { */ export const PROTOBUF_OVERHEAD = calculateProtobufOverhead() -/** - * When closing streams we send a FIN then wait for the remote to - * reply with a FIN_ACK. If that does not happen within this timeout - * we close the stream anyway. - */ -export const FIN_ACK_TIMEOUT = 5_000 - -/** - * When sending data messages, if the channel is not in the "open" state, wait - * this long for the "open" event to fire. 
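With these fixed timeouts gone, waiting for a data channel to become usable is driven by the caller's `AbortSignal`, as the muxer's `onCreateStream` does further down. A minimal sketch of that pattern, assuming any standards-compliant `RTCDataChannel`:

```ts
import { raceEvent } from 'race-event'

// wait for an RTCDataChannel to become usable, aborting if the passed
// signal fires first - no hard-coded OPEN_TIMEOUT is involved
async function waitForChannelOpen (channel: RTCDataChannel, signal?: AbortSignal): Promise<void> {
  if (channel.readyState === 'open') {
    return
  }

  await raceEvent(channel, 'open', signal)
}
```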
- */ -export const OPEN_TIMEOUT = 5_000 - /** * When closing a stream, we wait for `bufferedAmount` to become 0 before * closing the underlying RTCDataChannel - this controls how long we wait in ms diff --git a/packages/transport-webrtc/src/index.ts b/packages/transport-webrtc/src/index.ts index 98113beafe..e89b6d5490 100644 --- a/packages/transport-webrtc/src/index.ts +++ b/packages/transport-webrtc/src/index.ts @@ -35,7 +35,6 @@ * import { webSockets } from '@libp2p/websockets' * import { WebRTC } from '@multiformats/multiaddr-matcher' * import delay from 'delay' - * import { pipe } from 'it-pipe' * import { createLibp2p } from 'libp2p' * import type { Multiaddr } from '@multiformats/multiaddr' * @@ -134,15 +133,11 @@ * await relay.stop() * * // send/receive some data from the remote peer via a direct connection - * await pipe( - * [new TextEncoder().encode('hello world')], - * stream, - * async source => { - * for await (const buf of source) { - * console.info(new TextDecoder().decode(buf.subarray())) - * } - * } - * ) + * stream.send(new TextEncoder().encode('hello world')) + * + * stream.addEventListener('message', (evt) => { + * console.info(new TextDecoder().decode(evt.data.subarray())) + * }) * ``` * * @example WebRTC Direct @@ -167,7 +162,6 @@ * ```TypeScript * import { createLibp2p } from 'libp2p' * import { multiaddr } from '@multiformats/multiaddr' - * import { pipe } from 'it-pipe' * import { fromString, toString } from 'uint8arrays' * import { webRTCDirect } from '@libp2p/webrtc' * @@ -196,15 +190,11 @@ * signal: AbortSignal.timeout(10_000) * }) * - * await pipe( - * [fromString(`Hello js-libp2p-webrtc\n`)], - * stream, - * async function (source) { - * for await (const buf of source) { - * console.info(toString(buf.subarray())) - * } - * } - * ) + * stream.send(new TextEncoder().encode('hello world')) + * + * stream.addEventListener('message', (evt) => { + * console.info(new TextDecoder().decode(evt.data.subarray())) + * }) * ``` * * ## WebRTC Direct certificate hashes diff --git a/packages/transport-webrtc/src/maconn.ts b/packages/transport-webrtc/src/maconn.ts deleted file mode 100644 index 02a41255df..0000000000 --- a/packages/transport-webrtc/src/maconn.ts +++ /dev/null @@ -1,101 +0,0 @@ -import { nopSink, nopSource } from './util.js' -import type { RTCPeerConnection } from './webrtc/index.js' -import type { ComponentLogger, Logger, MultiaddrConnection, MultiaddrConnectionTimeline, CounterGroup } from '@libp2p/interface' -import type { AbortOptions, Multiaddr } from '@multiformats/multiaddr' -import type { Source, Sink } from 'it-stream-types' -import type { Uint8ArrayList } from 'uint8arraylist' - -interface WebRTCMultiaddrConnectionInit { - /** - * WebRTC Peer Connection - */ - peerConnection: RTCPeerConnection - - /** - * The multiaddr address used to communicate with the remote peer - */ - remoteAddr: Multiaddr - - /** - * Holds the relevant events timestamps of the connection - */ - timeline: MultiaddrConnectionTimeline - - /** - * Optional metrics counter group for this connection - */ - metrics?: CounterGroup -} - -export interface WebRTCMultiaddrConnectionComponents { - logger: ComponentLogger -} - -export class WebRTCMultiaddrConnection implements MultiaddrConnection { - readonly log: Logger - - /** - * WebRTC Peer Connection - */ - readonly peerConnection: RTCPeerConnection - - /** - * The multiaddr address used to communicate with the remote peer - */ - remoteAddr: Multiaddr - - /** - * Holds the life cycle times of the connection - */ - timeline: 
MultiaddrConnectionTimeline - - /** - * Optional metrics counter group for this connection - */ - metrics?: CounterGroup - - /** - * The stream source, a no-op as the transport natively supports multiplexing - */ - source: AsyncGenerator = nopSource() - - /** - * The stream destination, a no-op as the transport natively supports multiplexing - */ - sink: Sink, Promise> = nopSink - - constructor (components: WebRTCMultiaddrConnectionComponents, init: WebRTCMultiaddrConnectionInit) { - this.log = components.logger.forComponent('libp2p:webrtc:maconn') - this.remoteAddr = init.remoteAddr - this.timeline = init.timeline - this.peerConnection = init.peerConnection - - const peerConnection = this.peerConnection - const initialState = peerConnection.connectionState - - this.peerConnection.onconnectionstatechange = () => { - this.log.trace('peer connection state change', peerConnection.connectionState, 'initial state', initialState) - - if (peerConnection.connectionState === 'disconnected' || peerConnection.connectionState === 'failed' || peerConnection.connectionState === 'closed') { - // nothing else to do but close the connection - this.timeline.close = Date.now() - } - } - } - - async close (options?: AbortOptions): Promise { - this.log.trace('closing connection') - - this.peerConnection.close() - this.timeline.close = Date.now() - this.metrics?.increment({ close: true }) - } - - abort (err: Error): void { - this.log.error('closing connection due to error', err) - - this.peerConnection.close() - this.timeline.close = Date.now() - this.metrics?.increment({ abort: true }) - } -} diff --git a/packages/transport-webrtc/src/muxer.ts b/packages/transport-webrtc/src/muxer.ts index b6ba6f616b..98aab81832 100644 --- a/packages/transport-webrtc/src/muxer.ts +++ b/packages/transport-webrtc/src/muxer.ts @@ -1,11 +1,9 @@ +import { AbstractStreamMuxer } from '@libp2p/utils' +import { raceEvent } from 'race-event' import { MUXER_PROTOCOL } from './constants.js' -import { createStream } from './stream.js' -import { drainAndClose, nopSink, nopSource } from './util.js' +import { createStream, WebRTCStream } from './stream.js' import type { DataChannelOptions } from './index.js' -import type { ComponentLogger, Logger, Stream, CounterGroup, StreamMuxer, StreamMuxerFactory, StreamMuxerInit } from '@libp2p/interface' -import type { AbortOptions } from '@multiformats/multiaddr' -import type { Source, Sink } from 'it-stream-types' -import type { Uint8ArrayList } from 'uint8arraylist' +import type { ComponentLogger, CounterGroup, StreamMuxer, StreamMuxerFactory, CreateStreamOptions, MultiaddrConnection } from '@libp2p/interface' export interface DataChannelMuxerFactoryInit { /** @@ -23,6 +21,9 @@ export interface DataChannelMuxerFactoryInit { */ metrics?: CounterGroup + /** + * Options used to create data channels + */ dataChannelOptions?: DataChannelOptions } @@ -30,12 +31,6 @@ export interface DataChannelMuxerFactoryComponents { logger: ComponentLogger } -interface BufferedStream { - stream: Stream - channel: RTCDataChannel - onEnd(err?: Error): void -} - export class DataChannelMuxerFactory implements StreamMuxerFactory { public readonly protocol: string @@ -43,69 +38,28 @@ export class DataChannelMuxerFactory implements StreamMuxerFactory { * WebRTC Peer Connection */ private readonly peerConnection: RTCPeerConnection - private bufferedStreams: BufferedStream[] = [] private readonly metrics?: CounterGroup private readonly dataChannelOptions?: DataChannelOptions - private readonly components: 
DataChannelMuxerFactoryComponents - private readonly log: Logger - constructor (components: DataChannelMuxerFactoryComponents, init: DataChannelMuxerFactoryInit) { - this.components = components + constructor (init: DataChannelMuxerFactoryInit) { this.peerConnection = init.peerConnection this.metrics = init.metrics this.protocol = init.protocol ?? MUXER_PROTOCOL this.dataChannelOptions = init.dataChannelOptions ?? {} - this.log = components.logger.forComponent('libp2p:webrtc:muxerfactory') - - // store any data channels opened before upgrade has been completed - this.peerConnection.ondatachannel = ({ channel }) => { - this.log.trace('incoming early datachannel with channel id %d and label "%s"', channel.id) - - // 'init' channel is only used during connection establishment - if (channel.label === 'init') { - this.log.trace('closing early init channel') - channel.close() - - return - } - - // @ts-expect-error fields are set below - const bufferedStream: BufferedStream = {} - - const stream = createStream({ - channel, - direction: 'inbound', - onEnd: (err) => { - bufferedStream.onEnd(err) - }, - log: this.log, - ...this.dataChannelOptions - }) - - bufferedStream.stream = stream - bufferedStream.channel = channel - bufferedStream.onEnd = () => { - this.bufferedStreams = this.bufferedStreams.filter(s => s.stream.id !== stream.id) - } - - this.bufferedStreams.push(bufferedStream) - } } - createStreamMuxer (init?: StreamMuxerInit): StreamMuxer { - return new DataChannelMuxer(this.components, { - ...init, + createStreamMuxer (maConn: MultiaddrConnection): StreamMuxer { + return new DataChannelMuxer(maConn, { peerConnection: this.peerConnection, dataChannelOptions: this.dataChannelOptions, metrics: this.metrics, - streams: this.bufferedStreams, protocol: this.protocol }) } } -export interface DataChannelMuxerInit extends DataChannelMuxerFactoryInit, StreamMuxerInit { - streams: BufferedStream[] +export interface DataChannelMuxerInit extends DataChannelMuxerFactoryInit { + protocol: string } export interface DataChannelMuxerComponents { @@ -115,23 +69,17 @@ export interface DataChannelMuxerComponents { /** * A libp2p data channel stream muxer */ -export class DataChannelMuxer implements StreamMuxer { - /** - * Array of streams in the data channel - */ - public streams: Stream[] - public protocol: string - - private readonly log: Logger +export class DataChannelMuxer extends AbstractStreamMuxer implements StreamMuxer { private readonly peerConnection: RTCPeerConnection private readonly dataChannelOptions: DataChannelOptions private readonly metrics?: CounterGroup - private readonly logger: ComponentLogger - constructor (components: DataChannelMuxerComponents, readonly init: DataChannelMuxerInit) { - this.log = init.log?.newScope('muxer') ?? components.logger.forComponent('libp2p:webrtc:muxer') - this.logger = components.logger - this.streams = init.streams.map(s => s.stream) + constructor (maConn: MultiaddrConnection, init: DataChannelMuxerInit) { + super(maConn, { + ...init, + name: 'muxer' + }) + this.peerConnection = init.peerConnection this.protocol = init.protocol ?? 
MUXER_PROTOCOL this.metrics = init.metrics @@ -154,109 +102,34 @@ export class DataChannelMuxer implements StreamMuxer { return } - // lib-datachannel throws if `.getId` is called on a closed channel so - // memoize it - const id = channel.id - const stream = createStream({ channel, direction: 'inbound', - onEnd: () => { - this.#onStreamEnd(stream, channel) - this.log('incoming channel %s ended', id) - }, log: this.log, ...this.dataChannelOptions }) - this.streams.push(stream) - this.metrics?.increment({ incoming_stream: true }) - init?.onIncomingStream?.(stream) - } - - // the DataChannelMuxer constructor is called during set up of the - // connection by the upgrader. - // - // If we invoke `init.onIncomingStream` immediately, the connection object - // will not be set up yet so add a tiny delay before letting the - // connection know about early streams - if (this.init.streams.length > 0) { - queueMicrotask(() => { - this.init.streams.forEach(bufferedStream => { - bufferedStream.onEnd = () => { - this.log('incoming early channel %s ended with state %s', bufferedStream.channel.id, bufferedStream.channel.readyState) - this.#onStreamEnd(bufferedStream.stream, bufferedStream.channel) - } - - this.metrics?.increment({ incoming_stream: true }) - this.init?.onIncomingStream?.(bufferedStream.stream) - }) - }) + this.onRemoteStream(stream) } } - #onStreamEnd (stream: Stream, channel: RTCDataChannel): void { - this.log.trace('stream %s %s %s onEnd', stream.direction, stream.id, stream.protocol) - drainAndClose( - channel, - `${stream.direction} ${stream.id} ${stream.protocol}`, - this.dataChannelOptions.drainTimeout, { - log: this.log - } - ) - this.streams = this.streams.filter(s => s.id !== stream.id) - this.metrics?.increment({ stream_end: true }) - this.init?.onStreamEnd?.(stream) - } + async onCreateStream (options?: CreateStreamOptions): Promise { + // The spec says the label MUST be an empty string: https://github.com/libp2p/specs/blob/master/webrtc/README.md#rtcdatachannel-label + const channel = this.peerConnection.createDataChannel('', { + // TODO: pre-negotiate stream protocol + // protocol: options?.protocol + }) - /** - * Gracefully close all tracked streams and stop the muxer - */ - async close (options?: AbortOptions): Promise { - try { - await Promise.all( - this.streams.map(async stream => stream.close(options)) - ) - } catch (err: any) { - this.abort(err) - } - } + if (channel.readyState !== 'open') { + this.log('channel state is "%s" and not "open", waiting for "open" event before sending data', channel.readyState) + await raceEvent(channel, 'open', options?.signal) - /** - * Abort all tracked streams and stop the muxer - */ - abort (err: Error): void { - for (const stream of this.streams) { - stream.abort(err) + this.log('channel state is now "%s", sending data', channel.readyState) } - } - - /** - * The stream source, a no-op as the transport natively supports multiplexing - */ - source: AsyncGenerator = nopSource() - - /** - * The stream destination, a no-op as the transport natively supports multiplexing - */ - sink: Sink, Promise> = nopSink - - newStream (): Stream { - // The spec says the label MUST be an empty string: https://github.com/libp2p/specs/blob/master/webrtc/README.md#rtcdatachannel-label - const channel = this.peerConnection.createDataChannel('') - // lib-datachannel throws if `.getId` is called on a closed channel so - // memoize it - const id = channel.id - - this.log.trace('opened outgoing datachannel with channel id %s', id) const stream = createStream({ 
channel, direction: 'outbound', - onEnd: () => { - this.#onStreamEnd(stream, channel) - this.log('outgoing channel %s ended', id) - }, log: this.log, ...this.dataChannelOptions }) @@ -265,4 +138,8 @@ export class DataChannelMuxer implements StreamMuxer { return stream } + + onData (): void { + + } } diff --git a/packages/transport-webrtc/src/private-to-private/initiate-connection.ts b/packages/transport-webrtc/src/private-to-private/initiate-connection.ts index c78b1a9646..7f769a2d1c 100644 --- a/packages/transport-webrtc/src/private-to-private/initiate-connection.ts +++ b/packages/transport-webrtc/src/private-to-private/initiate-connection.ts @@ -1,4 +1,4 @@ -import { pbStream } from 'it-protobuf-stream' +import { pbStream } from '@libp2p/utils' import { CustomProgressEvent } from 'progress-events' import { SIGNALING_PROTOCOL } from '../constants.js' import { SDPHandshakeFailedError } from '../error.js' @@ -9,15 +9,14 @@ import { splitAddr } from './transport.js' import { readCandidatesUntilConnected } from './util.js' import type { WebRTCDialEvents, WebRTCTransportMetrics } from './transport.js' import type { DataChannelOptions } from '../index.js' -import type { LoggerOptions, Connection, ComponentLogger, IncomingStreamData } from '@libp2p/interface' +import type { LoggerOptions, Connection, ComponentLogger } from '@libp2p/interface' import type { ConnectionManager, TransportManager } from '@libp2p/interface-internal' -import type { Multiaddr } from '@multiformats/multiaddr' +import type { AbortOptions, Multiaddr } from '@multiformats/multiaddr' import type { ProgressOptions } from 'progress-events' -export interface IncomingStreamOpts extends IncomingStreamData { +export interface IncomingStreamOptions extends AbortOptions { rtcConfiguration?: RTCConfiguration dataChannelOptions?: Partial - signal: AbortSignal } export interface ConnectOptions extends LoggerOptions, ProgressOptions { @@ -68,8 +67,6 @@ export async function initiateConnection ({ rtcConfiguration, dataChannel, signa const messageStream = pbStream(stream).pb(Message) const peerConnection = new RTCPeerConnection(rtcConfiguration) const muxerFactory = new DataChannelMuxerFactory({ - logger - }, { peerConnection, dataChannelOptions: dataChannel }) @@ -163,7 +160,7 @@ export async function initiateConnection ({ rtcConfiguration, dataChannel, signa onProgress?.(new CustomProgressEvent('webrtc:close-signaling-stream')) log.trace('closing signaling channel') - await stream.close({ + await stream.closeWrite({ signal }) diff --git a/packages/transport-webrtc/src/private-to-private/signaling-stream-handler.ts b/packages/transport-webrtc/src/private-to-private/signaling-stream-handler.ts index 34a31da8ed..5196f77428 100644 --- a/packages/transport-webrtc/src/private-to-private/signaling-stream-handler.ts +++ b/packages/transport-webrtc/src/private-to-private/signaling-stream-handler.ts @@ -1,20 +1,19 @@ +import { pbStream } from '@libp2p/utils' import { multiaddr } from '@multiformats/multiaddr' -import { pbStream } from 'it-protobuf-stream' import { SDPHandshakeFailedError } from '../error.js' import { RTCSessionDescription } from '../webrtc/index.js' import { Message } from './pb/message.js' -import { getConnectionState, readCandidatesUntilConnected } from './util.js' +import { getConnectionState, getRemotePeer, readCandidatesUntilConnected } from './util.js' import type { RTCPeerConnection } from '../webrtc/index.js' -import type { Logger, IncomingStreamData } from '@libp2p/interface' +import type { AbortOptions, Connection, 
Logger, PeerId, Stream } from '@libp2p/interface' import type { Multiaddr } from '@multiformats/multiaddr' -export interface IncomingStreamOpts extends IncomingStreamData { +export interface IncomingStreamOptions extends AbortOptions { peerConnection: RTCPeerConnection - signal: AbortSignal log: Logger } -export async function handleIncomingStream ({ peerConnection, stream, signal, connection, log }: IncomingStreamOpts): Promise<{ remoteAddress: Multiaddr }> { +export async function handleIncomingStream (stream: Stream, connection: Connection, { peerConnection, signal, log }: IncomingStreamOptions): Promise<{ remoteAddress: Multiaddr, remotePeer: PeerId }> { log.trace('new inbound signaling stream') const messageStream = pbStream(stream).pb(Message) @@ -101,9 +100,13 @@ export async function handleIncomingStream ({ peerConnection, stream, signal, co } } - const remoteAddress = multiaddr(`/webrtc/p2p/${connection.remoteAddr.getPeerId()}`) + const remotePeer = getRemotePeer(connection.remoteAddr) + const remoteAddress = multiaddr(`/webrtc/p2p/${remotePeer}`) log.trace('recipient connected to remote address %s', remoteAddress) - return { remoteAddress } + return { + remoteAddress, + remotePeer + } } diff --git a/packages/transport-webrtc/src/private-to-private/transport.ts b/packages/transport-webrtc/src/private-to-private/transport.ts index 19478c0a87..64d9eb9c0d 100644 --- a/packages/transport-webrtc/src/private-to-private/transport.ts +++ b/packages/transport-webrtc/src/private-to-private/transport.ts @@ -4,15 +4,16 @@ import { multiaddr } from '@multiformats/multiaddr' import { WebRTC } from '@multiformats/multiaddr-matcher' import { setMaxListeners } from 'main-event' import { SIGNALING_PROTOCOL } from '../constants.js' -import { WebRTCMultiaddrConnection } from '../maconn.js' import { DataChannelMuxerFactory } from '../muxer.js' +import { toMultiaddrConnection } from '../rtcpeerconnection-to-conn.ts' import { getRtcConfiguration } from '../util.js' import { RTCPeerConnection } from '../webrtc/index.js' import { initiateConnection } from './initiate-connection.js' import { WebRTCPeerListener } from './listener.js' import { handleIncomingStream } from './signaling-stream-handler.js' +import { getRemotePeer } from './util.ts' import type { DataChannelOptions } from '../index.js' -import type { OutboundConnectionUpgradeEvents, CreateListenerOptions, DialTransportOptions, Transport, Listener, Upgrader, ComponentLogger, Logger, Connection, PeerId, CounterGroup, Metrics, Startable, OpenConnectionProgressEvents, IncomingStreamData, Libp2pEvents } from '@libp2p/interface' +import type { OutboundConnectionUpgradeEvents, CreateListenerOptions, DialTransportOptions, Transport, Listener, Upgrader, ComponentLogger, Logger, Connection, PeerId, CounterGroup, Metrics, Startable, OpenConnectionProgressEvents, Libp2pEvents, MultiaddrConnection, Stream } from '@libp2p/interface' import type { Registrar, ConnectionManager, TransportManager } from '@libp2p/interface-internal' import type { Multiaddr } from '@multiformats/multiaddr' import type { TypedEventTarget } from 'main-event' @@ -115,13 +116,13 @@ export class WebRTCTransport implements Transport, Startable { } async start (): Promise { - await this.components.registrar.handle(SIGNALING_PROTOCOL, (data: IncomingStreamData) => { + await this.components.registrar.handle(SIGNALING_PROTOCOL, (stream: Stream, connection: Connection) => { // ensure we don't try to upgrade forever const signal = 
this.components.upgrader.createInboundAbortSignal(this.shutdownController.signal) - this._onProtocol(data, signal) + this._onProtocol(stream, connection, signal) .catch(err => { - this.log.error('failed to handle incoming connect from %p', data.connection.remotePeer, err) + this.log.error('failed to handle incoming connect from %p', connection.remotePeer, err) }) .finally(() => { signal.clear() @@ -180,16 +181,18 @@ export class WebRTCTransport implements Transport, Startable { onProgress: options.onProgress }) - const webRTCConn = new WebRTCMultiaddrConnection(this.components, { + const webRTCConn = toMultiaddrConnection({ peerConnection, - timeline: { open: Date.now() }, remoteAddr: remoteAddress, - metrics: this.metrics?.dialerEvents + metrics: this.metrics?.dialerEvents, + direction: 'outbound', + log: this.components.logger.forComponent('libp2p:webrtc:connection:outbound') }) const connection = await options.upgrader.upgradeOutbound(webRTCConn, { skipProtection: true, skipEncryption: true, + remotePeer: getRemotePeer(ma), muxerFactory, onProgress: options.onProgress, signal: options.signal @@ -201,37 +204,37 @@ export class WebRTCTransport implements Transport, Startable { return connection } - async _onProtocol ({ connection, stream }: IncomingStreamData, signal: AbortSignal): Promise { + async _onProtocol (stream: Stream, connection: Connection, signal: AbortSignal): Promise { const peerConnection = new RTCPeerConnection(await getRtcConfiguration(this.init.rtcConfiguration)) - const muxerFactory = new DataChannelMuxerFactory(this.components, { + const muxerFactory = new DataChannelMuxerFactory({ peerConnection, dataChannelOptions: this.init.dataChannel }) try { - const { remoteAddress } = await handleIncomingStream({ + const { remoteAddress, remotePeer } = await handleIncomingStream(stream, connection, { peerConnection, - connection, - stream, signal, log: this.log }) // close the stream if SDP messages have been exchanged successfully - await stream.close({ + await stream.closeWrite({ signal }) - const webRTCConn = new WebRTCMultiaddrConnection(this.components, { + const webRTCConn = toMultiaddrConnection({ peerConnection, - timeline: { open: (new Date()).getTime() }, remoteAddr: remoteAddress, - metrics: this.metrics?.listenerEvents + metrics: this.metrics?.listenerEvents, + direction: 'inbound', + log: this.components.logger.forComponent('libp2p:webrtc:connection:inbound') }) await this.components.upgrader.upgradeInbound(webRTCConn, { skipEncryption: true, skipProtection: true, + remotePeer, muxerFactory, signal }) @@ -247,10 +250,10 @@ export class WebRTCTransport implements Transport, Startable { } } - private _closeOnShutdown (pc: RTCPeerConnection, webRTCConn: WebRTCMultiaddrConnection): void { + private _closeOnShutdown (pc: RTCPeerConnection, webRTCConn: MultiaddrConnection): void { // close the connection on shut down const shutDownListener = (): void => { - webRTCConn.close() + webRTCConn.closeWrite() .catch(err => { this.log.error('could not close WebRTCMultiaddrConnection', err) }) diff --git a/packages/transport-webrtc/src/private-to-private/util.ts b/packages/transport-webrtc/src/private-to-private/util.ts index e2af91f898..6ec7946b1c 100644 --- a/packages/transport-webrtc/src/private-to-private/util.ts +++ b/packages/transport-webrtc/src/private-to-private/util.ts @@ -1,4 +1,5 @@ -import { ConnectionFailedError, InvalidMessageError } from '@libp2p/interface' +import { ConnectionFailedError, InvalidMessageError, InvalidMultiaddrError } from '@libp2p/interface' +import 
{ peerIdFromString } from '@libp2p/peer-id' import pDefer from 'p-defer' import { CustomProgressEvent } from 'progress-events' import { isFirefox } from '../util.js' @@ -6,7 +7,8 @@ import { RTCIceCandidate } from '../webrtc/index.js' import { Message } from './pb/message.js' import type { WebRTCDialEvents } from './transport.js' import type { RTCPeerConnection } from '../webrtc/index.js' -import type { AbortOptions, LoggerOptions, Stream } from '@libp2p/interface' +import type { AbortOptions, LoggerOptions, PeerId, Stream } from '@libp2p/interface' +import type { Multiaddr } from '@multiformats/multiaddr' import type { MessageStream } from 'it-protobuf-stream' import type { DeferredPromise } from 'p-defer' import type { ProgressOptions } from 'progress-events' @@ -94,3 +96,19 @@ function resolveOnConnected (pc: RTCPeerConnection, promise: DeferredPromise { @@ -137,7 +136,7 @@ export async function connect (peerConnection: DirectRTCPeerConnection, ufrag: s case 'failed': case 'disconnected': case 'closed': - maConn.close().catch((err) => { + maConn.closeWrite().catch((err) => { options.log.error('error closing connection', err) maConn.abort(err) }) @@ -150,7 +149,7 @@ export async function connect (peerConnection: DirectRTCPeerConnection, ufrag: s // Track opened peer connection options.events?.increment({ peer_connection: true }) - const muxerFactory = new DataChannelMuxerFactory(options, { + const muxerFactory = new DataChannelMuxerFactory({ peerConnection, metrics: options.events, dataChannelOptions: options.dataChannel @@ -161,8 +160,8 @@ export async function connect (peerConnection: DirectRTCPeerConnection, ufrag: s // handshake. Therefore, we need to secure an inbound noise connection // from the server. options.log.trace('%s secure inbound', options.role) - await connectionEncrypter.secureInbound(handshakeStream, { - remotePeer: options.remotePeerId, + const result = await connectionEncrypter.secureInbound(handshakeStream, { + remotePeer: options.remotePeer, signal: options.signal, skipStreamMuxerNegotiation: true }) @@ -171,6 +170,7 @@ export async function connect (peerConnection: DirectRTCPeerConnection, ufrag: s return await options.upgrader.upgradeOutbound(maConn, { skipProtection: true, skipEncryption: true, + remotePeer: result.remotePeer, muxerFactory, signal: options.signal }) @@ -181,7 +181,7 @@ export async function connect (peerConnection: DirectRTCPeerConnection, ufrag: s // the client. 
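The `getRemotePeer` helper imported above is used to derive the remote `PeerId` when upgrading connections. A hypothetical sketch of what such a helper typically does (read the `/p2p` component of a multiaddr and convert it with `peerIdFromString`), not necessarily the exact implementation in this changeset:

```ts
import { InvalidMultiaddrError } from '@libp2p/interface'
import { peerIdFromString } from '@libp2p/peer-id'
import type { PeerId } from '@libp2p/interface'
import type { Multiaddr } from '@multiformats/multiaddr'

// hypothetical sketch - derive the remote PeerId from a dialled multiaddr
function remotePeerFromMultiaddr (ma: Multiaddr): PeerId {
  const peerIdStr = ma.getPeerId()

  if (peerIdStr == null) {
    throw new InvalidMultiaddrError(`${ma} did not contain a peer id`)
  }

  return peerIdFromString(peerIdStr)
}
```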
options.log.trace('%s secure outbound', options.role) const result = await connectionEncrypter.secureOutbound(handshakeStream, { - remotePeer: options.remotePeerId, + remotePeer: options.remotePeer, signal: options.signal, skipStreamMuxerNegotiation: true }) @@ -193,6 +193,7 @@ export async function connect (peerConnection: DirectRTCPeerConnection, ufrag: s await options.upgrader.upgradeInbound(maConn, { skipProtection: true, skipEncryption: true, + remotePeer: result.remotePeer, muxerFactory, signal: options.signal }) diff --git a/packages/transport-webrtc/src/rtcpeerconnection-to-conn.ts b/packages/transport-webrtc/src/rtcpeerconnection-to-conn.ts new file mode 100644 index 0000000000..41b8981154 --- /dev/null +++ b/packages/transport-webrtc/src/rtcpeerconnection-to-conn.ts @@ -0,0 +1,66 @@ +import { AbstractMultiaddrConnection } from '@libp2p/utils' +import type { RTCPeerConnection } from './webrtc/index.js' +import type { AbortOptions, MultiaddrConnection } from '@libp2p/interface' +import type { AbstractMultiaddrConnectionInit, SendResult } from '@libp2p/utils' +import type { Uint8ArrayList } from 'uint8arraylist' + +export interface RTCPeerConnectionMultiaddrConnectionInit extends Omit { + peerConnection: RTCPeerConnection +} + +class RTCPeerConnectionMultiaddrConnection extends AbstractMultiaddrConnection { + private peerConnection: RTCPeerConnection + + constructor (init: RTCPeerConnectionMultiaddrConnectionInit) { + super(init) + + this.peerConnection = init.peerConnection + + const initialState = init.peerConnection.connectionState + + this.peerConnection.onconnectionstatechange = () => { + this.log.trace('peer connection state change %s initial state %s', this.peerConnection.connectionState, initialState) + + if (this.peerConnection.connectionState === 'disconnected' || this.peerConnection.connectionState === 'failed' || this.peerConnection.connectionState === 'closed') { + // nothing else to do but close the connection + this.onRemoteCloseWrite() + } + } + } + + sendData (data: Uint8ArrayList): SendResult { + return { + sentBytes: data.byteLength, + canSendMore: true + } + } + + async sendCloseWrite (options?: AbortOptions): Promise { + this.peerConnection.close() + options?.signal?.throwIfAborted() + } + + async sendCloseRead (options?: AbortOptions): Promise { + options?.signal?.throwIfAborted() + } + + sendReset (): void { + this.peerConnection.close() + } + + sendPause (): void { + // TODO: readable backpressure? + } + + sendResume (): void { + // TODO: readable backpressure? 
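+    // RTCDataChannel exposes no API for pausing delivery of incoming
+    // messages, so this is currently a no-op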
+ } +} + +/** + * Convert a RTCPeerConnection into a MultiaddrConnection + * https://github.com/libp2p/interface-transport#multiaddrconnection + */ +export const toMultiaddrConnection = (init: RTCPeerConnectionMultiaddrConnectionInit): MultiaddrConnection => { + return new RTCPeerConnectionMultiaddrConnection(init) +} diff --git a/packages/transport-webrtc/src/stream.ts b/packages/transport-webrtc/src/stream.ts index 918a138ce0..9e1432bbdf 100644 --- a/packages/transport-webrtc/src/stream.ts +++ b/packages/transport-webrtc/src/stream.ts @@ -1,20 +1,14 @@ -import { StreamStateError, TimeoutError } from '@libp2p/interface' -import { AbstractStream } from '@libp2p/utils/abstract-stream' -import { anySignal } from 'any-signal' +import { StreamStateError } from '@libp2p/interface' +import { AbstractStream } from '@libp2p/utils' import * as lengthPrefixed from 'it-length-prefixed' import { pushable } from 'it-pushable' -import pDefer from 'p-defer' -import pTimeout from 'p-timeout' -import { raceEvent } from 'race-event' -import { raceSignal } from 'race-signal' import { Uint8ArrayList } from 'uint8arraylist' -import { BUFFERED_AMOUNT_LOW_TIMEOUT, FIN_ACK_TIMEOUT, MAX_BUFFERED_AMOUNT, MAX_MESSAGE_SIZE, OPEN_TIMEOUT, PROTOBUF_OVERHEAD } from './constants.js' +import { MAX_BUFFERED_AMOUNT, MAX_MESSAGE_SIZE, PROTOBUF_OVERHEAD } from './constants.js' import { Message } from './private-to-public/pb/message.js' import type { DataChannelOptions } from './index.js' -import type { AbortOptions, Direction, Logger } from '@libp2p/interface' -import type { AbstractStreamInit } from '@libp2p/utils/abstract-stream' +import type { AbortOptions, MessageStreamDirection, Logger } from '@libp2p/interface' +import type { AbstractStreamInit, SendResult } from '@libp2p/utils' import type { Pushable } from 'it-pushable' -import type { DeferredPromise } from 'p-defer' export interface WebRTCStreamInit extends AbstractStreamInit, DataChannelOptions { /** @@ -42,22 +36,13 @@ export class WebRTCStream extends AbstractStream { private readonly maxBufferedAmount: number - private readonly bufferedAmountLowEventTimeout: number - /** * The maximum size of a message in bytes */ private readonly maxMessageSize: number - /** - * When this promise is resolved, the remote has sent us a FIN flag - */ - private readonly receiveFinAck: DeferredPromise - private readonly finAckTimeout: number - private readonly openTimeout: number - private readonly closeController: AbortController - constructor (init: WebRTCStreamInit) { + /* // override onEnd to send/receive FIN_ACK before closing the stream const originalOnEnd = init.onEnd init.onEnd = (err?: Error): void => { @@ -91,24 +76,19 @@ export class WebRTCStream extends AbstractStream { this.channel.close() }) } + */ super(init) this.channel = init.channel this.channel.binaryType = 'arraybuffer' this.incomingData = pushable() - this.bufferedAmountLowEventTimeout = init.bufferedAmountLowEventTimeout ?? BUFFERED_AMOUNT_LOW_TIMEOUT this.maxBufferedAmount = init.maxBufferedAmount ?? MAX_BUFFERED_AMOUNT this.maxMessageSize = (init.maxMessageSize ?? MAX_MESSAGE_SIZE) - PROTOBUF_OVERHEAD - this.receiveFinAck = pDefer() - this.finAckTimeout = init.closeTimeout ?? FIN_ACK_TIMEOUT - this.openTimeout = init.openTimeout ?? 
OPEN_TIMEOUT - this.closeController = new AbortController() // set up initial state switch (this.channel.readyState) { case 'open': - this.timeline.open = new Date().getTime() break case 'closed': @@ -127,31 +107,15 @@ export class WebRTCStream extends AbstractStream { } // handle RTCDataChannel events - this.channel.onopen = (_evt) => { - this.timeline.open = new Date().getTime() - } - this.channel.onclose = (_evt) => { this.log.trace('received onclose event') - // stop any in-progress writes - this.closeController.abort() - - // if the channel has closed we'll never receive a FIN_ACK so resolve the - // promise so we don't try to wait later - this.receiveFinAck.resolve() - - void this.close().catch(err => { - this.log.error('error closing stream after channel closed', err) - }) + this.onRemoteCloseWrite() } this.channel.onerror = (evt) => { this.log.trace('received onerror event') - // stop any in-progress writes - this.closeController.abort() - const err = (evt as RTCErrorEvent).error this.abort(err) } @@ -166,6 +130,10 @@ export class WebRTCStream extends AbstractStream { this.incomingData.push(new Uint8Array(data, 0, data.byteLength)) } + this.channel.onbufferedamountlow = () => { + this.safeDispatchEvent('drain') + } + const self = this // pipe framed protobuf messages through a length prefixed decoder, and @@ -175,7 +143,7 @@ export class WebRTCStream extends AbstractStream { const message = self.processIncomingProtobuf(buf) if (message != null) { - self.sourcePush(new Uint8ArrayList(message)) + self.onData(new Uint8ArrayList(message)) } } }) @@ -188,49 +156,11 @@ export class WebRTCStream extends AbstractStream { // opening new streams is handled by WebRTC so this is a noop } - async _sendMessage (data: Uint8ArrayList, checkBuffer: boolean = true): Promise { + _sendMessage (data: Uint8ArrayList): void { if (this.channel.readyState === 'closed' || this.channel.readyState === 'closing') { throw new StreamStateError(`Invalid datachannel state - ${this.channel.readyState}`) } - if (this.channel.readyState !== 'open') { - const timeout = AbortSignal.timeout(this.openTimeout) - const signal = anySignal([ - this.closeController.signal, - timeout - ]) - - try { - this.log('channel state is "%s" and not "open", waiting for "open" event before sending data', this.channel.readyState) - await raceEvent(this.channel, 'open', signal) - } finally { - signal.clear() - } - - this.log('channel state is now "%s", sending data', this.channel.readyState) - } - - if (checkBuffer && this.channel.bufferedAmount > this.maxBufferedAmount) { - const timeout = AbortSignal.timeout(this.bufferedAmountLowEventTimeout) - const signal = anySignal([ - this.closeController.signal, - timeout - ]) - - try { - this.log('channel buffer is %d, wait for "bufferedamountlow" event', this.channel.bufferedAmount) - await raceEvent(this.channel, 'bufferedamountlow', signal) - } catch (err: any) { - if (timeout.aborted) { - throw new TimeoutError(`Timed out waiting for DataChannel buffer to clear after ${this.bufferedAmountLowEventTimeout}ms`) - } - - throw err - } finally { - signal.clear() - } - } - try { this.log.trace('sending message, channel state "%s"', this.channel.readyState) // send message without copying data @@ -240,29 +170,22 @@ export class WebRTCStream extends AbstractStream { } } - async sendData (data: Uint8ArrayList): Promise { - const bytesTotal = data.byteLength - // sending messages is an async operation so use a copy of the list as it - // may be changed beneath us - data = data.sublist() - - while 
(data.byteLength > 0) { - const toSend = Math.min(data.byteLength, this.maxMessageSize) - const buf = data.subarray(0, toSend) - const messageBuf = Message.encode({ message: buf }) - const sendBuf = lengthPrefixed.encode.single(messageBuf) - this.log.trace('sending %d/%d bytes on channel', buf.byteLength, bytesTotal) - await this._sendMessage(sendBuf) - - data.consume(toSend) - } + sendData (data: Uint8ArrayList): SendResult { + const messageBuf = Message.encode({ + message: data.subarray() + }) + const prefixedBuf = lengthPrefixed.encode.single(messageBuf) + this._sendMessage(prefixedBuf) - this.log.trace('finished sending data, channel state "%s"', this.channel.readyState) + return { + sentBytes: data.byteLength, + canSendMore: this.channel.bufferedAmount > this.maxBufferedAmount + } } - async sendReset (): Promise { + sendReset (): void { try { - await this._sendFlag(Message.Flag.RESET) + this._sendFlag(Message.Flag.RESET) } catch (err) { this.log.error('failed to send reset - %e', err) } finally { @@ -271,37 +194,19 @@ export class WebRTCStream extends AbstractStream { } async sendCloseWrite (options: AbortOptions): Promise { - if (this.channel.readyState !== 'open') { - this.receiveFinAck.resolve() - return + if (this.channel.readyState === 'open') { + this._sendFlag(Message.Flag.FIN) } - const sent = await this._sendFlag(Message.Flag.FIN) - - if (sent) { - this.log.trace('awaiting FIN_ACK') - try { - await raceSignal(this.receiveFinAck.promise, options?.signal, { - errorMessage: 'sending close-write was aborted before FIN_ACK was received', - errorName: 'FinAckNotReceivedError' - }) - } catch (err) { - this.log.error('failed to await FIN_ACK', err) - } - } else { - this.log.trace('sending FIN failed, not awaiting FIN_ACK') - } - - // if we've attempted to receive a FIN_ACK, do not try again - this.receiveFinAck.resolve() + options.signal?.throwIfAborted() } - async sendCloseRead (): Promise { - if (this.channel.readyState !== 'open') { - return + async sendCloseRead (options: AbortOptions): Promise { + if (this.channel.readyState === 'open') { + this._sendFlag(Message.Flag.STOP_SENDING) } - await this._sendFlag(Message.Flag.STOP_SENDING) + options.signal?.throwIfAborted() } /** @@ -315,38 +220,27 @@ export class WebRTCStream extends AbstractStream { if (message.flag === Message.Flag.FIN) { // We should expect no more data from the remote, stop reading - this.remoteCloseWrite() - - this.log.trace('sending FIN_ACK') - void this._sendFlag(Message.Flag.FIN_ACK) - .catch(err => { - this.log.error('error sending FIN_ACK immediately', err) - }) + this.onRemoteCloseWrite() } if (message.flag === Message.Flag.RESET) { // Stop reading and writing to the stream immediately - this.reset() + this.onRemoteReset() } if (message.flag === Message.Flag.STOP_SENDING) { // The remote has stopped reading - this.remoteCloseRead() - } - - if (message.flag === Message.Flag.FIN_ACK) { - this.log.trace('received FIN_ACK') - this.receiveFinAck.resolve() + this.onRemoteCloseRead() } } // ignore data messages if we've closed the readable end already - if (this.readStatus === 'ready') { + if (this.readStatus === 'readable') { return message.message } } - private async _sendFlag (flag: Message.Flag): Promise { + private _sendFlag (flag: Message.Flag): boolean { if (this.channel.readyState !== 'open') { // flags can be sent while we or the remote are closing the datachannel so // if the channel isn't open, don't try to send it but return false to let @@ -360,7 +254,7 @@ export class WebRTCStream extends 
AbstractStream { const prefixedBuf = lengthPrefixed.encode.single(messageBuf) try { - await this._sendMessage(prefixedBuf, false) + this._sendMessage(prefixedBuf) return true } catch (err: any) { @@ -369,6 +263,14 @@ export class WebRTCStream extends AbstractStream { return false } + + sendPause (): void { + // TODO: read backpressure? + } + + sendResume (): void { + // TODO: read backpressure? + } } export interface WebRTCStreamOptions extends DataChannelOptions { @@ -383,12 +285,7 @@ export interface WebRTCStreamOptions extends DataChannelOptions { /** * The stream direction */ - direction: Direction - - /** - * A callback invoked when the channel ends - */ - onEnd?(err?: Error | undefined): void + direction: MessageStreamDirection /** * The logger to create a scope from @@ -399,15 +296,15 @@ export interface WebRTCStreamOptions extends DataChannelOptions { * If true the underlying datachannel is being used to perform the noise * handshake during connection establishment */ - handshake?: boolean + isHandshake?: boolean } export function createStream (options: WebRTCStreamOptions): WebRTCStream { - const { channel, direction, handshake } = options + const { channel, direction, isHandshake } = options return new WebRTCStream({ ...options, id: `${channel.id}`, - log: options.log.newScope(`${handshake === true ? 'handshake' : direction}:${channel.id}`) + log: options.log.newScope(`${isHandshake === true ? 'handshake' : direction}:${channel.id}`) }) } diff --git a/packages/transport-webrtc/src/util.ts b/packages/transport-webrtc/src/util.ts index 210ef9e829..9b0eef91e9 100644 --- a/packages/transport-webrtc/src/util.ts +++ b/packages/transport-webrtc/src/util.ts @@ -4,6 +4,7 @@ import pTimeout from 'p-timeout' import { DATA_CHANNEL_DRAIN_TIMEOUT, DEFAULT_ICE_SERVERS, UFRAG_ALPHABET, UFRAG_PREFIX } from './constants.js' import type { PeerConnection } from '@ipshipyard/node-datachannel' import type { LoggerOptions } from '@libp2p/interface' +import type { Duplex, Source } from 'it-stream-types' const browser = detect() export const isFirefox = ((browser != null) && browser.name === 'firefox') @@ -12,6 +13,26 @@ export const nopSource = async function * nop (): AsyncGenerator => {} +// Duplex that does nothing. 
Needed to fulfill the interface +export function inertDuplex (): Duplex { + return { + source: { + [Symbol.asyncIterator] () { + return { + async next () { + // This will never resolve + return new Promise(() => { }) + } + } + } + }, + sink: async (source: Source) => { + // This will never resolve + return new Promise(() => { }) + } + } +} + export function drainAndClose (channel: RTCDataChannel, direction: string, drainTimeout: number = DATA_CHANNEL_DRAIN_TIMEOUT, options: LoggerOptions): void { if (channel.readyState !== 'open') { return diff --git a/packages/transport-webrtc/test/maconn.spec.ts b/packages/transport-webrtc/test/maconn.spec.ts index 0328800521..c61c5bd7a8 100644 --- a/packages/transport-webrtc/test/maconn.spec.ts +++ b/packages/transport-webrtc/test/maconn.spec.ts @@ -4,7 +4,7 @@ import { defaultLogger } from '@libp2p/logger' import { multiaddr } from '@multiformats/multiaddr' import { expect } from 'aegir/chai' import { stubObject } from 'sinon-ts' -import { WebRTCMultiaddrConnection } from '../src/maconn.js' +import { toMultiaddrConnection } from '../src/rtcpeerconnection-to-conn.ts' import { RTCPeerConnection } from '../src/webrtc/index.js' import type { CounterGroup } from '@libp2p/interface' @@ -17,20 +17,17 @@ describe('Multiaddr Connection', () => { increment: () => {}, reset: () => {} }) - const maConn = new WebRTCMultiaddrConnection({ - logger: defaultLogger() - }, { + const maConn = toMultiaddrConnection({ peerConnection, remoteAddr, - timeline: { - open: (new Date()).getTime() - }, - metrics + metrics, + direction: 'outbound', + log: defaultLogger().forComponent('libp2p:webrtc:connection') }) expect(maConn.timeline.close).to.be.undefined - await maConn.close() + await maConn.closeWrite() expect(maConn.timeline.close).to.not.be.undefined expect(metrics.increment.calledWith({ close: true })).to.be.true diff --git a/packages/transport-webrtc/test/muxer.spec.ts b/packages/transport-webrtc/test/muxer.spec.ts index acf3e337f7..bb2ea0f5b0 100644 --- a/packages/transport-webrtc/test/muxer.spec.ts +++ b/packages/transport-webrtc/test/muxer.spec.ts @@ -3,17 +3,16 @@ import { expect } from 'aegir/chai' import pRetry from 'p-retry' import { stubInterface } from 'sinon-ts' import { DataChannelMuxerFactory } from '../src/muxer.js' +import type { MultiaddrConnection } from '@libp2p/interface' describe('muxer', () => { - it('should delay notification of early streams', async () => { + it.skip('should delay notification of early streams', async () => { let onIncomingStreamInvoked = false // @ts-expect-error incomplete implementation const peerConnection: RTCPeerConnection = {} const muxerFactory = new DataChannelMuxerFactory({ - logger: defaultLogger() - }, { peerConnection }) @@ -26,10 +25,12 @@ describe('muxer', () => { } peerConnection.ondatachannel?.(event) - muxerFactory.createStreamMuxer({ - onIncomingStream: () => { - onIncomingStreamInvoked = true - } + const muxer = muxerFactory.createStreamMuxer(stubInterface({ + log: defaultLogger().forComponent('libp2p:maconn') + })) + + muxer.addEventListener('stream', () => { + onIncomingStreamInvoked = true }) expect(onIncomingStreamInvoked).to.be.false() diff --git a/packages/transport-webrtc/test/peer.spec.ts b/packages/transport-webrtc/test/peer.spec.ts index cfeb4a9797..1cd935107c 100644 --- a/packages/transport-webrtc/test/peer.spec.ts +++ b/packages/transport-webrtc/test/peer.spec.ts @@ -1,13 +1,11 @@ import { generateKeyPair } from '@libp2p/crypto/keys' -import { streamPair } from '@libp2p/interface-compliance-tests/mocks' 
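The updated `maconn.spec.ts` test above now builds the connection through the new `toMultiaddrConnection` helper from `rtcpeerconnection-to-conn.ts`. A minimal sketch of that call shape follows, based only on the call sites visible in this diff; the zero-argument `RTCPeerConnection` construction and the example multiaddr are assumptions for illustration, not part of the change:

```ts
import { defaultLogger } from '@libp2p/logger'
import { multiaddr } from '@multiformats/multiaddr'
import { toMultiaddrConnection } from '../src/rtcpeerconnection-to-conn.ts'
import { RTCPeerConnection } from '../src/webrtc/index.js'

// placeholder remote address, for illustration only
const remoteAddr = multiaddr('/ip4/192.0.2.1/udp/1234/webrtc')
// assumed to be constructable without arguments here
const peerConnection = new RTCPeerConnection()

const maConn = toMultiaddrConnection({
  peerConnection,
  remoteAddr,
  direction: 'outbound',
  log: defaultLogger().forComponent('libp2p:webrtc:connection')
})

// closing the write side records a close time on the connection timeline
await maConn.closeWrite()
console.info('closed at', maConn.timeline.close)
```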
import { defaultLogger, logger } from '@libp2p/logger' import { peerIdFromPrivateKey } from '@libp2p/peer-id' +import { streamPair, pbStream } from '@libp2p/utils' import { multiaddr } from '@multiformats/multiaddr' import { expect } from 'aegir/chai' import delay from 'delay' import { detect } from 'detect-browser' -import { duplexPair } from 'it-pair/duplex' -import { pbStream } from 'it-protobuf-stream' import { TypedEventEmitter } from 'main-event' import pRetry from 'p-retry' import Sinon from 'sinon' @@ -54,18 +52,7 @@ async function getComponents (): Promise { const initiatorPeerId = peerIdFromPrivateKey(await generateKeyPair('Ed25519')) const receiverPeerId = peerIdFromPrivateKey(await generateKeyPair('Ed25519')) const receiverMultiaddr = multiaddr(`/ip4/123.123.123.123/tcp/123/p2p/${relayPeerId}/p2p-circuit/webrtc/p2p/${receiverPeerId}`) - const [initiatorToReceiver, receiverToInitiator] = duplexPair() - const [initiatorStream, receiverStream] = streamPair({ - duplex: initiatorToReceiver, - init: { - protocol: SIGNALING_PROTOCOL - } - }, { - duplex: receiverToInitiator, - init: { - protocol: SIGNALING_PROTOCOL - } - }) + const [initiatorStream, receiverStream] = await streamPair() const recipientAbortController = new AbortController() @@ -118,7 +105,7 @@ describe('webrtc basic', () => { ;[{ peerConnection: initiatorPeerConnection }] = await expect( Promise.all([ initiateConnection(initiator), - handleIncomingStream(recipient) + handleIncomingStream(recipient.stream, recipient.connection, recipient) ]) ).to.eventually.be.fulfilled() @@ -157,7 +144,7 @@ describe('webrtc basic', () => { ...initiator, signal: abortController.signal }), - handleIncomingStream(recipient) + handleIncomingStream(recipient.stream, recipient.connection, recipient) ])) .to.eventually.be.rejected.with.property('message').that.matches(/Oh noes!/) }) @@ -173,7 +160,7 @@ describe('webrtc receiver', () => { it('should fail receiving on invalid sdp offer', async () => { ({ initiator, recipient } = await getComponents()) - const receiverPeerConnectionPromise = handleIncomingStream(recipient) + const receiverPeerConnectionPromise = handleIncomingStream(recipient.stream, recipient.connection, recipient) const stream = pbStream(initiator.stream).pb(Message) await stream.write({ type: Message.Type.SDP_OFFER, data: 'bad' }) diff --git a/packages/transport-webrtc/test/stream.spec.ts b/packages/transport-webrtc/test/stream.spec.ts index f4c48478f6..33aa2839dc 100644 --- a/packages/transport-webrtc/test/stream.spec.ts +++ b/packages/transport-webrtc/test/stream.spec.ts @@ -1,29 +1,22 @@ import { defaultLogger } from '@libp2p/logger' import { expect } from 'aegir/chai' import delay from 'delay' -import length from 'it-length' import * as lengthPrefixed from 'it-length-prefixed' -import { pushable } from 'it-pushable' import { bytes } from 'multiformats' -import pDefer from 'p-defer' -import { Uint8ArrayList } from 'uint8arraylist' -import { MAX_BUFFERED_AMOUNT, MAX_MESSAGE_SIZE, PROTOBUF_OVERHEAD } from '../src/constants.js' +import { stubInterface } from 'sinon-ts' +import { MAX_MESSAGE_SIZE, PROTOBUF_OVERHEAD } from '../src/constants.js' import { Message } from '../src/private-to-public/pb/message.js' import { createStream } from '../src/stream.js' import { RTCPeerConnection } from '../src/webrtc/index.js' -import { mockDataChannel, receiveFinAck } from './util.js' +import { receiveFinAck } from './util.js' import type { WebRTCStream } from '../src/stream.js' import type { Stream } from '@libp2p/interface' describe('Max 
message size', () => { it(`sends messages smaller or equal to ${MAX_MESSAGE_SIZE} bytes in one`, async () => { - const sent: Uint8ArrayList = new Uint8ArrayList() const data = new Uint8Array(MAX_MESSAGE_SIZE - PROTOBUF_OVERHEAD) - const p = pushable() - const channel = mockDataChannel({ - send: (bytes) => { - sent.append(bytes) - } + const channel = stubInterface({ + readyState: 'open' }) // Make sure that a message with all fields will be exactly MAX_MESSAGE_SIZE @@ -31,8 +24,8 @@ describe('Max message size', () => { flag: Message.Flag.STOP_SENDING, message: data })) + expect(messageLengthEncoded).to.have.lengthOf(MAX_MESSAGE_SIZE) - expect(messageLengthEncoded.length).eq(MAX_MESSAGE_SIZE) const webrtcStream = createStream({ channel, direction: 'outbound', @@ -40,81 +33,34 @@ describe('Max message size', () => { log: defaultLogger().forComponent('test') }) - p.push(data) - p.end() - receiveFinAck(channel) - await webrtcStream.sink(p) + webrtcStream.send(data) + await webrtcStream.close() - expect(length(sent)).to.be.gt(1) - - for (const buf of sent) { - expect(buf.byteLength).to.be.lessThanOrEqual(MAX_MESSAGE_SIZE) - } + expect(channel.send).to.have.property('callCount', 1) + expect(channel.send.getCall(0).args[0]).to.have.lengthOf(MAX_MESSAGE_SIZE) }) it(`sends messages greater than ${MAX_MESSAGE_SIZE} bytes in parts`, async () => { - const sent: Uint8ArrayList = new Uint8ArrayList() const data = new Uint8Array(MAX_MESSAGE_SIZE) - const p = pushable() - const channel = mockDataChannel({ - send: (bytes) => { - sent.append(bytes) - } + const channel = stubInterface({ + readyState: 'open' }) - // Make sure that the data that ought to be sent will result in a message with exactly MAX_MESSAGE_SIZE + 1 - // const messageLengthEncoded = lengthPrefixed.encode.single(Message.encode({ message: data })).subarray() - // expect(messageLengthEncoded.length).eq(MAX_MESSAGE_SIZE + 1) - const webrtcStream = createStream({ channel, direction: 'outbound', log: defaultLogger().forComponent('test') }) - p.push(data) - p.end() - receiveFinAck(channel) - await webrtcStream.sink(p) + webrtcStream.send(data) + await webrtcStream.close() - expect(length(sent)).to.be.gt(1) + expect(channel.send).to.have.property('callCount').that.is.greaterThan(1) - for (const buf of sent) { - expect(buf.byteLength).to.be.lessThanOrEqual(MAX_MESSAGE_SIZE) + for (let i = 0; i < channel.send.callCount; i++) { + expect(channel.send.getCall(i).args[0]).to.have.length.that.is.lessThanOrEqual(MAX_MESSAGE_SIZE) } }) - - it('closes the stream if buffer amount low timeout', async () => { - const timeout = 100 - const closed = pDefer() - const channel = mockDataChannel({ - send: () => { - throw new Error('Expected to not send') - }, - bufferedAmount: MAX_BUFFERED_AMOUNT + 1 - }) - const webrtcStream = createStream({ - bufferedAmountLowEventTimeout: timeout, - closeTimeout: 1, - channel, - direction: 'outbound', - onEnd: () => { - closed.resolve() - }, - log: defaultLogger().forComponent('test') - }) - - const t0 = Date.now() - - await expect(webrtcStream.sink([new Uint8Array(1)])).to.eventually.be.rejected - .with.property('name', 'TimeoutError') - const t1 = Date.now() - expect(t1 - t0).greaterThanOrEqual(timeout) - expect(t1 - t0).lessThan(timeout + 1000) // Some upper bound - await closed.promise - expect(webrtcStream.timeline.close).to.be.greaterThan(webrtcStream.timeline.open) - expect(webrtcStream.timeline.abort).to.be.greaterThan(webrtcStream.timeline.open) - }) }) const TEST_MESSAGE = 'test_message' @@ -211,7 +157,7 @@ 
describe('Stream Stats', () => { it('reset = close', () => { expect(stream.timeline.close).to.not.exist() - stream.reset() // only resets the write side + stream.onRemoteReset() // only resets the write side expect(stream.timeline.close).to.be.a('number') expect(stream.timeline.close).to.be.greaterThanOrEqual(stream.timeline.open) expect(stream.timeline.closeWrite).to.be.greaterThanOrEqual(stream.timeline.open) diff --git a/packages/transport-websockets/package.json b/packages/transport-websockets/package.json index 8f572c3d6b..8a6dacb5d9 100644 --- a/packages/transport-websockets/package.json +++ b/packages/transport-websockets/package.json @@ -75,10 +75,11 @@ "@types/ws": "^8.18.1", "it-ws": "^6.1.5", "main-event": "^1.0.1", - "p-defer": "^4.0.1", "p-event": "^6.0.1", "progress-events": "^1.0.1", - "race-signal": "^1.1.3", + "race-event": "^1.5.0", + "uint8arraylist": "^2.4.8", + "uint8arrays": "^5.1.0", "ws": "^8.18.2" }, "devDependencies": { @@ -87,7 +88,8 @@ "is-loopback-addr": "^2.0.2", "p-wait-for": "^5.0.2", "sinon": "^20.0.0", - "sinon-ts": "^2.0.0" + "sinon-ts": "^2.0.0", + "undici": "^7.11.0" }, "browser": { "./dist/src/listener.js": "./dist/src/listener.browser.js" diff --git a/packages/transport-websockets/src/index.ts b/packages/transport-websockets/src/index.ts index 0785a134b6..a88e108510 100644 --- a/packages/transport-websockets/src/index.ts +++ b/packages/transport-websockets/src/index.ts @@ -25,17 +25,14 @@ import { transportSymbol, serviceCapabilities, ConnectionFailedError } from '@libp2p/interface' import { multiaddrToUri as toUri } from '@multiformats/multiaddr-to-uri' -import { connect } from 'it-ws/client' -import pDefer from 'p-defer' import { CustomProgressEvent } from 'progress-events' -import { raceSignal } from 'race-signal' +import { raceEvent } from 'race-event' import * as filters from './filters.js' import { createListener } from './listener.js' -import { socketToMaConn } from './socket-to-conn.js' +import { socketToMaConn } from './websocket-to-conn.js' import type { Transport, MultiaddrFilter, CreateListenerOptions, DialTransportOptions, Listener, AbortOptions, ComponentLogger, Logger, Connection, OutboundConnectionUpgradeEvents, Metrics, CounterGroup, Libp2pEvents } from '@libp2p/interface' import type { Multiaddr } from '@multiformats/multiaddr' import type { WebSocketOptions } from 'it-ws/client' -import type { DuplexWebSocket } from 'it-ws/duplex' import type { TypedEventTarget } from 'main-event' import type http from 'node:http' import type https from 'node:https' @@ -50,6 +47,8 @@ export interface WebSocketsInit extends AbortOptions, WebSocketOptions { /** * Options used to create WebSockets + * + * @deprecated This option will be removed in a future release */ websocket?: ClientOptions @@ -121,10 +120,12 @@ class WebSockets implements Transport { this.log('dialing %s', ma) options = options ?? 
{} - const socket = await this._connect(ma, options) - const maConn = socketToMaConn(socket, ma, { - logger: this.logger, - metrics: this.metrics?.dialerEvents + const maConn = socketToMaConn({ + websocket: await this._connect(ma, options), + remoteAddr: ma, + metrics: this.metrics?.dialerEvents, + direction: 'outbound', + log: this.components.logger.forComponent('libp2p:websockets:connection:outbound') }) this.log('new outbound connection %s', maConn.remoteAddr) @@ -133,43 +134,36 @@ class WebSockets implements Transport { return conn } - async _connect (ma: Multiaddr, options: DialTransportOptions): Promise { + async _connect (ma: Multiaddr, options: DialTransportOptions): Promise { options?.signal?.throwIfAborted() - const cOpts = ma.toOptions() - this.log('dialing %s:%s', cOpts.host, cOpts.port) - - const errorPromise = pDefer() - const rawSocket = connect(toUri(ma), this.init) - rawSocket.socket.addEventListener('error', () => { - // the WebSocket.ErrorEvent type doesn't actually give us any useful - // information about what happened - // https://developer.mozilla.org/en-US/docs/Web/API/WebSocket/error_event - const err = new ConnectionFailedError(`Could not connect to ${ma.toString()}`) - this.log.error('connection error:', err) - this.metrics?.dialerEvents.increment({ error: true }) - errorPromise.reject(err) - }) + const uri = toUri(ma) + this.log('create websocket connection to %s', uri) + const websocket = new WebSocket(uri) try { options.onProgress?.(new CustomProgressEvent('websockets:open-connection')) - await raceSignal(Promise.race([rawSocket.connected(), errorPromise.promise]), options.signal) + await raceEvent(websocket, 'open', options.signal, { + errorEvent: 'error', + error: new ConnectionFailedError(`Could not connect to ${uri}`) + }) } catch (err: any) { if (options.signal?.aborted) { this.metrics?.dialerEvents.increment({ abort: true }) + } else { + this.metrics?.dialerEvents.increment({ error: true }) } - rawSocket.close() - .catch(err => { - this.log.error('error closing raw socket', err) - }) + try { + websocket.close() + } catch {} throw err } this.log('connected %s', ma) this.metrics?.dialerEvents.increment({ connect: true }) - return rawSocket + return websocket } /** diff --git a/packages/transport-websockets/src/listener.ts b/packages/transport-websockets/src/listener.ts index 0bb1c39c7d..3b25a87e90 100644 --- a/packages/transport-websockets/src/listener.ts +++ b/packages/transport-websockets/src/listener.ts @@ -1,18 +1,16 @@ import http from 'node:http' import https from 'node:https' import net from 'node:net' -import { getThinWaistAddresses } from '@libp2p/utils/get-thin-waist-addresses' -import { ipPortToMultiaddr as toMultiaddr } from '@libp2p/utils/ip-port-to-multiaddr' +import { getThinWaistAddresses, ipPortToMultiaddr as toMultiaddr } from '@libp2p/utils' import { multiaddr } from '@multiformats/multiaddr' import { WebSockets, WebSocketsSecure } from '@multiformats/multiaddr-matcher' -import duplex from 'it-ws/duplex' import { TypedEventEmitter, setMaxListeners } from 'main-event' import { pEvent } from 'p-event' import * as ws from 'ws' -import { socketToMaConn } from './socket-to-conn.js' +import { toWebSocket } from './utils.ts' +import { socketToMaConn } from './websocket-to-conn.js' import type { ComponentLogger, Logger, Listener, ListenerEvents, CreateListenerOptions, CounterGroup, MetricGroup, Metrics, TLSCertificate, Libp2pEvents, Upgrader, MultiaddrConnection } from '@libp2p/interface' import type { Multiaddr } from '@multiformats/multiaddr' 
-import type { DuplexWebSocket } from 'it-ws/duplex' import type { TypedEventTarget } from 'main-event' import type { EventEmitter } from 'node:events' import type { Server } from 'node:http' @@ -40,8 +38,8 @@ export interface WebSocketListenerMetrics { } export class WebSocketListener extends TypedEventEmitter implements Listener { + private components: WebSocketListenerComponents private readonly log: Logger - private readonly logger: ComponentLogger private readonly server: net.Server private readonly wsServer: ws.WebSocketServer private readonly metrics: WebSocketListenerMetrics @@ -58,8 +56,8 @@ export class WebSocketListener extends TypedEventEmitter impleme constructor (components: WebSocketListenerComponents, init: WebSocketListenerInit) { super() + this.components = components this.log = components.logger.forComponent('libp2p:websockets:listener') - this.logger = components.logger this.upgrader = init.upgrader this.httpOptions = init.http this.httpsOptions = init.https ?? init.http @@ -189,22 +187,16 @@ export class WebSocketListener extends TypedEventEmitter impleme return } - const stream: DuplexWebSocket = { - ...duplex(socket, { - remoteAddress: req.socket.remoteAddress ?? '0.0.0.0', - remotePort: req.socket.remotePort ?? 0 - }), - localAddress: addr.address, - localPort: addr.port - } - let maConn: MultiaddrConnection try { - maConn = socketToMaConn(stream, toMultiaddr(stream.remoteAddress ?? '', stream.remotePort ?? 0), { - logger: this.logger, + maConn = socketToMaConn({ + websocket: toWebSocket(socket), + remoteAddr: toMultiaddr(req.socket.remoteAddress ?? '0.0.0.0', req.socket.remotePort ?? 0), metrics: this.metrics?.events, - metricPrefix: `${this.addr} ` + metricPrefix: `${this.addr} `, + direction: 'inbound', + log: this.components.logger.forComponent('libp2p:websockets:connection:inbound') }) } catch (err: any) { this.log.error('inbound connection failed', err) @@ -222,11 +214,7 @@ export class WebSocketListener extends TypedEventEmitter impleme this.log.error('inbound connection failed to upgrade - %e', err) this.metrics.errors?.increment({ [`${this.addr} inbound_upgrade`]: true }) - await maConn.close() - .catch(err => { - this.log.error('inbound connection failed to close after upgrade failed', err) - this.metrics.errors?.increment({ [`${this.addr} inbound_closing_failed`]: true }) - }) + maConn.closeWrite() }) } diff --git a/packages/transport-websockets/src/socket-to-conn.ts b/packages/transport-websockets/src/socket-to-conn.ts deleted file mode 100644 index 8e5ddbbeb7..0000000000 --- a/packages/transport-websockets/src/socket-to-conn.ts +++ /dev/null @@ -1,114 +0,0 @@ -import { AbortError, ConnectionFailedError } from '@libp2p/interface' -import { CLOSE_TIMEOUT } from './constants.js' -import type { AbortOptions, ComponentLogger, CounterGroup, MultiaddrConnection } from '@libp2p/interface' -import type { Multiaddr } from '@multiformats/multiaddr' -import type { DuplexWebSocket } from 'it-ws/duplex' - -export interface SocketToConnOptions { - localAddr?: Multiaddr - logger: ComponentLogger - metrics?: CounterGroup - metricPrefix?: string -} - -// Convert a stream into a MultiaddrConnection -// https://github.com/libp2p/interface-transport#multiaddrconnection -export function socketToMaConn (stream: DuplexWebSocket, remoteAddr: Multiaddr, options: SocketToConnOptions): MultiaddrConnection { - const metrics = options.metrics - const metricPrefix = options.metricPrefix ?? 
'' - - const maConn: MultiaddrConnection = { - log: options.logger.forComponent('libp2p:websockets:connection'), - - async sink (source) { - try { - await stream.sink((async function * () { - for await (const buf of source) { - if (buf instanceof Uint8Array) { - yield buf - } else { - yield buf.subarray() - } - } - })()) - } catch (err: any) { - if (err.type !== 'aborted') { - maConn.log.error(err) - } - } - }, - - source: stream.source, - - remoteAddr, - - timeline: { open: Date.now() }, - - async close (options: AbortOptions = {}) { - const start = Date.now() - - if (options.signal == null) { - const signal = AbortSignal.timeout(CLOSE_TIMEOUT) - - options = { - ...options, - signal - } - } - - const listener = (): void => { - const { host, port } = maConn.remoteAddr.toOptions() - maConn.log('timeout closing stream to %s:%s after %dms, destroying it manually', - host, port, Date.now() - start) - - this.abort(new AbortError('Socket close timeout')) - } - - options.signal?.addEventListener('abort', listener) - - try { - await stream.close() - } catch (err: any) { - maConn.log.error('error closing WebSocket gracefully - %e', err) - this.abort(err) - } finally { - options.signal?.removeEventListener('abort', listener) - maConn.timeline.close = Date.now() - } - }, - - abort (err: Error): void { - maConn.log.error('destroying WebSocket after error - %e', err) - stream.destroy() - maConn.timeline.close = Date.now() - - // ws WebSocket.terminate does not accept an Error arg to emit an 'error' - // event on destroy like other node streams so we can't update a metric - // with an event listener - // https://github.com/websockets/ws/issues/1752#issuecomment-622380981 - metrics?.increment({ [`${metricPrefix}error`]: true }) - } - } - - // track local vs remote closing - let closedLocally = false - const close = stream.socket.close.bind(stream.socket) - stream.socket.close = (...args) => { - closedLocally = true - return close(...args) - } - - stream.socket.addEventListener('close', (evt) => { - maConn.log('closed %s, code %d, reason "%s", wasClean %s', closedLocally ? 'locally' : 'by remote', evt.code, evt.reason, evt.wasClean) - - if (!evt.wasClean) { - maConn.abort(new ConnectionFailedError(`${closedLocally ? 
'Local' : 'Remote'} did not close WebSocket cleanly`)) - return - } - - metrics?.increment({ [`${metricPrefix}close`]: true }) - maConn.timeline.close = Date.now() - }, { once: true }) - - return maConn -} diff --git a/packages/transport-websockets/src/utils.ts b/packages/transport-websockets/src/utils.ts new file mode 100644 index 0000000000..2d6436edaf --- /dev/null +++ b/packages/transport-websockets/src/utils.ts @@ -0,0 +1,36 @@ +import type { WebSocket as WSSWebSocket } from 'ws' + +/** + * Adds properties/methods to a `WebSocket` instance from the `ws` module to be + * compatible with the `globalThis.WebSocket` API + */ +export function toWebSocket (ws: WSSWebSocket): WebSocket { + Object.defineProperty(ws, 'url', { + value: '', + writable: false + }) + + // @ts-expect-error not a WS/WebSocket method + ws.dispatchEvent = (evt: Event) => { + if (evt.type === 'close') { + ws.emit('close') + } + + if (evt.type === 'open') { + ws.emit('open') + } + + if (evt.type === 'message') { + const m = evt as MessageEvent + ws.emit('data', m.data) + } + + if (evt.type === 'error') { + ws.emit('error', new Error('An error occurred')) + } + ws.emit(evt.type, evt) + } + + // @ts-expect-error ws is now WebSocket + return ws +} diff --git a/packages/transport-websockets/src/websocket-to-conn.ts b/packages/transport-websockets/src/websocket-to-conn.ts new file mode 100644 index 0000000000..a5c2a896b5 --- /dev/null +++ b/packages/transport-websockets/src/websocket-to-conn.ts @@ -0,0 +1,91 @@ +import { AbstractMultiaddrConnection } from '@libp2p/utils' +import { Uint8ArrayList } from 'uint8arraylist' +import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' +import type { AbortOptions, MultiaddrConnection } from '@libp2p/interface' +import type { AbstractMultiaddrConnectionInit, SendResult } from '@libp2p/utils' + +export interface WebSocketMultiaddrConnectionInit extends Omit { + websocket: WebSocket +} + +class WebSocketMultiaddrConnection extends AbstractMultiaddrConnection { + private websocket: WebSocket + + constructor (init: WebSocketMultiaddrConnectionInit) { + super(init) + + this.websocket = init.websocket + + this.websocket.addEventListener('close', (evt) => { + this.log('closed - code %d, reason "%s", wasClean %s', evt.code, evt.reason, evt.wasClean) + + if (!evt.wasClean) { + this.onRemoteReset() + return + } + + if (this.status === 'open') { + this.onRemoteCloseWrite() + } + }, { once: true }) + + this.websocket.addEventListener('message', (evt) => { + this.onMessage(evt) + .catch(err => { + this.log.error('error receiving data - %e', err) + }) + }) + } + + private async onMessage (evt: MessageEvent): Promise { + let buf: Uint8Array + + if (evt.data instanceof Blob) { + buf = await evt.data.bytes() + } else if (typeof evt.data === 'string') { + buf = uint8ArrayFromString(evt.data) + } else { + buf = new Uint8Array(evt.data, 0, evt.data.byteLength) + } + + this.onData(buf) + } + + sendData (data: Uint8ArrayList): SendResult { + for (const buf of data) { + this.websocket.send(buf) + } + + return { + sentBytes: data.byteLength, + canSendMore: true + } + } + + sendReset (): void { + this.websocket.close(1006) // abnormal closure + } + + async sendCloseWrite (options?: AbortOptions): Promise { + this.websocket.close() + options?.signal?.throwIfAborted() + } + + async sendCloseRead (options?: AbortOptions): Promise { + options?.signal?.throwIfAborted() + } + + sendPause (): void { + // read backpressure is not supported + } + + sendResume (): void { + // read backpressure is not 
supported + } +} + +// Convert a stream into a MultiaddrConnection +// https://github.com/libp2p/interface-transport#multiaddrconnection +export function socketToMaConn (init: WebSocketMultiaddrConnectionInit): MultiaddrConnection { + return new WebSocketMultiaddrConnection(init) +} diff --git a/packages/transport-websockets/test/node.ts b/packages/transport-websockets/test/node.ts index 0052e185e2..4c34405c8d 100644 --- a/packages/transport-websockets/test/node.ts +++ b/packages/transport-websockets/test/node.ts @@ -13,11 +13,19 @@ import { pEvent } from 'p-event' import pWaitFor from 'p-wait-for' import Sinon from 'sinon' import { stubInterface } from 'sinon-ts' +import { setGlobalDispatcher, Agent } from 'undici' import * as filters from '../src/filters.js' import { webSockets } from '../src/index.js' import type { Connection, Libp2pEvents, Listener, Transport, Upgrader, TLSCertificate } from '@libp2p/interface' import type { StubbedInstance } from 'sinon-ts' +// allow connecting to self-signed certificates +setGlobalDispatcher(new Agent({ + connect: { + rejectUnauthorized: false + } +})) + describe('instantiate the transport', () => { it('create', () => { const ws = webSockets()({ @@ -357,9 +365,6 @@ describe('dial', () => { beforeEach(async () => { ws = webSockets({ - websocket: { - rejectUnauthorized: false - }, https: { cert: fs.readFileSync('./test/fixtures/certificate.pem'), key: fs.readFileSync('./test/fixtures/key.pem') @@ -657,11 +662,7 @@ describe('auto-tls (IPv4)', () => { } }) - ws = webSockets({ - websocket: { - rejectUnauthorized: false - } - })({ + ws = webSockets()({ events, logger: defaultLogger() }) @@ -723,11 +724,7 @@ describe('auto-tls (IPv6)', () => { } }) - ws = webSockets({ - websocket: { - rejectUnauthorized: false - } - })({ + ws = webSockets()({ events, logger: defaultLogger() }) diff --git a/packages/transport-webtransport/package.json b/packages/transport-webtransport/package.json index 1c381da838..a11b75533c 100644 --- a/packages/transport-webtransport/package.json +++ b/packages/transport-webtransport/package.json @@ -49,7 +49,6 @@ "@libp2p/utils": "^6.7.1", "@multiformats/multiaddr": "^12.4.4", "@multiformats/multiaddr-matcher": "^2.0.0", - "it-stream-types": "^2.0.2", "multiformats": "^13.3.6", "progress-events": "^1.0.1", "race-signal": "^1.1.3", diff --git a/packages/transport-webtransport/src/constants.ts b/packages/transport-webtransport/src/constants.ts deleted file mode 100644 index c9a0901612..0000000000 --- a/packages/transport-webtransport/src/constants.ts +++ /dev/null @@ -1 +0,0 @@ -export const MAX_INBOUND_STREAMS = 1_000 diff --git a/packages/transport-webtransport/src/index.ts b/packages/transport-webtransport/src/index.ts index 59f5a1eac2..76cae38c83 100644 --- a/packages/transport-webtransport/src/index.ts +++ b/packages/transport-webtransport/src/index.ts @@ -3,13 +3,11 @@ * * A [libp2p transport](https://docs.libp2p.io/concepts/transports/overview/) based on [WebTransport](https://www.w3.org/TR/webtransport/). * - * > * > ⚠️ **Note** * > * > This WebTransport implementation currently only allows dialing to other nodes. It does not yet allow listening for incoming dials. This feature requires QUIC support to land in Node JS first. * > * > QUIC support in Node JS is actively being worked on. 
You can keep an eye on the progress by watching the [related issues on the Node JS issue tracker](https://github.com/nodejs/node/labels/quic) - * > * * @example * @@ -33,20 +31,17 @@ import { noise } from '@chainsafe/libp2p-noise' import { InvalidCryptoExchangeError, InvalidParametersError, serviceCapabilities, transportSymbol } from '@libp2p/interface' import { WebTransport as WebTransportMatcher } from '@multiformats/multiaddr-matcher' import { CustomProgressEvent } from 'progress-events' -import { raceSignal } from 'race-signal' -import { MAX_INBOUND_STREAMS } from './constants.js' import createListener from './listener.js' import { webtransportMuxer } from './muxer.js' -import { inertDuplex } from './utils/inert-duplex.js' +import { toMultiaddrConnection } from './session-to-conn.ts' import { isSubset } from './utils/is-subset.js' import { parseMultiaddr } from './utils/parse-multiaddr.js' +import { WebTransportMessageStream } from './utils/webtransport-message-stream.ts' import WebTransport from './webtransport.js' import type { Upgrader, Transport, CreateListenerOptions, DialTransportOptions, Listener, ComponentLogger, Logger, Connection, MultiaddrConnection, CounterGroup, Metrics, PeerId, OutboundConnectionUpgradeEvents, PrivateKey } from '@libp2p/interface' import type { Multiaddr } from '@multiformats/multiaddr' -import type { Source } from 'it-stream-types' import type { MultihashDigest } from 'multiformats/hashes/interface' import type { ProgressEvent } from 'progress-events' -import type { Uint8ArrayList } from 'uint8arraylist' /** * PEM format server certificate and private key @@ -63,7 +58,6 @@ interface WebTransportSessionCleanup { } export interface WebTransportInit { - maxInboundStreams?: number certificates?: WebTransportCertificate[] } @@ -88,6 +82,7 @@ export type WebTransportDialEvents = interface AuthenticateWebTransportOptions extends DialTransportOptions { wt: WebTransport + maConn: MultiaddrConnection remotePeer?: PeerId certhashes: Array> } @@ -103,7 +98,6 @@ class WebTransportTransport implements Transport { this.components = components this.config = { ...init, - maxInboundStreams: init.maxInboundStreams ?? MAX_INBOUND_STREAMS, certificates: init.certificates ?? [] } @@ -202,36 +196,32 @@ class WebTransportTransport implements Transport { cleanUpWTSession('remote_close') }) - authenticated = await raceSignal(this.authenticateWebTransport({ wt, remotePeer, certhashes, ...options }), options.signal) - - if (!authenticated) { - throw new InvalidCryptoExchangeError('Failed to authenticate webtransport') - } - this.metrics?.dialerEvents.increment({ open: true }) - maConn = { - close: async () => { - this.log('closing webtransport') - cleanUpWTSession('close') - }, - abort: (err: Error) => { - this.log('aborting webtransport due to passed err', err) - cleanUpWTSession('abort') - }, + maConn = toMultiaddrConnection({ remoteAddr: ma, - timeline: { - open: Date.now() - }, - log: this.components.logger.forComponent('libp2p:webtransport:maconn'), - // This connection is never used directly since webtransport supports native streams. 
- ...inertDuplex() + cleanUpWTSession, + direction: 'outbound', + log: this.components.logger.forComponent('libp2p:webtransport:connection:outbound') + }) + + authenticated = await this.authenticateWebTransport({ + wt, + maConn, + remotePeer, + certhashes, + ...options + }) + + if (!authenticated) { + throw new InvalidCryptoExchangeError('Failed to authenticate webtransport') } return await options.upgrader.upgradeOutbound(maConn, { ...options, skipEncryption: true, - muxerFactory: webtransportMuxer(wt, wt.incomingBidirectionalStreams.getReader(), this.log, this.config), + remotePeer, + muxerFactory: webtransportMuxer(wt), skipProtection: true }) } catch (err: any) { @@ -253,58 +243,30 @@ class WebTransportTransport implements Transport { } } - async authenticateWebTransport ({ wt, remotePeer, certhashes, onProgress, signal }: AuthenticateWebTransportOptions): Promise { - signal?.throwIfAborted() - + async authenticateWebTransport ({ wt, maConn, remotePeer, certhashes, onProgress, signal }: AuthenticateWebTransportOptions): Promise { onProgress?.(new CustomProgressEvent('webtransport:open-authentication-stream')) const stream = await wt.createBidirectionalStream() - const writer = stream.writable.getWriter() - const reader = stream.readable.getReader() - - const duplex = { - source: (async function * () { - while (true) { - const val = await reader.read() - - if (val.value != null) { - yield val.value - } - - if (val.done) { - break - } - } - })(), - sink: async (source: Source) => { - for await (const chunk of source) { - await raceSignal(writer.ready, signal) - - const buf = chunk instanceof Uint8Array ? chunk : chunk.subarray() + signal?.throwIfAborted() - writer.write(buf).catch(err => { - this.log.error('could not write chunk during authentication of WebTransport stream', err) - }) - } - } - } + const messages = new WebTransportMessageStream({ + stream, + log: maConn.log.newScope('muxer') + }) const n = noise()(this.components) onProgress?.(new CustomProgressEvent('webtransport:secure-outbound-connection')) - const { remoteExtensions } = await n.secureOutbound(duplex, { + const { remoteExtensions } = await n.secureOutbound(messages, { signal, remotePeer, skipStreamMuxerNegotiation: true }) onProgress?.(new CustomProgressEvent('webtransport:close-authentication-stream')) - // We're done with this authentication stream - writer.close().catch((err: Error) => { - this.log.error(`Failed to close authentication stream writer: ${err.message}`) - }) - reader.cancel().catch((err: Error) => { - this.log.error(`Failed to close authentication stream reader: ${err.message}`) + // We're done with this authentication stream + await messages.close({ + signal }) // Verify the certhashes we used when dialing are a subset of the certhashes relayed by the remote peer @@ -318,8 +280,7 @@ class WebTransportTransport implements Transport { createListener (options: CreateListenerOptions): Listener { return createListener(this.components, { ...options, - certificates: this.config.certificates, - maxInboundStreams: this.config.maxInboundStreams + certificates: this.config.certificates }) } diff --git a/packages/transport-webtransport/src/listener.ts b/packages/transport-webtransport/src/listener.ts index a0e15ff49d..d62c251f09 100644 --- a/packages/transport-webtransport/src/listener.ts +++ b/packages/transport-webtransport/src/listener.ts @@ -11,7 +11,6 @@ export interface WebTransportListenerInit extends CreateListenerOptions { handler?(conn: Connection): void upgrader: Upgrader certificates?: 
WebTransportCertificate[] - maxInboundStreams?: number } export default function createListener (components: WebTransportListenerComponents, options: WebTransportListenerInit): Listener { diff --git a/packages/transport-webtransport/src/muxer.ts b/packages/transport-webtransport/src/muxer.ts index 50d725f2f0..c4a19bd4b7 100644 --- a/packages/transport-webtransport/src/muxer.ts +++ b/packages/transport-webtransport/src/muxer.ts @@ -1,103 +1,72 @@ +import { AbstractStreamMuxer } from '@libp2p/utils' import { webtransportBiDiStreamToStream } from './stream.js' -import { inertDuplex } from './utils/inert-duplex.js' +import type { WebTransportStream } from './stream.ts' import type WebTransport from './webtransport.js' -import type { Logger, Stream, StreamMuxer, StreamMuxerFactory, StreamMuxerInit } from '@libp2p/interface' +import type { CreateStreamOptions, MultiaddrConnection, StreamMuxer, StreamMuxerFactory } from '@libp2p/interface' -export interface WebTransportMuxerInit { - maxInboundStreams: number -} +const PROTOCOL = '/webtransport' -export function webtransportMuxer (wt: Pick, reader: ReadableStreamDefaultReader, log: Logger, config: WebTransportMuxerInit): StreamMuxerFactory { - let streamIDCounter = 0 - log = log.newScope('muxer') +class WebTransportStreamMuxer extends AbstractStreamMuxer { + private webTransport: WebTransport + private streamIDCounter: number + private reader: ReadableStreamDefaultReader - return { - protocol: 'webtransport', - createStreamMuxer: (init?: StreamMuxerInit): StreamMuxer => { - // !TODO handle abort signal when WebTransport supports this. - const activeStreams: Stream[] = [] + constructor (webTransport: WebTransport, maConn: MultiaddrConnection) { + super(maConn, { + protocol: PROTOCOL, + name: 'muxer' + }) - Promise.resolve() - .then(async () => { - //! TODO unclear how to add backpressure here? - while (true) { - const { done, value: wtStream } = await reader.read() + this.webTransport = webTransport + this.streamIDCounter = 0 + this.reader = this.webTransport.incomingBidirectionalStreams.getReader() - if (done) { - break - } + Promise.resolve() + .then(async () => { + //! TODO unclear how to add backpressure here? + while (true) { + const { done, value } = await this.reader.read() - if (activeStreams.length >= config.maxInboundStreams) { - log(`too many inbound streams open - ${activeStreams.length}/${config.maxInboundStreams}, closing new incoming stream`) - // We've reached our limit, close this stream. - wtStream.writable.close().catch((err: Error) => { - log.error(`failed to close inbound stream that crossed our maxInboundStream limit: ${err.message}`) - }) - wtStream.readable.cancel().catch((err: Error) => { - log.error(`failed to close inbound stream that crossed our maxInboundStream limit: ${err.message}`) - }) - } else { - const stream = await webtransportBiDiStreamToStream( - wtStream, - String(streamIDCounter++), - 'inbound', - activeStreams, - init?.onStreamEnd, - log - ) - activeStreams.push(stream) - init?.onIncomingStream?.(stream) - } + if (done || value == null) { + break } - }) - .catch(err => { - log.error('could not create a new stream', err) - }) - - const muxer: StreamMuxer = { - protocol: 'webtransport', - streams: activeStreams, - newStream: async (name?: string): Promise => { - log('new outgoing stream', name) - const wtStream = await wt.createBidirectionalStream() - const stream = await webtransportBiDiStreamToStream(wtStream, String(streamIDCounter++), init?.direction ?? 
'outbound', activeStreams, init?.onStreamEnd, log) - activeStreams.push(stream) - - return stream - }, + this.onRemoteStream( + webtransportBiDiStreamToStream( + value, + String(this.streamIDCounter++), + 'inbound', + this.log + ) + ) + } + }) + .catch(err => { + this.log.error('could not create a new stream - %e', err) + }) + } - /** - * Close all tracked streams and stop the muxer - */ - close: async () => { - log('closing webtransport muxer gracefully') + async onCreateStream (options: CreateStreamOptions): Promise { + const wtStream = await this.webTransport.createBidirectionalStream() + options?.signal?.throwIfAborted() - try { - wt.close() - } catch (err: any) { - muxer.abort(err) - } - }, + return webtransportBiDiStreamToStream(wtStream, String(this.streamIDCounter++), 'outbound', this.log) + } - /** - * Abort all tracked streams and stop the muxer - */ - abort: (err: Error) => { - log('closing webtransport muxer with err:', err) + onData (): void { - try { - wt.close() - } catch (err: any) { - log.error('webtransport session threw error during close', err) - } - }, + } - // This stream muxer is webtransport native. Therefore it doesn't plug in with any other duplex. - ...inertDuplex() - } + sendReset (): void { + this.webTransport.close() + } +} - return muxer +export function webtransportMuxer (webTransport: WebTransport): StreamMuxerFactory { + return { + protocol: PROTOCOL, + createStreamMuxer (maConn: MultiaddrConnection): StreamMuxer { + return new WebTransportStreamMuxer(webTransport, maConn) } } } diff --git a/packages/transport-webtransport/src/session-to-conn.ts b/packages/transport-webtransport/src/session-to-conn.ts new file mode 100644 index 0000000000..352f8c0df8 --- /dev/null +++ b/packages/transport-webtransport/src/session-to-conn.ts @@ -0,0 +1,54 @@ +import { AbstractMultiaddrConnection } from '@libp2p/utils' +import type { AbortOptions, MultiaddrConnection } from '@libp2p/interface' +import type { AbstractMultiaddrConnectionInit, SendResult } from '@libp2p/utils' +import type { Uint8ArrayList } from 'uint8arraylist' + +export interface WebTransportSessionMultiaddrConnectionInit extends Omit { + cleanUpWTSession(metric: string): void +} + +class WebTransportSessionMultiaddrConnection extends AbstractMultiaddrConnection { + private cleanUpWTSession: (metric: string) => void + + constructor (init: WebTransportSessionMultiaddrConnectionInit) { + super(init) + + this.cleanUpWTSession = init.cleanUpWTSession + } + + sendData (data: Uint8ArrayList): SendResult { + return { + sentBytes: data.byteLength, + canSendMore: true + } + } + + sendReset (): void { + this.cleanUpWTSession('abort') + } + + async sendCloseWrite (options?: AbortOptions): Promise { + this.cleanUpWTSession('close') + options?.signal?.throwIfAborted() + } + + async sendCloseRead (options?: AbortOptions): Promise { + options?.signal?.throwIfAborted() + } + + sendPause (): void { + // TODO: backpressure? + } + + sendResume (): void { + // TODO: backpressure? 
+ } +} + +/** + * Convert a socket into a MultiaddrConnection + * https://github.com/libp2p/interface-transport#multiaddrconnection + */ +export const toMultiaddrConnection = (init: WebTransportSessionMultiaddrConnectionInit): MultiaddrConnection => { + return new WebTransportSessionMultiaddrConnection(init) +} diff --git a/packages/transport-webtransport/src/stream.ts b/packages/transport-webtransport/src/stream.ts index b3e390dccc..af0aa0d555 100644 --- a/packages/transport-webtransport/src/stream.ts +++ b/packages/transport-webtransport/src/stream.ts @@ -1,14 +1,14 @@ -import { AbstractStream } from '@libp2p/utils/abstract-stream' +import { AbstractStream } from '@libp2p/utils' import { raceSignal } from 'race-signal' -import { Uint8ArrayList } from 'uint8arraylist' -import type { AbortOptions, Direction, Logger, Stream } from '@libp2p/interface' -import type { AbstractStreamInit } from '@libp2p/utils/abstract-stream' +import type { AbortOptions, MessageStreamDirection, Logger } from '@libp2p/interface' +import type { AbstractStreamInit, SendResult } from '@libp2p/utils' +import type { Uint8ArrayList } from 'uint8arraylist' interface WebTransportStreamInit extends AbstractStreamInit { bidiStream: WebTransportBidirectionalStream } -class WebTransportStream extends AbstractStream { +export class WebTransportStream extends AbstractStream { private readonly writer: WritableStreamDefaultWriter private readonly reader: ReadableStreamDefaultReader @@ -18,63 +18,87 @@ class WebTransportStream extends AbstractStream { this.writer = init.bidiStream.writable.getWriter() this.reader = init.bidiStream.readable.getReader() + void this.writer.closed + .then(() => { + this.log('writer closed') + }) + .catch((err) => { + this.log('writer close promise rejected - %e', err) + }) + .finally(() => { + this.onRemoteCloseRead() + }) + + this.readData() + } + + private readData (): void { Promise.resolve() .then(async () => { while (true) { const result = await this.reader.read() if (result.done) { - init.log('remote closed write') + this.log('remote closed write') + this.onRemoteCloseWrite() return } if (result.value != null) { - this.sourcePush(new Uint8ArrayList(result.value)) + this.onData(result.value) + } + + if (this.readStatus === 'paused') { + break } } }) .catch(err => { - init.log.error('error reading from stream', err) this.abort(err) }) .finally(() => { - this.remoteCloseWrite() + this.reader.releaseLock() }) + } - void this.writer.closed + sendData (data: Uint8ArrayList): SendResult { + // the streams spec recommends not waiting for data to be sent + // https://streams.spec.whatwg.org/#example-manual-write-dont-await + this.writer.ready .then(() => { - init.log('writer closed') - }) - .catch((err) => { - init.log('writer close promise rejected', err) + for (const buf of data) { + this.writer.write(buf) + } }) - .finally(() => { - this.remoteCloseRead() + .catch(err => { + this.log.error('error sending stream data - %e', err) }) - } - sendNewStream (options?: AbortOptions | undefined): void { - // this is a no-op - } + // The desiredSize read-only property of the WritableStreamDefaultWriter + // interface returns the desired size required to fill the stream's internal + // queue. + // + // the value will be null if the stream cannot be successfully written to + // (due to either being errored, or having an abort queued up), and zero if + // the stream is closed. 
It can be negative if the queue is over-full + if (this.writer.desiredSize == null) { + return { + sentBytes: data.byteLength, + canSendMore: false + } + } - async sendData (buf: Uint8ArrayList, options?: AbortOptions): Promise { - for (const chunk of buf) { - this.log('sendData waiting for writer to be ready') - await raceSignal(this.writer.ready, options?.signal) - - // the streams spec recommends not waiting for data to be sent - // https://streams.spec.whatwg.org/#example-manual-write-dont-await - this.writer.write(chunk) - .catch(err => { - this.log.error('error sending stream data', err) - }) + return { + sentBytes: data.byteLength, + canSendMore: this.writer.desiredSize > 0 } } - async sendReset (options?: AbortOptions): Promise { - this.log('sendReset aborting writer') - await raceSignal(this.writer.abort(), options?.signal) - this.log('sendReset aborted writer') + sendReset (err: Error): void { + this.writer.abort(err) + .catch(err => { + this.log.error('error aborting writer - %e', err) + }) } async sendCloseWrite (options?: AbortOptions): Promise { @@ -88,23 +112,21 @@ class WebTransportStream extends AbstractStream { await raceSignal(this.reader.cancel(), options?.signal) this.log('sendCloseRead cancelled reader') } + + sendPause (): void { + + } + + sendResume (): void { + this.readData() + } } -export async function webtransportBiDiStreamToStream (bidiStream: WebTransportBidirectionalStream, streamId: string, direction: Direction, activeStreams: Stream[], onStreamEnd: undefined | ((s: Stream) => void), log: Logger): Promise { - const stream = new WebTransportStream({ +export function webtransportBiDiStreamToStream (bidiStream: WebTransportBidirectionalStream, streamId: string, direction: MessageStreamDirection, log: Logger): WebTransportStream { + return new WebTransportStream({ bidiStream, id: streamId, direction, - log: log.newScope(`${direction}:${streamId}`), - onEnd: () => { - const index = activeStreams.findIndex(s => s === stream) - if (index !== -1) { - activeStreams.splice(index, 1) - } - - onStreamEnd?.(stream) - } + log: log.newScope(`${direction}:${streamId}`) }) - - return stream } diff --git a/packages/transport-webtransport/src/utils/inert-duplex.ts b/packages/transport-webtransport/src/utils/inert-duplex.ts deleted file mode 100644 index 33c93fa090..0000000000 --- a/packages/transport-webtransport/src/utils/inert-duplex.ts +++ /dev/null @@ -1,21 +0,0 @@ -import type { Duplex, Source } from 'it-stream-types' - -// Duplex that does nothing. 
Needed to fulfill the interface -export function inertDuplex (): Duplex { - return { - source: { - [Symbol.asyncIterator] () { - return { - async next () { - // This will never resolve - return new Promise(() => { }) - } - } - } - }, - sink: async (source: Source) => { - // This will never resolve - return new Promise(() => { }) - } - } -} diff --git a/packages/transport-webtransport/src/utils/parse-multiaddr.ts b/packages/transport-webtransport/src/utils/parse-multiaddr.ts index 8ec7051749..0d4ac5c807 100644 --- a/packages/transport-webtransport/src/utils/parse-multiaddr.ts +++ b/packages/transport-webtransport/src/utils/parse-multiaddr.ts @@ -1,6 +1,5 @@ import { InvalidMultiaddrError } from '@libp2p/interface' import { peerIdFromString } from '@libp2p/peer-id' -import { protocols } from '@multiformats/multiaddr' import { WebTransport } from '@multiformats/multiaddr-matcher' import { bases, digest } from 'multiformats/basics' import type { PeerId } from '@libp2p/interface' @@ -17,7 +16,7 @@ function decodeCerthashStr (s: string): MultihashDigest { export interface ParsedMultiaddr { url: string certhashes: MultihashDigest[] - remotePeer?: PeerId + remotePeer: PeerId } export function parseMultiaddr (ma: Multiaddr): ParsedMultiaddr { @@ -25,15 +24,23 @@ export function parseMultiaddr (ma: Multiaddr): ParsedMultiaddr { throw new InvalidMultiaddrError('Invalid multiaddr, was not a WebTransport address') } - const parts = ma.stringTuples() - const certhashes = parts - .filter(([name, _]) => name === protocols('certhash').code) - .map(([_, value]) => decodeCerthashStr(value ?? '')) + const certhashes: MultihashDigest[] = [] + let remotePeer: PeerId | undefined - // only take the first peer id in the multiaddr as it may be a relay - const remotePeer = parts - .filter(([name, _]) => name === protocols('p2p').code) - .map(([_, value]) => peerIdFromString(value ?? ''))[0] + for (const components of ma.getComponents()) { + if (components.name === 'certhash') { + certhashes.push(decodeCerthashStr(components.value ?? '')) + } + + // only take the first peer id in the multiaddr as it may be a relay + if (components.name === 'p2p' && remotePeer == null) { + remotePeer = peerIdFromString(components.value ?? 
'') + } + } + + if (remotePeer == null) { + throw new InvalidMultiaddrError('Remote peer must be present in multiaddr') + } const opts = ma.toOptions() let host = opts.host diff --git a/packages/transport-webtransport/src/utils/webtransport-message-stream.ts b/packages/transport-webtransport/src/utils/webtransport-message-stream.ts new file mode 100644 index 0000000000..b92def30ae --- /dev/null +++ b/packages/transport-webtransport/src/utils/webtransport-message-stream.ts @@ -0,0 +1,74 @@ +import { StreamMessageEvent } from '@libp2p/interface' +import { AbstractMessageStream } from '@libp2p/utils' +import { raceSignal } from 'race-signal' +import type { AbortOptions } from '@libp2p/interface' +import type { MessageStreamInit, SendResult } from '@libp2p/utils' +import type { Uint8ArrayList } from 'uint8arraylist' + +export interface WebTransportMessageStreamInit extends MessageStreamInit { + stream: WebTransportBidirectionalStream +} + +export class WebTransportMessageStream extends AbstractMessageStream { + private writer: WritableStreamDefaultWriter + private reader: ReadableStreamDefaultReader + + constructor (init: WebTransportMessageStreamInit) { + super(init) + + this.writer = init.stream.writable.getWriter() + this.reader = init.stream.readable.getReader() + + Promise.resolve().then(async () => { + while (true) { + const { done, value } = await this.reader.read() + + if (value != null) { + this.dispatchEvent(new StreamMessageEvent(value)) + } + + if (done) { + break + } + } + }) + .catch(err => { + this.abort(err) + }) + } + + async sendCloseWrite (options?: AbortOptions): Promise { + await raceSignal(this.writer.close(), options?.signal) + } + + async sendCloseRead (options?: AbortOptions): Promise { + options?.signal?.throwIfAborted() + } + + sendData (data: Uint8ArrayList): SendResult { + this.writer.write(data) + .catch(err => { + this.abort(err) + }) + + return { + sentBytes: data.byteLength, + canSendMore: true + } + } + + sendReset (err: Error): void { + this.writer.abort(err) + .catch(err => { + this.log.error('could not send reset - %e', err) + }) + } + + sendPause (): void { + + } + + sendResume (): void { + + } +} diff --git a/packages/transport-webtransport/test/browser.ts b/packages/transport-webtransport/test/browser.ts index 197bb41261..f7c559d5a5 100644 --- a/packages/transport-webtransport/test/browser.ts +++ b/packages/transport-webtransport/test/browser.ts @@ -108,13 +108,16 @@ describe('libp2p-webtransport', () => { const stream = await node.dialProtocol(ma, '/echo/1.0.0') expect(stream.timeline.closeWrite).to.be.undefined() - expect(stream.timeline.closeRead).to.be.undefined() expect(stream.timeline.close).to.be.undefined() // send and receive data const [, output] = await Promise.all([ - stream.sink(gen()), - toBuffer(map(stream.source, buf => buf.subarray())) + Promise.resolve().then(async () => { + for await (const buf of gen()) { + stream.send(buf) + } + }), + toBuffer(map(stream, buf => buf.subarray())) ]) // closing takes a little bit of time @@ -132,7 +135,6 @@ describe('libp2p-webtransport', () => { // should have set timeline events expect(stream.timeline.closeWrite).to.be.greaterThan(0) - expect(stream.timeline.closeRead).to.be.greaterThan(0) expect(stream.timeline.close).to.be.greaterThan(0) }) }) diff --git a/packages/upnp-nat/src/check-external-address.ts b/packages/upnp-nat/src/check-external-address.ts index 45bc6c0aa5..7304e5c8a7 100644 --- a/packages/upnp-nat/src/check-external-address.ts +++ b/packages/upnp-nat/src/check-external-address.ts @@ 
-1,11 +1,11 @@ import { NotStartedError, start, stop } from '@libp2p/interface' -import { repeatingTask } from '@libp2p/utils/repeating-task' +import { repeatingTask } from '@libp2p/utils' import pDefer from 'p-defer' import { raceSignal } from 'race-signal' import type { Gateway } from '@achingbrain/nat-port-mapper' import type { AbortOptions, ComponentLogger, Logger, Startable } from '@libp2p/interface' import type { AddressManager } from '@libp2p/interface-internal' -import type { RepeatingTask } from '@libp2p/utils/repeating-task' +import type { RepeatingTask } from '@libp2p/utils' import type { DeferredPromise } from 'p-defer' export interface ExternalAddressCheckerComponents { diff --git a/packages/upnp-nat/src/search-gateway-finder.ts b/packages/upnp-nat/src/search-gateway-finder.ts index d54ea3faf8..496df829e9 100644 --- a/packages/upnp-nat/src/search-gateway-finder.ts +++ b/packages/upnp-nat/src/search-gateway-finder.ts @@ -1,11 +1,11 @@ import { start, stop } from '@libp2p/interface' -import { repeatingTask } from '@libp2p/utils/repeating-task' +import { repeatingTask } from '@libp2p/utils' import { TypedEventEmitter } from 'main-event' import { DEFAULT_GATEWAY_SEARCH_INTERVAL, DEFAULT_GATEWAY_SEARCH_MESSAGE_INTERVAL, DEFAULT_GATEWAY_SEARCH_TIMEOUT, DEFAULT_INITIAL_GATEWAY_SEARCH_INTERVAL, DEFAULT_INITIAL_GATEWAY_SEARCH_MESSAGE_INTERVAL, DEFAULT_INITIAL_GATEWAY_SEARCH_TIMEOUT } from './constants.js' import type { GatewayFinder, GatewayFinderEvents } from './upnp-nat.js' import type { Gateway, UPnPNAT } from '@achingbrain/nat-port-mapper' import type { ComponentLogger, Logger } from '@libp2p/interface' -import type { RepeatingTask } from '@libp2p/utils/repeating-task' +import type { RepeatingTask } from '@libp2p/utils' export interface SearchGatewayFinderComponents { logger: ComponentLogger diff --git a/packages/upnp-nat/src/upnp-nat.ts b/packages/upnp-nat/src/upnp-nat.ts index 0d33db8088..840722bd33 100644 --- a/packages/upnp-nat/src/upnp-nat.ts +++ b/packages/upnp-nat/src/upnp-nat.ts @@ -1,6 +1,6 @@ import { upnpNat } from '@achingbrain/nat-port-mapper' import { serviceCapabilities, serviceDependencies, start, stop } from '@libp2p/interface' -import { debounce } from '@libp2p/utils/debounce' +import { debounce } from '@libp2p/utils' import { setMaxListeners } from 'main-event' import { SearchGatewayFinder } from './search-gateway-finder.js' import { StaticGatewayFinder } from './static-gateway-finder.js' @@ -8,7 +8,7 @@ import { UPnPPortMapper } from './upnp-port-mapper.js' import type { UPnPNATComponents, UPnPNATInit, UPnPNAT as UPnPNATInterface } from './index.js' import type { Gateway, UPnPNAT as UPnPNATClient } from '@achingbrain/nat-port-mapper' import type { Logger, Startable } from '@libp2p/interface' -import type { DebouncedFunction } from '@libp2p/utils/debounce' +import type { DebouncedFunction } from '@libp2p/utils' import type { TypedEventTarget } from 'main-event' export interface GatewayFinderEvents { diff --git a/packages/upnp-nat/src/upnp-port-mapper.ts b/packages/upnp-nat/src/upnp-port-mapper.ts index 652dc6e96d..6714e9a863 100644 --- a/packages/upnp-nat/src/upnp-port-mapper.ts +++ b/packages/upnp-nat/src/upnp-port-mapper.ts @@ -1,9 +1,6 @@ import { isIPv4 } from '@chainsafe/is-ip' import { InvalidParametersError, start, stop } from '@libp2p/interface' -import { isLinkLocal } from '@libp2p/utils/multiaddr/is-link-local' -import { isLoopback } from '@libp2p/utils/multiaddr/is-loopback' -import { isPrivate } from '@libp2p/utils/multiaddr/is-private' -import { 
isPrivateIp } from '@libp2p/utils/private-ip' +import { isLinkLocal, isLoopback, isPrivate, isPrivateIp } from '@libp2p/utils' import { multiaddr } from '@multiformats/multiaddr' import { QUIC_V1, TCP, WebSockets, WebSocketsSecure, WebTransport } from '@multiformats/multiaddr-matcher' import { dynamicExternalAddress, staticExternalAddress } from './check-external-address.js' diff --git a/packages/utils/README.md b/packages/utils/README.md index 2b32a94af1..2d88947e75 100644 --- a/packages/utils/README.md +++ b/packages/utils/README.md @@ -36,7 +36,7 @@ These problems are the motivation for this package, having shared logic in this Each function should be imported directly. ```TypeScript -import { ipPortToMultiaddr } from '@libp2p/utils/ip-port-to-multiaddr' +import { ipPortToMultiaddr } from '@libp2p/utils' const ma = ipPortToMultiaddr('127.0.0.1', 9000) ``` diff --git a/packages/utils/package.json b/packages/utils/package.json index c7a9ae9eed..01c8963cc2 100644 --- a/packages/utils/package.json +++ b/packages/utils/package.json @@ -17,22 +17,6 @@ }, "type": "module", "types": "./dist/src/index.d.ts", - "typesVersions": { - "*": { - "*": [ - "*", - "dist/*", - "dist/src/*", - "dist/src/*/index" - ], - "src/*": [ - "*", - "dist/*", - "dist/src/*", - "dist/src/*/index" - ] - } - }, "files": [ "src", "dist", @@ -43,135 +27,6 @@ ".": { "types": "./dist/src/index.d.ts", "import": "./dist/src/index.js" - }, - "./abort-options": { - "types": "./dist/src/abort-options.d.ts", - "import": "./dist/src/abort-options.js" - }, - "./abstract-stream": { - "types": "./dist/src/abstract-stream.d.ts", - "import": "./dist/src/abstract-stream.js" - }, - "./adaptive-timeout": { - "types": "./dist/src/adaptive-timeout.d.ts", - "import": "./dist/src/adaptive-timeout.js" - }, - "./array-equals": { - "types": "./dist/src/array-equals.d.ts", - "import": "./dist/src/array-equals.js" - }, - "./close": { - "types": "./dist/src/close.d.ts", - "import": "./dist/src/close.js" - }, - "./close-source": { - "types": "./dist/src/close-source.d.ts", - "import": "./dist/src/close-source.js" - }, - "./debounce": { - "types": "./dist/src/debounce.d.ts", - "import": "./dist/src/debounce.js" - }, - "./filters": { - "types": "./dist/src/filters/index.d.ts", - "import": "./dist/src/filters/index.js" - }, - "./get-thin-waist-addresses": { - "types": "./dist/src/get-thin-waist-addresses.d.ts", - "browser": "./dist/src/get-thin-waist-addresses.browser.js", - "import": "./dist/src/get-thin-waist-addresses.js" - }, - "./global-unicast-ip": { - "types": "./dist/src/global-unicast-ip.d.ts", - "import": "./dist/src/global-unicast-ip.js" - }, - "./ip-port-to-multiaddr": { - "types": "./dist/src/ip-port-to-multiaddr.d.ts", - "import": "./dist/src/ip-port-to-multiaddr.js" - }, - "./is-async-generator": { - "types": "./dist/src/is-async-generator.d.ts", - "import": "./dist/src/is-async-generator.js" - }, - "./is-generator": { - "types": "./dist/src/is-generator.d.ts", - "import": "./dist/src/is-generator.js" - }, - "./is-promise": { - "types": "./dist/src/is-promise.d.ts", - "import": "./dist/src/is-promise.js" - }, - "./link-local-ip": { - "types": "./dist/src/link-local-ip.d.ts", - "import": "./dist/src/link-local-ip.js" - }, - "./merge-options": { - "types": "./dist/src/merge-options.d.ts", - "import": "./dist/src/merge-options.js" - }, - "./moving-average": { - "types": "./dist/src/moving-average.d.ts", - "import": "./dist/src/moving-average.js" - }, - "./multiaddr/is-global-unicast": { - "types": 
"./dist/src/multiaddr/is-global-unicast.d.ts", - "import": "./dist/src/multiaddr/is-global-unicast.js" - }, - "./multiaddr/is-ip-based": { - "types": "./dist/src/multiaddr/is-ip-based.d.ts", - "import": "./dist/src/multiaddr/is-ip-based.js" - }, - "./multiaddr/is-link-local": { - "types": "./dist/src/multiaddr/is-link-local.d.ts", - "import": "./dist/src/multiaddr/is-link-local.js" - }, - "./multiaddr/is-loopback": { - "types": "./dist/src/multiaddr/is-loopback.d.ts", - "import": "./dist/src/multiaddr/is-loopback.js" - }, - "./multiaddr/is-network-address": { - "types": "./dist/src/multiaddr/is-network-address.d.ts", - "import": "./dist/src/multiaddr/is-network-address.js" - }, - "./multiaddr/is-private": { - "types": "./dist/src/multiaddr/is-private.d.ts", - "import": "./dist/src/multiaddr/is-private.js" - }, - "./peer-queue": { - "types": "./dist/src/peer-queue.d.ts", - "import": "./dist/src/peer-queue.js" - }, - "./priority-queue": { - "types": "./dist/src/priority-queue.d.ts", - "import": "./dist/src/priority-queue.js" - }, - "./private-ip": { - "types": "./dist/src/private-ip.d.ts", - "import": "./dist/src/private-ip.js" - }, - "./queue": { - "types": "./dist/src/queue/index.d.ts", - "import": "./dist/src/queue/index.js" - }, - "./rate-limiter": { - "types": "./dist/src/rate-limiter.d.ts", - "import": "./dist/src/rate-limiter.js" - }, - "./repeating-task": { - "types": "./dist/src/repeating-task.d.ts", - "import": "./dist/src/repeating-task.js" - }, - "./stream-to-ma-conn": { - "types": "./dist/src/stream-to-ma-conn.d.ts", - "import": "./dist/src/stream-to-ma-conn.js" - }, - "./tracked-list": { - "types": "./dist/src/tracked-list.d.ts", - "import": "./dist/src/tracked-list.js" - }, - "./tracked-map": { - "types": "./dist/src/tracked-map.d.ts", - "import": "./dist/src/tracked-map.js" } }, "scripts": { @@ -198,18 +53,19 @@ "@sindresorhus/fnv1a": "^3.1.0", "any-signal": "^4.1.1", "delay": "^6.0.0", - "get-iterator": "^2.0.1", "is-loopback-addr": "^2.0.2", "is-plain-obj": "^4.1.0", - "it-foreach": "^2.1.3", + "it-length-prefixed": "^10.0.1", "it-pipe": "^3.0.1", "it-pushable": "^3.2.3", "it-stream-types": "^2.0.2", "main-event": "^1.0.1", "netmask": "^2.0.2", "p-defer": "^4.0.1", - "race-event": "^1.3.0", + "p-event": "^6.0.1", + "race-event": "^1.6.0", "race-signal": "^1.1.3", + "uint8-varint": "^2.0.4", "uint8arraylist": "^2.4.8", "uint8arrays": "^5.1.0" }, @@ -218,19 +74,19 @@ "@types/netmask": "^2.0.5", "aegir": "^47.0.14", "benchmark": "^2.1.4", - "delay": "^6.0.0", "it-all": "^3.0.8", - "it-drain": "^3.0.9", - "it-pair": "^2.0.6", + "it-drain": "^3.0.10", "sinon": "^20.0.0", "sinon-ts": "^2.0.0", "wherearewe": "^2.0.1" }, "browser": { - "./dist/src/get-thin-waist-addresses.js": "./dist/src/get-thin-waist-addresses.browser.js" + "./dist/src/get-thin-waist-addresses.js": "./dist/src/get-thin-waist-addresses.browser.js", + "./dist/src/socket-writer.js": "./dist/src/socket-writer.browser.js" }, "react-native": { - "./dist/src/get-thin-waist-addresses.js": "./dist/src/get-thin-waist-addresses.js" + "./dist/src/get-thin-waist-addresses.js": "./dist/src/get-thin-waist-addresses.js", + "./dist/src/socket-writer.js": "./dist/src/socket-writer.js" }, "sideEffects": false } diff --git a/packages/utils/src/abort-options.ts b/packages/utils/src/abort-options.ts deleted file mode 100644 index 05f9ace8db..0000000000 --- a/packages/utils/src/abort-options.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { anySignal } from 'any-signal' -import { setMaxListeners } from 'main-event' -import type { 
AbortOptions } from '@libp2p/interface' -import type { ClearableSignal } from 'any-signal' - -export function createTimeoutOptions (timeout: number): AbortOptions -export function createTimeoutOptions (timeout: number, ...existingSignals: AbortSignal[]): { signal: ClearableSignal } -export function createTimeoutOptions (timeout: number, ...existingSignals: AbortSignal[]): AbortOptions { - let signal = AbortSignal.timeout(timeout) - setMaxListeners(Infinity, signal) - - if (existingSignals.length > 0) { - signal = anySignal([signal, ...existingSignals]) - setMaxListeners(Infinity, signal) - } - - return { - signal - } -} diff --git a/packages/utils/src/abstract-message-stream.ts b/packages/utils/src/abstract-message-stream.ts new file mode 100644 index 0000000000..b835b225d0 --- /dev/null +++ b/packages/utils/src/abstract-message-stream.ts @@ -0,0 +1,508 @@ +import { StreamResetError, StreamStateError, TypedEventEmitter, StreamMessageEvent, StreamBufferError, StreamResetEvent, StreamAbortEvent, StreamCloseEvent } from '@libp2p/interface' +import { pushable } from 'it-pushable' +import { raceEvent } from 'race-event' +import { Uint8ArrayList } from 'uint8arraylist' +import type { MessageStreamEvents, MessageStreamStatus, MessageStream, AbortOptions, MessageStreamTimeline, MessageStreamReadStatus, MessageStreamWriteStatus, MessageStreamDirection } from '@libp2p/interface' +import type { Logger } from '@libp2p/logger' + +const DEFAULT_MAX_PAUSE_BUFFER_LENGTH = Math.pow(2, 20) * 4 // 4MB + +export interface MessageStreamInit { + /** + * A Logger implementation used to log stream-specific information + */ + log: Logger + + /** + * If no data is sent or received in this number of ms the stream will be + * reset and an 'error' event emitted. + * + * @default 120_000 + */ + inactivityTimeout?: number + + /** + * The maximum number of bytes to store when paused. If receipt of more bytes + * from the remote end of the stream causes the buffer size to exceed this + * value the stream will be reset and an 'error' event emitted. + */ + maxPauseBufferLength?: number + + /** + * The stream direction + */ + direction?: MessageStreamDirection +} + +export interface SendResult { + /** + * The number of bytes from the passed buffer that were sent + */ + sentBytes: number + + /** + * If the underlying resource can accept more data immediately. If `true`, + * `sent` must equal the `.byteLength` of the buffer passed to `sendData`. + */ + canSendMore: boolean +} + +export abstract class AbstractMessageStream extends TypedEventEmitter implements MessageStream { + public readonly timeline: MessageStreamTimeline + public status: MessageStreamStatus + public readStatus: MessageStreamReadStatus + public writeStatus: MessageStreamWriteStatus + public remoteReadStatus: MessageStreamReadStatus + public remoteWriteStatus: MessageStreamWriteStatus + public inactivityTimeout: number + public maxPauseBufferLength: number + public readonly log: Logger + public direction: MessageStreamDirection + + protected readonly pauseBuffer: Uint8ArrayList + protected readonly sendQueue: Uint8ArrayList + + constructor (init: MessageStreamInit) { + super() + + this.log = init.log + this.direction = init.direction ?? 'outbound' + this.status = 'open' + this.readStatus = 'readable' + this.remoteReadStatus = 'readable' + this.writeStatus = 'writable' + this.remoteWriteStatus = 'writable' + this.inactivityTimeout = init.inactivityTimeout ?? 120_000 + this.maxPauseBufferLength = init.maxPauseBufferLength ?? 
DEFAULT_MAX_PAUSE_BUFFER_LENGTH + this.pauseBuffer = new Uint8ArrayList() + this.sendQueue = new Uint8ArrayList() + this.timeline = { + open: Date.now() + } + + this.processSendQueue = this.processSendQueue.bind(this) + + this.addEventListener('drain', () => { + this.log('begin sending again, write status was %s, send queue size', this.writeStatus, this.sendQueue.byteLength) + + if (this.writeStatus === 'paused') { + this.writeStatus = 'writable' + } + + this.processSendQueue() + }) + } + + async * [Symbol.asyncIterator] (): AsyncGenerator { + const output = pushable() + + const onMessage = (evt: StreamMessageEvent): void => { + output.push(evt.data) + } + this.addEventListener('message', onMessage) + + const onClose = (evt: StreamCloseEvent): void => { + output.end(evt.error) + } + this.addEventListener('close', onClose) + + const onRemoteCloseWrite = (): void => { + output.end() + } + this.addEventListener('remoteCloseWrite', onRemoteCloseWrite) + + try { + yield * output + } finally { + this.removeEventListener('message', onMessage) + this.removeEventListener('close', onClose) + this.removeEventListener('remoteCloseWrite', onRemoteCloseWrite) + } + } + + send (data: Uint8Array | Uint8ArrayList): boolean { + if (this.writeStatus !== 'writable' && this.writeStatus !== 'paused') { + // return true to make this a no-op otherwise callers might wait for a + // "drain" event that will never come + return true + } + + this.sendQueue.append(data) + return this.processSendQueue() + } + + /** + * Close immediately for reading and writing and send a reset message (local + * error) + */ + abort (err: Error): void { + if (this.status === 'closed' || this.status === 'aborted' || this.status === 'reset') { + return + } + + this.log.error('abort with error - %e', err) + + try { + this.sendReset(err) + } catch (err: any) { + this.log('failed to send reset to remote - %e', err) + } + + this.status = 'aborted' + this.writeStatus = 'closed' + this.remoteWriteStatus = 'closed' + this.readStatus = 'closed' + this.remoteReadStatus = 'closed' + + this.timeline.abort = Date.now() + this.timeline.closeWrite = Date.now() + this.timeline.remoteCloseWrite = Date.now() + this.timeline.closeRead = Date.now() + this.timeline.remoteCloseRead = Date.now() + + if (this.pauseBuffer.byteLength > 0) { + this.pauseBuffer.consume(this.pauseBuffer.byteLength) + } + + if (this.sendQueue.byteLength > 0) { + this.sendQueue.consume(this.sendQueue.byteLength) + } + + this.dispatchEvent(new StreamAbortEvent(err)) + } + + async close (options?: AbortOptions): Promise { + if (this.status !== 'open') { + return + } + + this.log.trace('closing gracefully') + + this.status = 'closing' + + await Promise.all([ + this.closeRead(options), + this.closeWrite(options) + ]) + + this.log.trace('closed gracefully') + + this.onClosed() + } + + async closeWrite (options?: AbortOptions): Promise { + if (this.writeStatus === 'closing' || this.writeStatus === 'closed') { + return + } + + const startingWriteStatus = this.writeStatus + + this.writeStatus = 'closing' + + if (startingWriteStatus === 'paused') { + this.log.trace('waiting for drain before closing writable end of stream, %d unsent bytes', this.sendQueue.byteLength) + await raceEvent(this, 'drain', options?.signal) + } + + await this.sendCloseWrite(options) + + this.writeStatus = 'closed' + this.timeline.closeWrite = Date.now() + + this.log('closed writable end gracefully') + + setTimeout(() => { + this.safeDispatchEvent('closeWrite') + + if (this.remoteWriteStatus === 'closed') { + 
this.onClosed() + } + }, 0) + } + + async closeRead (options?: AbortOptions): Promise { + if (this.readStatus === 'closing' || this.readStatus === 'closed') { + return + } + + this.readStatus = 'closing' + + await this.sendCloseRead(options) + + this.readStatus = 'closed' + this.timeline.closeRead = Date.now() + + this.log('closed readable end gracefully') + + setTimeout(() => { + this.safeDispatchEvent('closeRead') + }, 0) + } + + pause (): void { + this.log.trace('pausing readable end') + + if (this.readStatus !== 'readable') { + return + } + + this.readStatus = 'paused' + this.sendPause() + } + + resume (): void { + this.log.trace('resuming readable end') + + if (this.readStatus !== 'paused') { + return + } + + this.readStatus = 'readable' + + // emit any data that accumulated while we were paused + if (this.pauseBuffer.byteLength > 0) { + const data = new Uint8ArrayList(this.pauseBuffer) + this.pauseBuffer.consume(this.pauseBuffer.byteLength) + this.dispatchEvent(new StreamMessageEvent(data)) + } + + if (this.writeStatus === 'closing' || this.writeStatus === 'closed' || + this.remoteReadStatus === 'closing' || this.remoteReadStatus === 'closed' + ) { + return + } + + this.sendResume() + } + + push (data: Uint8Array | Uint8ArrayList): void { + if (data.byteLength === 0) { + return + } + + this.pauseBuffer.append(data) + + setTimeout(() => { + this.dispatchPauseBuffer() + }, 0) + } + + /** + * When an extending class reads data from it's implementation-specific source, + * call this method to allow the stream consumer to read the data. + */ + onData (data: Uint8Array | Uint8ArrayList): void { + // discard the data if our readable end is closed + if (this.readStatus === 'closing' || this.readStatus === 'closed') { + return + } + + // check the pause buffer in case data has been pushed onto the stream + this.dispatchPauseBuffer() + + if (data.byteLength === 0) { + return + } + + if (this.readStatus === 'readable') { + this.dispatchEvent(new StreamMessageEvent(data)) + } else if (this.readStatus === 'paused') { + // queue the message + this.pauseBuffer.append(data) + + // abort if the pause buffer is too large + if (this.pauseBuffer.byteLength > this.maxPauseBufferLength) { + this.abort(new StreamBufferError(`Pause buffer length of ${this.pauseBuffer.byteLength} exceeded limit of ${this.maxPauseBufferLength}`)) + } + } else { + this.abort(new StreamStateError(`Stream readable was "${this.readStatus}" and not "reaable" or "paused"`)) + } + } + + /** + * Receive a reset message - close immediately for reading and writing (remote + * error) + */ + onRemoteReset (): void { + this.log.trace('on remote reset') + + if (this.status === 'closed' || this.status === 'aborted' || this.status === 'reset') { + return + } + + this.status = 'reset' + this.writeStatus = 'closed' + this.remoteWriteStatus = 'closed' + this.remoteReadStatus = 'closed' + + this.timeline.reset = Date.now() + this.timeline.closeWrite = Date.now() + this.timeline.remoteCloseWrite = Date.now() + this.timeline.remoteCloseRead = Date.now() + + if (this.pauseBuffer.byteLength === 0) { + this.readStatus = 'closed' + this.timeline.closeRead = Date.now() + } + + const err = new StreamResetError() + + this.dispatchEvent(new StreamResetEvent(err)) + } + + /** + * Called by extending classes when the remote closed its writable end + */ + onRemoteCloseWrite (): void { + if (this.remoteWriteStatus === 'closed') { + return + } + + this.log.trace('on remote close write - this.writeStatus %s', this.writeStatus) + + this.remoteWriteStatus = 
'closed' + this.timeline.remoteCloseWrite = Date.now() + + this.safeDispatchEvent('remoteCloseWrite') + + this.maybeCloseRead() + + if (this.writeStatus === 'closed') { + this.onClosed() + } + } + + /** + * Called by extending classes when the remote closed its readable end + */ + onRemoteCloseRead (): void { + this.log.trace('on remote close read - this.writeStatus %s', this.writeStatus) + + this.remoteReadStatus = 'closed' + this.timeline.remoteCloseRead = Date.now() + + this.safeDispatchEvent('remoteCloseRead') + + if (this.writeStatus === 'closed') { + this.onClosed() + } + } + + /** + * This can be called by extending classes when an underlying transport + * closed. No further messages will be sent or received. + */ + onClosed (): void { + if (this.status !== 'open') { + return + } + + this.status = 'closed' + this.timeline.close = Date.now() + + this.maybeCloseRead() + + setTimeout(() => { + this.dispatchEvent(new StreamCloseEvent()) + }, 0) + } + + private maybeCloseRead (): void { + if (this.readStatus === 'readable' && this.pauseBuffer.byteLength === 0) { + this.readStatus = 'closed' + this.timeline.closeRead = Date.now() + + setTimeout(() => { + this.safeDispatchEvent('closeRead') + }, 0) + } + } + + private processSendQueue (): boolean { + // don't send data if the underlying send buffer is full + if (this.writeStatus === 'paused') { + this.log('pause sending because local write status was "paused"') + return false + } + + if (this.sendQueue.byteLength === 0) { + this.log('not sending because send queue was empty') + return true + } + + const toSend = this.sendQueue.sublist() + const totalBytes = toSend.byteLength + const { sentBytes, canSendMore } = this.sendData(toSend) + this.sendQueue.consume(sentBytes) + + if (!canSendMore) { + this.log('pausing sending because underlying stream is full') + this.writeStatus = 'paused' + return canSendMore + } + + if (sentBytes !== totalBytes) { + this.abort(new Error(`All bytes from current chunk must be sent before continuing - sent ${sentBytes}/${totalBytes}`)) + } + + return canSendMore + } + + private dispatchPauseBuffer (): void { + if (this.pauseBuffer.byteLength === 0) { + return + } + + // discard the pause buffer if our readable end is closed + if (this.readStatus === 'closing' || this.readStatus === 'closed') { + this.pauseBuffer.consume(this.pauseBuffer.byteLength) + } else if (this.readStatus === 'readable') { + const buf = this.pauseBuffer.sublist() + this.pauseBuffer.consume(buf.byteLength) + + this.dispatchEvent(new StreamMessageEvent(buf)) + } + } + + /** + * Send a data message to the remote end of the stream. Implementations of + * this method should return the number of bytes from the passed buffer that + * were sent successfully and if the underlying resource can accept more data. + * + * The implementation should always attempt to send the maximum amount of data + * possible. + * + * Returning a result that means the data was only partially sent but that the + * underlying resource can accept more data is invalid. 
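To illustrate the `SendResult` contract described above, here is a minimal sketch (hypothetical, not part of this change set) of an `AbstractMessageStream` subclass. `FakeSocket` and `ExampleMessageStream` are invented names standing in for whatever a concrete transport provides; the import of `AbstractMessageStream` from `@libp2p/utils` follows the new single entry point exported by `src/index.ts`:

```ts
import { AbstractMessageStream } from '@libp2p/utils'
import type { MessageStreamInit, SendResult } from '@libp2p/utils'
import type { AbortOptions } from '@libp2p/interface'
import type { Uint8ArrayList } from 'uint8arraylist'

// hypothetical underlying resource - accepts chunks and reports spare capacity
interface FakeSocket {
  write(buf: Uint8Array): void
  capacity(): number
}

export class ExampleMessageStream extends AbstractMessageStream {
  constructor (private readonly socket: FakeSocket, init: MessageStreamInit) {
    super(init)
  }

  sendData (data: Uint8ArrayList): SendResult {
    // always try to send the whole chunk - reporting a partial send while also
    // claiming the resource can accept more data would violate the contract
    for (const buf of data) {
      this.socket.write(buf)
    }

    return {
      sentBytes: data.byteLength,
      // returning false pauses the superclass's send queue until a 'drain'
      // event is dispatched on this stream
      canSendMore: this.socket.capacity() > 0
    }
  }

  sendReset (): void {}
  sendPause (): void {}
  sendResume (): void {}

  async sendCloseWrite (options?: AbortOptions): Promise<void> {
    options?.signal?.throwIfAborted()
  }

  async sendCloseRead (options?: AbortOptions): Promise<void> {
    options?.signal?.throwIfAborted()
  }
}
```

The superclass's `send()` buffers data into its internal send queue and invokes `sendData` itself, so a subclass only has to describe how bytes reach the wire and whether the underlying resource has capacity left.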
+ */ + abstract sendData (data: Uint8ArrayList): SendResult + + /** + * Send a reset message to the remote end of the stream + */ + abstract sendReset (err: Error): void + + /** + * If supported, instruct the remote end of the stream to temporarily stop + * sending data messages + */ + abstract sendPause (): void + + /** + * If supported, inform the remote end of the stream they may resume sending + * data messages + */ + abstract sendResume (): void + + /** + * Send a message to the remote end of the stream, informing them that we will + * send no more data messages. + */ + abstract sendCloseWrite (options?: AbortOptions): Promise + + /** + * If supported, send a message to the remote end of the stream, informing + * them that we will read no more data messages. + */ + abstract sendCloseRead (options?: AbortOptions): Promise +} diff --git a/packages/utils/src/abstract-multiaddr-connection.ts b/packages/utils/src/abstract-multiaddr-connection.ts new file mode 100644 index 0000000000..6c1f927ddc --- /dev/null +++ b/packages/utils/src/abstract-multiaddr-connection.ts @@ -0,0 +1,47 @@ +import { AbstractMessageStream } from './abstract-message-stream.ts' +import type { MessageStreamInit } from './abstract-message-stream.ts' +import type { CounterGroup, Logger, MultiaddrConnection, MessageStreamDirection } from '@libp2p/interface' +import type { Multiaddr } from '@multiformats/multiaddr' + +export interface AbstractMultiaddrConnectionInit extends Omit { + remoteAddr: Multiaddr + direction: MessageStreamDirection + log: Logger + inactivityTimeout?: number + localAddr?: Multiaddr + metricPrefix?: string + metrics?: CounterGroup +} + +export abstract class AbstractMultiaddrConnection extends AbstractMessageStream implements MultiaddrConnection { + public remoteAddr: Multiaddr + + private metricPrefix: string + private metrics?: CounterGroup + + constructor (init: AbstractMultiaddrConnectionInit) { + super(init) + + this.metricPrefix = init.metricPrefix ?? 
'' + this.metrics = init.metrics + this.remoteAddr = init.remoteAddr + + this.addEventListener('close', (evt) => { + this.metrics?.increment({ [`${this.metricPrefix}end`]: true }) + + if (evt.error != null) { + if (evt.local) { + this.metrics?.increment({ [`${this.metricPrefix}abort`]: true }) + } else { + this.metrics?.increment({ [`${this.metricPrefix}reset`]: true }) + } + } else { + if (evt.local) { + this.metrics?.increment({ [`${this.metricPrefix}_local_close`]: true }) + } else { + this.metrics?.increment({ [`${this.metricPrefix}_remote_close`]: true }) + } + } + }) + } +} diff --git a/packages/utils/src/abstract-stream-muxer.ts b/packages/utils/src/abstract-stream-muxer.ts new file mode 100644 index 0000000000..2aff43212c --- /dev/null +++ b/packages/utils/src/abstract-stream-muxer.ts @@ -0,0 +1,171 @@ +import { MuxerClosedError, TypedEventEmitter } from '@libp2p/interface' +import { raceSignal } from 'race-signal' +import { isPromise } from './is-promise.ts' +import type { AbstractStream } from './abstract-stream.ts' +import type { AbortOptions, CreateStreamOptions, Logger, MessageStream, Stream, StreamMuxer, StreamMuxerEvents, StreamMuxerStatus } from '@libp2p/interface' +import type { Uint8ArrayList } from 'uint8arraylist' + +export interface AbstractStreamMuxerInit { + /** + * The protocol name for the muxer + */ + protocol: string + + /** + * The name of the muxer, used to create a new logging scope from the passed + * connection's logger + */ + name: string +} + +export abstract class AbstractStreamMuxer extends TypedEventEmitter> implements StreamMuxer { + public streams: MuxedStream[] + public protocol: string + public status: StreamMuxerStatus + + protected log: Logger + protected maConn: MessageStream + + constructor (maConn: MessageStream, init: AbstractStreamMuxerInit) { + super() + + this.maConn = maConn + this.protocol = init.protocol + this.streams = [] + this.status = 'open' + this.log = maConn.log.newScope(init.name) + + // read/write all data from/to underlying maConn + this.maConn.addEventListener('message', (evt) => { + try { + this.onData(evt.data) + } catch (err: any) { + this.abort(err) + this.maConn.abort(err) + } + }) + + // close muxer when underlying maConn closes + this.maConn.addEventListener('close', (evt) => { + if (this.status === 'open') { + this.onTransportClosed() + } + }) + + // signal stream writers when the underlying connection can accept more data + this.maConn.addEventListener('drain', () => { + this.log('underlying stream drained, signal %d streams to continue writing', this.streams.length) + + this.streams.forEach(stream => { + stream.safeDispatchEvent('drain') + }) + }) + } + + send (data: Uint8Array | Uint8ArrayList): boolean { + return this.maConn.send(data) + } + + async close (options?: AbortOptions): Promise { + if (this.status === 'closed') { + return + } + + this.status = 'closing' + + await raceSignal(Promise.all( + [...this.streams].map(async s => { + await s.close(options) + }) + ), options?.signal) + + this.status = 'closed' + } + + abort (err: Error): void { + if (this.status === 'closed') { + return + } + + this.status = 'closing' + + ;[...this.streams].forEach(s => { + s.abort(err) + }) + + this.status = 'closed' + } + + onTransportClosed (): void { + this.status = 'closing' + + try { + [...this.streams].forEach(stream => { + stream.onMuxerClosed() + }) + } catch (err: any) { + this.abort(err) + } + + this.status = 'closed' + } + + async createStream (options?: CreateStreamOptions): Promise { + if (this.status !== 'open') { 
+ throw new MuxerClosedError() + } + + let stream = this.onCreateStream(options ?? {}) + + if (isPromise(stream)) { + stream = await stream + } + + this.streams.push(stream) + this.cleanUpStream(stream) + + return stream + } + + /** + * Extending classes should invoke this method when a new stream was created + * by the remote muxer + */ + onRemoteStream (stream: MuxedStream): void { + this.streams.push(stream) + this.cleanUpStream(stream) + + this.safeDispatchEvent('stream', { + detail: stream + }) + } + + private cleanUpStream (stream: Stream): void { + const onEnd = (): void => { + const index = this.streams.findIndex(s => s === stream) + + if (index !== -1) { + this.streams.splice(index, 1) + } + + // TODO: standardize metrics + // this.metrics?.increment({ [`${stream.direction}_stream_end`]: true }) + // this.metrics?.increment({ [`${stream.direction}_stream_error`]: true }) + } + + // TODO: standardize metrics + // this.metrics?.increment({ [`${stream.direction}_stream`]: true }) + + stream.addEventListener('close', onEnd) + } + + /** + * A new outgoing stream needs to be created + */ + abstract onCreateStream (options: CreateStreamOptions): MuxedStream | Promise + + /** + * Multiplexed data was received from the remote muxer + */ + abstract onData (data: Uint8Array | Uint8ArrayList): void +} diff --git a/packages/utils/src/abstract-stream.ts b/packages/utils/src/abstract-stream.ts index 19b9569c29..0e3ea60e15 100644 --- a/packages/utils/src/abstract-stream.ts +++ b/packages/utils/src/abstract-stream.ts @@ -1,515 +1,51 @@ -import { StreamResetError, StreamStateError } from '@libp2p/interface' -import { pushable } from 'it-pushable' -import defer from 'p-defer' -import { raceSignal } from 'race-signal' -import { Uint8ArrayList } from 'uint8arraylist' -import { closeSource } from './close-source.js' -import type { AbortOptions, Direction, ReadStatus, Stream, StreamStatus, StreamTimeline, WriteStatus } from '@libp2p/interface' -import type { Logger } from '@libp2p/logger' -import type { Pushable } from 'it-pushable' -import type { Source } from 'it-stream-types' -import type { DeferredPromise } from 'p-defer' +import { AbstractMessageStream } from './abstract-message-stream.js' +import type { MessageStreamInit } from './abstract-message-stream.js' +import type { Stream } from '@libp2p/interface' -const DEFAULT_SEND_CLOSE_WRITE_TIMEOUT = 5000 - -export interface AbstractStreamInit { +export interface AbstractStreamInit extends MessageStreamInit { /** * A unique identifier for this stream */ id: string /** - * The stream direction - */ - direction: Direction - - /** - * A Logger implementation used to log stream-specific information - */ - log: Logger - - /** - * User specific stream metadata - */ - metadata?: Record - - /** - * Invoked when the stream ends - */ - onEnd?(err?: Error): void - - /** - * Invoked when the readable end of the stream is closed - */ - onCloseRead?(): void - - /** - * Invoked when the writable end of the stream is closed - */ - onCloseWrite?(): void - - /** - * Invoked when the stream has been reset by the remote - */ - onReset?(): void - - /** - * Invoked when the stream has errored + * The protocol name for the stream, if it is known */ - onAbort?(err: Error): void - - /** - * How long to wait in ms for stream data to be written to the underlying - * connection when closing the writable end of the stream. 
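Because streams built on these base classes now expose a message/event based API rather than sink/source pairs, a short consumer-side sketch may help. The helper functions below are hypothetical and not part of this diff; `raceEvent` is the same `race-event` utility the base classes use for their own `drain` handling, and the `MessageStream` members used here (`send`, `closeWrite`, the async iterator and the `drain` event) are the ones provided by `AbstractMessageStream`:

```ts
import { raceEvent } from 'race-event'
import type { MessageStream } from '@libp2p/interface'

// write chunks, pausing whenever send() reports that the send queue is full
async function writeAll (stream: MessageStream, chunks: Uint8Array[]): Promise<void> {
  for (const chunk of chunks) {
    if (!stream.send(chunk)) {
      // wait for the stream to signal that it can accept more data
      await raceEvent(stream, 'drain')
    }
  }

  await stream.closeWrite()
}

// read until the remote closes its writable end or the stream closes
async function readAll (stream: MessageStream): Promise<number> {
  let received = 0

  for await (const chunk of stream) {
    received += chunk.byteLength
  }

  return received
}
```

This mirrors the migration visible in the webtransport browser test above, where `stream.sink(gen())` becomes a loop calling `stream.send(buf)` and the stream itself is iterated instead of `stream.source`.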
- * - * @default 500 - */ - closeTimeout?: number - - /** - * After the stream sink has closed, a limit on how long it takes to send - * a close-write message to the remote peer. - */ - sendCloseWriteTimeout?: number + protocol?: string } -function isPromise (thing: any): thing is Promise { - if (thing == null) { - return false - } - - return typeof thing.then === 'function' && - typeof thing.catch === 'function' && - typeof thing.finally === 'function' -} - -export abstract class AbstractStream implements Stream { +export abstract class AbstractStream extends AbstractMessageStream implements Stream { public id: string - public direction: Direction - public timeline: StreamTimeline - public protocol?: string - public metadata: Record - public source: AsyncGenerator - public status: StreamStatus - public readStatus: ReadStatus - public writeStatus: WriteStatus - public readonly log: Logger - - private readonly sinkController: AbortController - private readonly sinkEnd: DeferredPromise - private readonly closed: DeferredPromise - private endErr: Error | undefined - private readonly streamSource: Pushable - private readonly onEnd?: (err?: Error) => void - private readonly onCloseRead?: () => void - private readonly onCloseWrite?: () => void - private readonly onReset?: () => void - private readonly onAbort?: (err: Error) => void - private readonly sendCloseWriteTimeout: number - private sendingData?: DeferredPromise + public protocol: string constructor (init: AbstractStreamInit) { - this.sinkController = new AbortController() - this.sinkEnd = defer() - this.closed = defer() - this.log = init.log - - // stream status - this.status = 'open' - this.readStatus = 'ready' - this.writeStatus = 'ready' + super(init) this.id = init.id - this.metadata = init.metadata ?? {} - this.direction = init.direction - this.timeline = { - open: Date.now() - } - this.sendCloseWriteTimeout = init.sendCloseWriteTimeout ?? DEFAULT_SEND_CLOSE_WRITE_TIMEOUT - - this.onEnd = init.onEnd - this.onCloseRead = init.onCloseRead - this.onCloseWrite = init.onCloseWrite - this.onReset = init.onReset - this.onAbort = init.onAbort - - this.source = this.streamSource = pushable({ - onEnd: (err) => { - if (err != null) { - this.log.trace('source ended with error', err) - } else { - this.log.trace('source ended') - } - - this.onSourceEnd(err) - } - }) - - // necessary because the libp2p upgrader wraps the sink function - this.sink = this.sink.bind(this) - } - - async sink (source: Source): Promise { - if (this.writeStatus !== 'ready') { - throw new StreamStateError(`writable end state is "${this.writeStatus}" not "ready"`) - } - - try { - this.writeStatus = 'writing' - - const options: AbortOptions = { - signal: this.sinkController.signal - } - - if (this.direction === 'outbound') { // If initiator, open a new stream - const res = this.sendNewStream(options) - - if (isPromise(res)) { - await res - } - } - - const abortListener = (): void => { - closeSource(source, this.log) - } - - try { - this.sinkController.signal.addEventListener('abort', abortListener) - - this.log.trace('sink reading from source') - - for await (let data of source) { - data = data instanceof Uint8Array ? 
new Uint8ArrayList(data) : data - - const res = this.sendData(data, options) - - if (isPromise(res)) { - this.sendingData = defer() - await res - this.sendingData.resolve() - this.sendingData = undefined - } - } - } finally { - this.sinkController.signal.removeEventListener('abort', abortListener) - } - - this.log.trace('sink finished reading from source, write status is "%s"', this.writeStatus) - - if (this.writeStatus === 'writing') { - this.writeStatus = 'closing' - - this.log.trace('send close write to remote') - await this.sendCloseWrite({ - signal: AbortSignal.timeout(this.sendCloseWriteTimeout) - }) - - this.writeStatus = 'closed' - } - - this.onSinkEnd() - } catch (err: any) { - this.log.trace('sink ended with error, calling abort with error', err) - this.abort(err) - - throw err - } finally { - this.log.trace('resolve sink end') - this.sinkEnd.resolve() - } - } - - protected onSourceEnd (err?: Error): void { - if (this.timeline.closeRead != null) { - return - } - - this.timeline.closeRead = Date.now() - this.readStatus = 'closed' - - if (err != null && this.endErr == null) { - this.endErr = err - } - - this.onCloseRead?.() - - if (this.timeline.closeWrite != null) { - this.log.trace('source and sink ended') - this.timeline.close = Date.now() - - if (this.status !== 'aborted' && this.status !== 'reset') { - this.status = 'closed' - } - - if (this.onEnd != null) { - this.onEnd(this.endErr) - } - - this.closed.resolve() - } else { - this.log.trace('source ended, waiting for sink to end') - } - } - - protected onSinkEnd (err?: Error): void { - if (this.timeline.closeWrite != null) { - return - } - - this.timeline.closeWrite = Date.now() - this.writeStatus = 'closed' - - if (err != null && this.endErr == null) { - this.endErr = err - } - - this.onCloseWrite?.() - - if (this.timeline.closeRead != null) { - this.log.trace('sink and source ended') - this.timeline.close = Date.now() - - if (this.status !== 'aborted' && this.status !== 'reset') { - this.status = 'closed' - } - - if (this.onEnd != null) { - this.onEnd(this.endErr) - } - - this.closed.resolve() - } else { - this.log.trace('sink ended, waiting for source to end') - } - } - - // Close for both Reading and Writing - async close (options?: AbortOptions): Promise { - if (this.status !== 'open') { - return - } - - this.log.trace('closing gracefully') - - this.status = 'closing' - - // wait for read and write ends to close - await raceSignal(Promise.all([ - this.closeWrite(options), - this.closeRead(options), - this.closed.promise - ]), options?.signal) - - this.status = 'closed' - - this.log.trace('closed gracefully') - } - - async closeRead (options: AbortOptions = {}): Promise { - if (this.readStatus === 'closing' || this.readStatus === 'closed') { - return - } - - this.log.trace('closing readable end of stream with starting read status "%s"', this.readStatus) - - const readStatus = this.readStatus - this.readStatus = 'closing' - - if (this.status !== 'reset' && this.status !== 'aborted' && this.timeline.closeRead == null) { - this.log.trace('send close read to remote') - await this.sendCloseRead(options) - } - - if (readStatus === 'ready') { - this.log.trace('ending internal source queue with %d queued bytes', this.streamSource.readableLength) - this.streamSource.end() - } - - this.log.trace('closed readable end of stream') - } - - async closeWrite (options: AbortOptions = {}): Promise { - if (this.writeStatus === 'closing' || this.writeStatus === 'closed') { - return - } - - this.log.trace('closing writable end of stream with 
starting write status "%s"', this.writeStatus) - - if (this.writeStatus === 'ready') { - this.log.trace('sink was never sunk, sink an empty array') - - await raceSignal(this.sink([]), options.signal) - } - - if (this.writeStatus === 'writing') { - // try to let sending outgoing data succeed - if (this.sendingData != null) { - await raceSignal(this.sendingData.promise, options.signal) - } - - // stop reading from the source passed to `.sink` - this.log.trace('aborting source passed to .sink') - this.sinkController.abort() - await raceSignal(this.sinkEnd.promise, options.signal) - } - - this.writeStatus = 'closed' - - this.log.trace('closed writable end of stream') + this.protocol = init.protocol ?? '' } /** - * Close immediately for reading and writing and send a reset message (local - * error) + * The muxer this stream was created by has closed - this stream should exit + * without sending any further messages. Any unread data can still be read but + * otherwise this stream is now closed. */ - abort (err: Error): void { - if (this.status === 'closed' || this.status === 'aborted' || this.status === 'reset') { - return + onMuxerClosed (): void { + if (this.remoteReadStatus !== 'closed') { + this.remoteReadStatus = 'closed' + this.timeline.remoteCloseRead = Date.now() } - this.log('abort with error', err) - - // try to send a reset message - this.log('try to send reset to remote') - const res = this.sendReset() - - if (isPromise(res)) { - res.catch((err) => { - this.log.error('error sending reset message', err) - }) + if (this.remoteWriteStatus !== 'closed') { + this.remoteWriteStatus = 'closed' + this.timeline.remoteCloseWrite = Date.now() } - this.status = 'aborted' - this.timeline.abort = Date.now() - this._closeSinkAndSource(err) - this.onAbort?.(err) - } - - /** - * Receive a reset message - close immediately for reading and writing (remote - * error) - */ - reset (): void { - if (this.status === 'closed' || this.status === 'aborted' || this.status === 'reset') { - return + if (this.writeStatus !== 'closed') { + this.writeStatus = 'closed' + this.timeline.closeWrite = Date.now() } - const err = new StreamResetError('stream reset') - - this.status = 'reset' - this.timeline.reset = Date.now() - this._closeSinkAndSource(err) - this.onReset?.() - } - - _closeSinkAndSource (err?: Error): void { - this._closeSink(err) - this._closeSource(err) - } - - _closeSink (err?: Error): void { - // if the sink function is running, cause it to end - if (this.writeStatus === 'writing') { - this.log.trace('end sink source') - this.sinkController.abort() - } - - this.onSinkEnd(err) - } - - _closeSource (err?: Error): void { - // if the source is not ending, end it - if (this.readStatus !== 'closing' && this.readStatus !== 'closed') { - this.log.trace('ending source with %d bytes to be read by consumer', this.streamSource.readableLength) - this.readStatus = 'closing' - this.streamSource.end(err) - } + this.onClosed() } - - /** - * The remote closed for writing so we should expect to receive no more - * messages - */ - remoteCloseWrite (): void { - if (this.readStatus === 'closing' || this.readStatus === 'closed') { - this.log('received remote close write but local source is already closed') - return - } - - this.log.trace('remote close write') - this._closeSource() - } - - /** - * The remote closed for reading so we should not send any more - * messages - */ - remoteCloseRead (): void { - if (this.writeStatus === 'closing' || this.writeStatus === 'closed') { - this.log('received remote close read but local 
sink is already closed') - return - } - - this.log.trace('remote close read') - this._closeSink() - } - - /** - * The underlying muxer has closed, no more messages can be sent or will - * be received, close immediately to free up resources - */ - destroy (): void { - if (this.status === 'closed' || this.status === 'aborted' || this.status === 'reset') { - this.log('received destroy but we are already closed') - return - } - - this.log.trace('stream destroyed') - - this._closeSinkAndSource() - } - - /** - * When an extending class reads data from it's implementation-specific source, - * call this method to allow the stream consumer to read the data. - */ - sourcePush (data: Uint8ArrayList): void { - this.streamSource.push(data) - } - - /** - * Returns the amount of unread data - can be used to prevent large amounts of - * data building up when the stream consumer is too slow. - */ - sourceReadableLength (): number { - return this.streamSource.readableLength - } - - /** - * Send a message to the remote muxer informing them a new stream is being - * opened - */ - abstract sendNewStream (options?: AbortOptions): void | Promise - - /** - * Send a data message to the remote muxer - */ - abstract sendData (buf: Uint8ArrayList, options?: AbortOptions): void | Promise - - /** - * Send a reset message to the remote muxer - */ - abstract sendReset (options?: AbortOptions): void | Promise - - /** - * Send a message to the remote muxer, informing them no more data messages - * will be sent by this end of the stream - */ - abstract sendCloseWrite (options?: AbortOptions): void | Promise - - /** - * Send a message to the remote muxer, informing them no more data messages - * will be read by this end of the stream - */ - abstract sendCloseRead (options?: AbortOptions): void | Promise } diff --git a/packages/utils/src/array-equals.ts b/packages/utils/src/array-equals.ts deleted file mode 100644 index e94499acbb..0000000000 --- a/packages/utils/src/array-equals.ts +++ /dev/null @@ -1,34 +0,0 @@ -/** - * @packageDocumentation - * - * Provides strategies ensure arrays are equivalent. - * - * @example - * - * ```typescript - * import { arrayEquals } from '@libp2p/utils/array-equals' - * import { multiaddr } from '@multformats/multiaddr' - * - * const ma1 = multiaddr('/ip4/127.0.0.1/tcp/9000'), - * const ma2 = multiaddr('/ip4/82.41.53.1/tcp/9000') - * - * console.info(arrayEquals([ma1], [ma1])) // true - * console.info(arrayEquals([ma1], [ma2])) // false - * ``` - */ - -/** - * Verify if two arrays of non primitive types with the "equals" function are equal. - * Compatible with multiaddr, peer-id and others. 
- */ -export function arrayEquals (a: any[], b: any[]): boolean { - const sort = (a: any, b: any): number => a.toString().localeCompare(b.toString()) - - if (a.length !== b.length) { - return false - } - - b.sort(sort) - - return a.sort(sort).every((item, index) => b[index].equals(item)) -} diff --git a/packages/utils/src/close-source.ts b/packages/utils/src/close-source.ts deleted file mode 100644 index 34738838d5..0000000000 --- a/packages/utils/src/close-source.ts +++ /dev/null @@ -1,14 +0,0 @@ -import { getIterator } from 'get-iterator' -import { isPromise } from './is-promise.js' -import type { Logger } from '@libp2p/logger' -import type { Source } from 'it-stream-types' - -export function closeSource (source: Source, log: Logger): void { - const res = getIterator(source).return?.() - - if (isPromise(res)) { - res.catch(err => { - log.error('could not cause iterator to return', err) - }) - } -} diff --git a/packages/utils/src/close.ts b/packages/utils/src/close.ts deleted file mode 100644 index eeba0e59b4..0000000000 --- a/packages/utils/src/close.ts +++ /dev/null @@ -1,65 +0,0 @@ -import type { Connection, Stream, AbortOptions } from '@libp2p/interface' - -/** - * Close the passed stream, falling back to aborting the stream if closing - * cleanly fails. - */ -export async function safelyCloseStream (stream?: Stream, options?: AbortOptions): Promise { - try { - await stream?.close(options) - } catch (err: any) { - stream?.abort(err) - } -} - -/** - * These are speculative protocols that are run automatically on connection open - * so are usually not the reason the connection was opened. - * - * Consequently when requested it should be safe to close connections that only - * have these protocol streams open. - */ -const DEFAULT_CLOSABLE_PROTOCOLS = [ - // identify - '/ipfs/id/1.0.0', - - // identify-push - '/ipfs/id/push/1.0.0', - - // autonat - '/libp2p/autonat/1.0.0', - - // dcutr - '/libp2p/dcutr' -] - -export interface SafelyCloseConnectionOptions extends AbortOptions { - /** - * Only close the stream if it either has no protocol streams open or only - * ones in this list. - * - * @default ['/ipfs/id/1.0.0'] - */ - closableProtocols?: string[] -} - -/** - * Close the passed connection if it has no streams, or only closable protocol - * streams, falling back to aborting the connection if closing it cleanly fails. - */ -export async function safelyCloseConnectionIfUnused (connection?: Connection, options?: SafelyCloseConnectionOptions): Promise { - const streamProtocols = connection?.streams?.map(stream => stream.protocol) ?? [] - const closableProtocols = options?.closableProtocols ?? 
DEFAULT_CLOSABLE_PROTOCOLS - - // if the connection has protocols not in the closable protocols list, do not - // close the connection - if (streamProtocols.filter(proto => proto != null && !closableProtocols.includes(proto)).length > 0) { - return - } - - try { - await connection?.close(options) - } catch (err: any) { - connection?.abort(err) - } -} diff --git a/packages/utils/src/errors.ts b/packages/utils/src/errors.ts index f801149fe1..60ce7ac6ad 100644 --- a/packages/utils/src/errors.ts +++ b/packages/utils/src/errors.ts @@ -27,3 +27,8 @@ export class QueueFullError extends Error { this.name = 'QueueFullError' } } + +export class UnexpectedEOFError extends Error { + static name = 'UnexpectedEOFError' + name = 'UnexpectedEOFError' +} diff --git a/packages/utils/src/index.ts b/packages/utils/src/index.ts index 6628b48d6d..db406d747f 100644 --- a/packages/utils/src/index.ts +++ b/packages/utils/src/index.ts @@ -4,4 +4,36 @@ * This module contains utility functions used by libp2p modules. */ -export {} +export * from './filters/index.js' +export * from './multiaddr/index.js' +export * from './queue/index.js' +export * from './abstract-message-stream.js' +export * from './abstract-multiaddr-connection.js' +export * from './abstract-stream-muxer.js' +export * from './abstract-stream.js' +export * from './adaptive-timeout.js' +export * from './debounce.js' +export * from './errors.js' +export * from './get-thin-waist-addresses.js' +export * from './global-unicast-ip.js' +export * from './ip-port-to-multiaddr.js' +export * from './is-async-generator.js' +export * from './is-generator.js' +export * from './is-promise.js' +export * from './length-prefixed-decoder.js' +export * from './link-local-ip.js' +export * from './merge-options.js' +export * from './mock-muxer.js' +export * from './mock-stream.js' +export * from './moving-average.js' +export * from './multiaddr-connection-pair.js' +export * from './peer-queue.js' +export * from './priority-queue.js' +export * from './private-ip.js' +export * from './rate-limiter.js' +export * from './repeating-task.js' +export * from './stream-pair.js' +export * from './stream-utils.js' +export * from './socket-writer.js' +export * from './tracked-list.js' +export * from './tracked-map.js' diff --git a/packages/utils/src/length-prefixed-decoder.ts b/packages/utils/src/length-prefixed-decoder.ts new file mode 100644 index 0000000000..8f97a2d44c --- /dev/null +++ b/packages/utils/src/length-prefixed-decoder.ts @@ -0,0 +1,98 @@ +import { InvalidParametersError } from '@libp2p/interface' +import * as varint from 'uint8-varint' +import { Uint8ArrayList } from 'uint8arraylist' +import { InvalidMessageLengthError } from './stream-utils.ts' + +const DEFAULT_MAX_BUFFER_SIZE = 1024 * 1024 * 4 +const DEFAULT_MAX_DATA_LENGTH = 1024 * 1024 * 4 + +export interface LengthPrefixedDecoderInit { + /** + * How large the internal buffer is allowed to grow - attempting to store more + * data than this will throw + */ + maxBufferSize?: number + + /** + * Throw an error if the message that would be read from the buffer is larger + * than this value + */ + maxDataLength?: number + + /** + * Read a varint from the buffer + */ + lengthDecoder?(data: Uint8ArrayList | Uint8Array): number + + /** + * Return how many bytes it takes to encode the passed value + */ + encodingLength?(length: number): number +} + +/** + * Decode length-prefixed data from a buffer + */ +export class LengthPrefixedDecoder { + private readonly buffer: Uint8ArrayList + private readonly maxBufferSize: number + 
private readonly lengthDecoder: (data: Uint8ArrayList | Uint8Array) => number + private readonly maxDataLength: number + private readonly encodingLength: (length: number) => number + + constructor (init: LengthPrefixedDecoderInit = {}) { + this.buffer = new Uint8ArrayList() + this.maxBufferSize = init.maxBufferSize ?? DEFAULT_MAX_BUFFER_SIZE + this.maxDataLength = init.maxDataLength ?? DEFAULT_MAX_DATA_LENGTH + this.lengthDecoder = init.lengthDecoder ?? varint.decode + this.encodingLength = init.encodingLength ?? varint.encodingLength + } + + /** + * Decodes length-prefixed data + */ + * decode (buf: Uint8Array | Uint8ArrayList): Generator { + this.buffer.append(buf) + + if (this.buffer.byteLength > this.maxBufferSize) { + throw new InvalidParametersError(`Buffer length limit exceeded - ${this.buffer.byteLength}/${this.maxBufferSize}`) + } + + // Loop to consume as many bytes from the buffer as possible + // Eg: when a single chunk contains several frames + while (true) { + let dataLength: number + + try { + dataLength = this.lengthDecoder(this.buffer) + } catch (err) { + if (err instanceof RangeError) { + // ignore errors where we don't have enough data to read the length + // prefix + break + } + + throw err + } + + if (dataLength < 0 || dataLength > this.maxDataLength) { + throw new InvalidMessageLengthError('Invalid message length') + } + + const lengthLength = this.encodingLength(dataLength) + const chunkLength = lengthLength + dataLength + + if (this.buffer.byteLength >= chunkLength) { + const buf = this.buffer.sublist(lengthLength, chunkLength) + + this.buffer.consume(chunkLength) + + if (buf.byteLength > 0) { + yield buf + } + } else { + break + } + } + } +} diff --git a/packages/utils/src/message-queue.ts b/packages/utils/src/message-queue.ts new file mode 100644 index 0000000000..7c74e46a09 --- /dev/null +++ b/packages/utils/src/message-queue.ts @@ -0,0 +1,94 @@ +import delay from 'delay' +import { TypedEventEmitter } from 'main-event' +import { Queue } from './queue/index.js' +import type { Logger } from '@libp2p/interface' + +export interface MessageQueueMessages { + /** + * Emitted when the queue is empty + */ + drain: Event +} + +export interface MessageQueueInit { + /** + * How much delay there should be between each message send in ms (note that + * even 0 introduces a small delay) + * + * @default 0 + */ + delay?: number + + /** + * How many messages to hold in the send queue before applying backpressure to + * the sender + */ + capacity?: number +} + +/** + * Accepts events to emit after a short delay, and with a configurable maximum + * queue capacity after which the send method will return false to let us + * simulate write backpressure. + */ +export class MessageQueue extends TypedEventEmitter { + private queue: Queue + private capacity: number + private delay: number + private needsDrain: boolean + private log: Logger + + constructor (init: MessageQueueInit & { log: Logger }) { + super() + + this.needsDrain = false + this.queue = new Queue({ + concurrency: 1 + }) + this.capacity = init.capacity ?? 5 + this.delay = init.delay ?? 
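To make the framing behaviour of the `LengthPrefixedDecoder` added above concrete, here is a minimal usage sketch. It assumes the class is importable from the package root, per the new `src/index.ts` exports; the payload and the chunk split are illustrative only.

```ts
import { LengthPrefixedDecoder } from '@libp2p/utils'
import { encode } from 'uint8-varint'
import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string'

const decoder = new LengthPrefixedDecoder()

const payload = uint8ArrayFromString('hello world')
const frame = new Uint8Array([...encode(payload.byteLength), ...payload])

// simulate the frame arriving split across two network chunks - the first
// chunk yields nothing because the frame is incomplete, the second yields
// the full payload without its length prefix
for (const chunk of [frame.subarray(0, 4), frame.subarray(4)]) {
  for (const message of decoder.decode(chunk)) {
    console.info(message.subarray()) // Uint8Array containing 'hello world'
  }
}
```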
0 + this.log = init.log + + this.queue.addEventListener('idle', () => { + if (this.needsDrain) { + this.log('network send queue drained') + this.safeDispatchEvent('drain') + this.needsDrain = false + } + }) + } + + send (evt: Event): boolean { + this.queue.add(async () => { + if (this.delay > 0) { + await delay(this.delay) + } + + this.dispatchEvent(evt) + }) + + if (this.queue.size >= this.capacity) { + this.log('network send queue full') + this.needsDrain = true + return false + } + + return true + } + + pause (): void { + this.queue.pause() + } + + resume (): void { + this.queue.resume() + } + + onIdle (): Promise { + return this.queue.onIdle() + } + + size (): number { + return this.queue.size + } +} diff --git a/packages/utils/src/mock-muxer.ts b/packages/utils/src/mock-muxer.ts new file mode 100644 index 0000000000..e78ea06a92 --- /dev/null +++ b/packages/utils/src/mock-muxer.ts @@ -0,0 +1,278 @@ +import * as lp from 'it-length-prefixed' +import { pushable } from 'it-pushable' +import { Uint8ArrayList } from 'uint8arraylist' +import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' +import { toString as uint8ArrayToString } from 'uint8arrays/to-string' +import { AbstractStreamMuxer } from './abstract-stream-muxer.ts' +import { AbstractStream } from './abstract-stream.ts' +import { Queue } from './queue/index.js' +import type { SendResult } from './abstract-message-stream.ts' +import type { AbstractStreamInit } from './abstract-stream.ts' +import type { AbortOptions, MessageStreamDirection, CreateStreamOptions, StreamMuxerFactory, StreamMuxer, MultiaddrConnection } from '@libp2p/interface' +import type { Pushable } from 'it-pushable' +import type { SupportedEncodings } from 'uint8arrays/from-string' + +let streams = 0 + +interface DataMessage { + id: string + type: 'data' + chunk: string +} + +interface ResetMessage { + id: string + type: 'reset' +} + +interface CloseWriteMessage { + id: string + type: 'closeWrite' +} + +interface CloseReadMessage { + id: string + type: 'closeRead' +} + +interface CreateMessage { + id: string + type: 'create' + protocol?: string +} + +interface PauseMessage { + id: string + type: 'pause' +} + +interface ResumeMessage { + id: string + type: 'resume' +} + +type StreamMessage = DataMessage | ResetMessage | CloseWriteMessage | CloseReadMessage | CreateMessage | PauseMessage | ResumeMessage + +export interface MockMuxedStreamInit extends AbstractStreamInit { + sendMessage(message: StreamMessage): boolean + encoding: SupportedEncodings +} + +class MockMuxedStream extends AbstractStream { + private readonly sendMessage: (message: StreamMessage) => boolean + private dataQueue: Queue + private encoding: SupportedEncodings + + constructor (init: MockMuxedStreamInit) { + super(init) + + this.sendMessage = init.sendMessage + this.encoding = init.encoding + this.dataQueue = new Queue({ + concurrency: 1 + }) + + if (this.direction === 'outbound') { + this.sendMessage({ + id: this.id, + type: 'create', + protocol: this.protocol + }) + } + } + + sendData (data: Uint8ArrayList): SendResult { + const canSendMore = this.sendMessage({ + id: this.id, + type: 'data', + chunk: uint8ArrayToString(data.subarray(), this.encoding) + }) + + return { + sentBytes: data.byteLength, + canSendMore + } + } + + sendReset (): void { + this.sendMessage({ + id: this.id, + type: 'reset' + }) + } + + async sendCloseWrite (options?: AbortOptions): Promise { + this.sendMessage({ + id: this.id, + type: 'closeWrite' + }) + + options?.signal?.throwIfAborted() + } + + async 
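The `MessageQueue` above exists to simulate write backpressure in tests. A minimal sketch of that pattern follows - keep calling `send()` until it returns `false`, then wait for the `drain` event. The event-map generic parameter and the `message` event name are assumptions made for illustration (the accompanying spec uses a plain `event` type), and the package-root import is assumed from the new `src/index.ts` exports.

```ts
import { MessageQueue } from '@libp2p/utils'
import { defaultLogger } from '@libp2p/logger'
import { raceEvent } from 'race-event'

interface Events {
  message: MessageEvent<string>
}

const queue = new MessageQueue<Events>({
  delay: 5,
  capacity: 10,
  log: defaultLogger().forComponent('example:message-queue')
})

queue.addEventListener('message', evt => {
  console.info('delivered', evt.data)
})

for (let i = 0; i < 100; i++) {
  if (!queue.send(new MessageEvent('message', { data: `message ${i}` }))) {
    // the queue is at capacity - wait for it to drain before sending more
    await raceEvent(queue, 'drain')
  }
}
```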
sendCloseRead (options?: AbortOptions): Promise { + this.sendMessage({ + id: this.id, + type: 'closeRead' + }) + + options?.signal?.throwIfAborted() + } + + sendPause (): void { + this.sendMessage({ + id: this.id, + type: 'pause' + }) + } + + sendResume (): void { + this.sendMessage({ + id: this.id, + type: 'resume' + }) + } + + onRemotePaused (): void { + this.dataQueue.pause() + } + + onRemoteResumed (): void { + this.dataQueue.resume() + } +} + +interface MockMuxerInit { + /** + * How long the input queue can grow + */ + maxInputQueueSize?: number + + /** + * How to encode data message + * + * @default base64 + */ + encoding?: SupportedEncodings +} + +class MockMuxer extends AbstractStreamMuxer { + private input: Pushable + private maxInputQueueSize: number + private encoding: SupportedEncodings + + constructor (maConn: MultiaddrConnection, init: MockMuxerInit) { + super(maConn, { + ...init, + protocol: '/mock-muxer/1.0.0', + name: 'mock-muxer' + }) + + this.maxInputQueueSize = init.maxInputQueueSize ?? 1024 * 1024 * 10 + this.encoding = init.encoding ?? 'base64' + this.input = pushable() + this.sendMessage = this.sendMessage.bind(this) + + Promise.resolve() + .then(async () => { + for await (const buf of lp.decode(this.input)) { + this.onMessage(JSON.parse(uint8ArrayToString(buf.subarray()))) + } + }) + .catch(err => { + this.abort(err) + }) + } + + onData (data: Uint8Array | Uint8ArrayList): void { + if (this.input.readableLength >= this.maxInputQueueSize) { + this.abort(new Error(`Input queue exceeded maximum size ${this.input.readableLength} >= ${this.maxInputQueueSize}`)) + return + } + + this.input.push(data) + } + + sendMessage (message: StreamMessage): boolean { + this.log('send message %o', message) + + const json = JSON.stringify(message) + const buf = uint8ArrayFromString(json) + const encoded = lp.encode.single(buf) + + return this.send(encoded) + } + + onMessage (message: StreamMessage): void { + this.log('incoming message %o', message) + let stream: MockMuxedStream | undefined = this.streams.find(s => s.id === message.id) + + if (message.type === 'create') { + if (stream != null) { + throw new Error(`Already had stream for ${message.id}`) + } + + this.log('create stream inbound %s', message.id) + stream = this._createStream(message.id, 'inbound', { + protocol: message.protocol + }) + + this.onRemoteStream(stream) + } + + if (stream == null) { + this.log.error(`No stream found for ${message.id}`) + return + } + + if (message.type === 'data') { + stream.onData(uint8ArrayFromString(message.chunk, this.encoding)) + } else if (message.type === 'reset') { + stream.onRemoteReset() + } else if (message.type === 'closeWrite') { + stream.onRemoteCloseWrite() + } else if (message.type === 'closeRead') { + stream.onRemoteCloseRead() + } else if (message.type === 'pause') { + stream.onRemotePaused() + } else if (message.type === 'resume') { + stream.onRemoteResumed() + } + } + + onCreateStream (options: CreateStreamOptions): MockMuxedStream { + return this._createStream(`${streams++}`, 'outbound', options) + } + + _createStream (id: string, direction: MessageStreamDirection, options: CreateStreamOptions): MockMuxedStream { + this.log('createStream %s %s', direction, id) + + return new MockMuxedStream({ + ...options, + id, + direction, + log: this.log.newScope(`stream:${direction}:${id}`), + sendMessage: this.sendMessage, + encoding: this.encoding + }) + } +} + +class MockMuxerFactory implements StreamMuxerFactory { + public protocol: string = '/mock-muxer/1.0.0' + private init: 
MockMuxerInit + + constructor (init: MockMuxerInit) { + this.init = init + } + + createStreamMuxer (maConn: MultiaddrConnection): StreamMuxer { + return new MockMuxer(maConn, { + ...this.init + }) + } +} + +export function mockMuxer (init: MockMuxerInit = {}): StreamMuxerFactory { + return new MockMuxerFactory(init) +} diff --git a/packages/utils/src/mock-stream.ts b/packages/utils/src/mock-stream.ts new file mode 100644 index 0000000000..5cb13f64e3 --- /dev/null +++ b/packages/utils/src/mock-stream.ts @@ -0,0 +1,111 @@ +import { StreamMessageEvent } from '@libp2p/interface' +import { defaultLogger } from '@libp2p/logger' +import { raceSignal } from 'race-signal' +import { AbstractStream } from './abstract-stream.ts' +import type { SendResult } from './abstract-message-stream.ts' +import type { MessageQueue } from './message-queue.ts' +import type { AbortOptions, MessageStreamDirection, TypedEventTarget } from '@libp2p/interface' +import type { Uint8ArrayList } from 'uint8arraylist' + +interface MockStreamMessages { + message: MessageEvent + reset: Event + closeWrite: Event + closeRead: Event +} + +interface MockStreamInit { + delay?: number + direction: MessageStreamDirection + local: MessageQueue + remote: TypedEventTarget +} + +let streamId = 0 + +export class MockStream extends AbstractStream { + private local: MessageQueue + private remote: TypedEventTarget + + constructor (init: MockStreamInit) { + const id = `${streamId++}` + + super({ + ...init, + id, + log: defaultLogger().forComponent(`libp2p:stream-pair:${init.direction}:${id}`) + }) + + this.local = init.local + this.remote = init.remote + + this.local.addEventListener('drain', () => { + this.safeDispatchEvent('drain') + }) + + this.remote.addEventListener('message', (evt) => { + if (this.status !== 'open') { + return + } + + this.onData(evt.data) + }) + this.remote.addEventListener('reset', (evt) => { + if (this.status !== 'open') { + return + } + + this.onRemoteReset() + }) + this.remote.addEventListener('closeWrite', (evt) => { + if (this.status !== 'open') { + return + } + + this.onRemoteCloseWrite() + }) + } + + sendData (data: Uint8ArrayList): SendResult { + const canSendMore = this.local.send(new StreamMessageEvent(data)) + + return { + sentBytes: data.byteLength, + canSendMore + } + } + + sendReset (): void { + this.local.send(new Event('reset')) + } + + async sendCloseWrite (options?: AbortOptions): Promise { + return raceSignal(new Promise((resolve, reject) => { + this.local.send(new Event('closeWrite')) + this.local.onIdle().then(resolve, reject) + }), options?.signal) + } + + async sendCloseRead (options?: AbortOptions): Promise { + return raceSignal(new Promise((resolve, reject) => { + this.local.send(new Event('closeRead')) + this.local.onIdle().then(resolve, reject) + }), options?.signal) + } + + sendPause (): void { + this.local.send(new Event('pause')) + } + + sendResume (): void { + this.local.send(new Event('resume')) + } + + onRemotePaused (): void { + this.local.pause() + } + + onRemoteResumed (): void { + this.local.resume() + } +} diff --git a/packages/utils/src/multiaddr-connection-pair.ts b/packages/utils/src/multiaddr-connection-pair.ts new file mode 100644 index 0000000000..712924ae2c --- /dev/null +++ b/packages/utils/src/multiaddr-connection-pair.ts @@ -0,0 +1,146 @@ +import { StreamMessageEvent } from '@libp2p/interface' +import { defaultLogger } from '@libp2p/logger' +import { multiaddr } from '@multiformats/multiaddr' +import { raceSignal } from 'race-signal' +import { AbstractMultiaddrConnection 
} from './abstract-multiaddr-connection.ts' +import { MessageQueue } from './message-queue.ts' +import type { SendResult } from './abstract-message-stream.ts' +import type { MessageQueueInit } from './message-queue.ts' +import type { AbortOptions, Logger, MultiaddrConnection, MessageStreamDirection, TypedEventTarget } from '@libp2p/interface' +import type { Multiaddr } from '@multiformats/multiaddr' +import type { Uint8ArrayList } from 'uint8arraylist' + +interface MockMultiaddrConnectionMessages { + message: MessageEvent + reset: Event + close: Event + pause: Event + resume: Event +} + +interface MockMultiaddrConnectionInit { + id: string, + log: Logger, + direction: MessageStreamDirection + local: MessageQueue + remote: TypedEventTarget + remoteAddr?: Multiaddr +} + +let multiaddrConnectionId = 0 + +class MockMultiaddrConnection extends AbstractMultiaddrConnection { + private local: MessageQueue + private remote: TypedEventTarget + + constructor (init: MockMultiaddrConnectionInit) { + super({ + ...init, + remoteAddr: init.remoteAddr ?? multiaddr(`/ip4/127.0.0.1/tcp/${init.id}`) + }) + + this.local = init.local + this.remote = init.remote + + this.local.addEventListener('drain', () => { + this.safeDispatchEvent('drain') + }) + + this.remote.addEventListener('message', (evt) => { + if (this.status !== 'open') { + return + } + + this.onData(evt.data) + }) + this.remote.addEventListener('reset', (evt) => { + if (this.status !== 'open') { + return + } + + this.onRemoteReset() + }) + this.remote.addEventListener('close', (evt) => { + this.onRemoteCloseWrite() + }) + this.remote.addEventListener('pause', (evt) => { + this.local.pause() + }) + this.remote.addEventListener('resume', (evt) => { + this.local.resume() + }) + } + + sendData (data: Uint8ArrayList): SendResult { + const canSendMore = this.local.send(new StreamMessageEvent(data)) + + return { + sentBytes: data.byteLength, + canSendMore + } + } + + sendReset (): void { + this.local.send(new Event('reset')) + } + + async sendCloseWrite (options?: AbortOptions): Promise { + return raceSignal(new Promise((resolve, reject) => { + this.local.send(new Event('close')) + this.local.onIdle().then(resolve, reject) + }), options?.signal) + } + + async sendCloseRead (options?: AbortOptions): Promise { + options?.signal?.throwIfAborted() + } + + sendPause (): void { + this.local.send(new Event('pause')) + } + + sendResume (): void { + this.local.send(new Event('resume')) + } +} + +export interface MultiaddrConnectionPairOptions extends MessageQueueInit { + outboundRemoteAddr?: Multiaddr + inboundRemoteAddr?: Multiaddr +} + +export function multiaddrConnectionPair (opts: MultiaddrConnectionPairOptions = {}): [MultiaddrConnection, MultiaddrConnection] { + const inboundId = `${multiaddrConnectionId++}` + const outboundId = `${multiaddrConnectionId++}` + + const outboundLog = defaultLogger().forComponent(`libp2p:mock-maconn:outbound:${inboundId}`) + const inboundLog = defaultLogger().forComponent(`libp2p:mock-maconn:inbound:${outboundId}`) + + const targetA = new MessageQueue({ + ...opts, + log: outboundLog + }) + const targetB = new MessageQueue({ + ...opts, + log: inboundLog + }) + + return [ + new MockMultiaddrConnection({ + id: inboundId, + direction: 'outbound', + local: targetA, + remote: targetB, + remoteAddr: opts?.outboundRemoteAddr, + log: outboundLog + }), + new MockMultiaddrConnection({ + id: outboundId, + direction: 'inbound', + local: targetB, + remote: targetA, + remoteAddr: opts?.inboundRemoteAddr, + log: inboundLog + }) + ] +} diff --git 
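A minimal sketch of driving the in-memory `multiaddrConnectionPair` directly, closely mirroring the accompanying spec - the package-root import path is an assumption based on the updated `src/index.ts` exports.

```ts
import { multiaddrConnectionPair } from '@libp2p/utils'
import { pEvent } from 'p-event'
import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string'

// two MultiaddrConnections joined back-to-back with a small send delay
const [outbound, inbound] = multiaddrConnectionPair({ delay: 5 })

inbound.addEventListener('message', evt => {
  console.info('received', evt.data.subarray())
})

// `send` returns false when the simulated send queue is full
if (!outbound.send(uint8ArrayFromString('hello'))) {
  await pEvent(outbound, 'drain', { rejectionEvents: ['close'] })
}

await outbound.closeWrite()
```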
a/packages/utils/src/multiaddr/index.ts b/packages/utils/src/multiaddr/index.ts new file mode 100644 index 0000000000..9ceb5c8d0f --- /dev/null +++ b/packages/utils/src/multiaddr/index.ts @@ -0,0 +1,6 @@ +export * from './is-global-unicast.js' +export * from './is-ip-based.js' +export * from './is-link-local.js' +export * from './is-loopback.js' +export * from './is-network-address.js' +export * from './is-private.js' diff --git a/packages/utils/src/queue/index.ts b/packages/utils/src/queue/index.ts index b78b7a9bc7..67147a7d87 100644 --- a/packages/utils/src/queue/index.ts +++ b/packages/utils/src/queue/index.ts @@ -131,6 +131,7 @@ export class Queue> private pending: number private readonly sort?: Comparator> + private paused: boolean constructor (init: QueueInit = {}) { super() @@ -138,6 +139,7 @@ export class Queue { + consume (key: string, pointsToConsume: number = 1, options: GetKeySecDurationOptions = {}): RateLimiterResult { const rlKey = this.getKey(key) const secDuration = this._getKeySecDuration(options) let res = this.memoryStorage.incrby(rlKey, pointsToConsume, secDuration) @@ -93,14 +74,6 @@ export class RateLimiter { } throw new RateLimitError('Rate limit exceeded', res) - } else if (this.execEvenly && res.msBeforeNext > 0 && !res.isFirstInDuration) { - // Execute evenly - let delayMs = Math.ceil(res.msBeforeNext / (res.remainingPoints + 2)) - if (delayMs < this.execEvenlyMinDelayMs) { - delayMs = res.consumedPoints * this.execEvenlyMinDelayMs - } - - await delay(delayMs) } return res @@ -241,8 +214,8 @@ export class MemoryStorage { this.storage.delete(key) }, durationMs) - if (record.timeoutId.unref != null) { - record.timeoutId.unref() + if ((record.timeoutId as any).unref != null) { + (record.timeoutId as any).unref() } } diff --git a/packages/utils/src/socket-writer.browser.ts b/packages/utils/src/socket-writer.browser.ts new file mode 100644 index 0000000000..494c7f6a7b --- /dev/null +++ b/packages/utils/src/socket-writer.browser.ts @@ -0,0 +1,3 @@ +export function socketWriter (): void { + throw new Error('Unsupported in browsers') +} diff --git a/packages/utils/src/socket-writer.ts b/packages/utils/src/socket-writer.ts new file mode 100644 index 0000000000..9dcb457230 --- /dev/null +++ b/packages/utils/src/socket-writer.ts @@ -0,0 +1,61 @@ +import stream from 'node:stream' +import { Uint8ArrayList } from 'uint8arraylist' + +export interface SocketWriter { + /** + * Write any available data into the socket, if the socket's internal write + * buffer has available capacity + */ + pull (): boolean + + /** + * Write data into the socket, returns false if the socket's internal write + * buffer is at capacity + */ + write (data: Uint8Array | Uint8Array[] | Uint8ArrayList): boolean +} + +export function socketWriter (socket: stream.Duplex): SocketWriter { + const queue = new Uint8ArrayList() + + return { + pull (): boolean { + if (socket.writableNeedDrain) { + return false + } + + for (const buf of queue) { + queue.consume(buf.byteLength) + + if (!socket.write(buf)) { + // continue writing after drain event. 
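The `RateLimiter.consume()` change above makes the call synchronous - it now returns a result immediately or throws a `RateLimitError` carrying `msBeforeNext`, as exercised by the updated spec. A minimal sketch of the new calling convention, with an assumed package-root import:

```ts
import { RateLimiter } from '@libp2p/utils'

// allow 5 points per 1 second window
const limiter = new RateLimiter({ points: 5, duration: 1 })

try {
  const res = limiter.consume('some-key')
  console.info('allowed, remaining points', res.remainingPoints)
} catch (err: any) {
  // the thrown error exposes how long to wait before retrying
  console.info('rate limited, retry in', err.msBeforeNext, 'ms')
}
```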
this is a synchronous operation + // so it will not interleave with the `this.writeToSocket()` + // invocation in this.sendData so all data will be sent in-order + if (queue.byteLength > 0) { + socket.once('drain', () => { + this.pull() + }) + } + + return false + } + } + + return true + }, + + write (data: Uint8Array | Uint8Array[] | Uint8ArrayList): boolean { + if (Array.isArray(data)) { + queue.appendAll(data) + } else { + queue.append(data) + } + + if (socket.writableNeedDrain) { + return false + } + + return this.pull() + } + } +} diff --git a/packages/utils/src/stream-pair.ts b/packages/utils/src/stream-pair.ts new file mode 100644 index 0000000000..489e78a0fe --- /dev/null +++ b/packages/utils/src/stream-pair.ts @@ -0,0 +1,60 @@ +import { raceEvent } from 'race-event' +import { mockMuxer } from './mock-muxer.ts' +import { multiaddrConnectionPair } from './multiaddr-connection-pair.ts' +import { echo } from './stream-utils.ts' +import type { Stream } from '@libp2p/interface' + +export interface StreamPairOptions { + /** + * How long to wait in ms before sending messages + * + * @default 1 + */ + delay?: number + + /** + * If more than this many messages are sent within delay, write backpressure + * will be applied + */ + capacity?: number + + /** + * Simulate having pre-negotiated a protocol by passing it here + */ + protocol?: string +} + +/** + * Returns two streams connected to each other with a slight delay in sending + * messages to simulate a network + */ +export async function streamPair (opts: StreamPairOptions = {}): Promise<[Stream, Stream]> { + const [outboundConnection, inboundConnection] = multiaddrConnectionPair(opts) + + const localMuxer = mockMuxer().createStreamMuxer(outboundConnection) + const remoteMuxer = mockMuxer().createStreamMuxer(inboundConnection) + + const [ + outboundStream, + inboundStream + ] = await Promise.all([ + localMuxer.createStream({ + protocol: opts.protocol + }), + raceEvent>(remoteMuxer, 'stream').then(evt => { + return evt.detail + }) + ]) + + return [ + outboundStream, + inboundStream + ] +} + +export async function echoStream (opts: StreamPairOptions = {}): Promise { + const [outbound, inbound] = await streamPair(opts) + echo(inbound) + + return outbound +} diff --git a/packages/utils/src/stream-to-ma-conn.ts b/packages/utils/src/stream-to-ma-conn.ts deleted file mode 100644 index 732b71bbe7..0000000000 --- a/packages/utils/src/stream-to-ma-conn.ts +++ /dev/null @@ -1,106 +0,0 @@ -import forEach from 'it-foreach' -import { pipe } from 'it-pipe' -import type { Logger, MultiaddrConnection, Stream } from '@libp2p/interface' -import type { Multiaddr } from '@multiformats/multiaddr' -import type { Uint8ArrayList } from 'uint8arraylist' - -export interface StreamProperties { - stream: Stream - remoteAddr: Multiaddr - localAddr: Multiaddr - log: Logger - - /** - * A callback invoked when data is read from the stream - */ - onDataRead?(buf: Uint8ArrayList | Uint8Array): void - - /** - * A callback invoked when data is written to the stream - */ - onDataWrite?(buf: Uint8ArrayList | Uint8Array): void -} - -/** - * Convert a duplex iterable into a MultiaddrConnection. 
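The new `streamPair()` and `echoStream()` helpers give tests an in-memory `Stream` pair with simulated network latency, wiring `multiaddrConnectionPair()` through the mock muxer as shown above. A minimal sketch, assuming package-root imports:

```ts
import { streamPair, echoStream } from '@libp2p/utils'
import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string'

// two ends of the same stream, optionally pre-negotiated to a protocol
const [outbound, inbound] = await streamPair({ delay: 1, protocol: '/my/protocol/1.0.0' })

inbound.addEventListener('message', evt => {
  console.info('remote received', evt.data.subarray())
})

outbound.send(uint8ArrayFromString('ping'))
await outbound.closeWrite()

// or let the remote end echo everything straight back
const stream = await echoStream()
stream.addEventListener('message', evt => {
  console.info('echoed back', evt.data.subarray())
})
stream.send(uint8ArrayFromString('hello'))
```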
- * https://github.com/libp2p/interface-transport#multiaddrconnection - */ -export function streamToMaConnection (props: StreamProperties): MultiaddrConnection { - const { stream, remoteAddr, log, onDataRead, onDataWrite } = props - - let closedRead = false - let closedWrite = false - - // piggyback on `stream.close` invocations to close multiaddr connection - const streamClose = stream.close.bind(stream) - stream.close = async (options): Promise => { - await streamClose(options) - close(true) - } - - // piggyback on `stream.abort` invocations to close multiaddr connection - const streamAbort = stream.abort.bind(stream) - stream.abort = (err): void => { - streamAbort(err) - close(true) - } - - // piggyback on `stream.sink` invocations to close multiaddr connection - const streamSink = stream.sink.bind(stream) - stream.sink = async (source): Promise => { - try { - await streamSink( - pipe( - source, - (source) => forEach(source, buf => onDataWrite?.(buf)) - ) - ) - } catch (err: any) { - maConn.log.error('errored - %e', err) - - // If aborted we can safely ignore - if (err.type !== 'aborted') { - // If the source errored the socket will already have been destroyed by - // toIterable.duplex(). If the socket errored it will already be - // destroyed. There's nothing to do here except log the error & return. - maConn.log.error('%s error in sink - %e', remoteAddr, err) - } - } finally { - closedWrite = true - close() - } - } - - const maConn: MultiaddrConnection = { - log: log.newScope('stream-to-maconn'), - sink: stream.sink, - source: (async function * (): AsyncGenerator { - try { - for await (const buf of stream.source) { - onDataRead?.(buf) - yield buf - } - } finally { - closedRead = true - close() - } - }()), - remoteAddr, - timeline: { open: Date.now(), close: undefined }, - close: stream.close, - abort: stream.abort - } - - function close (force?: boolean): void { - if (force === true) { - closedRead = true - closedWrite = true - } - - if (closedRead && closedWrite && maConn.timeline.close == null) { - maConn.timeline.close = Date.now() - } - } - - return maConn -} diff --git a/packages/utils/src/stream-utils.ts b/packages/utils/src/stream-utils.ts new file mode 100644 index 0000000000..258558b80e --- /dev/null +++ b/packages/utils/src/stream-utils.ts @@ -0,0 +1,807 @@ +import { StreamMessageEvent, StreamCloseEvent } from '@libp2p/interface' +import { pipe as itPipe } from 'it-pipe' +import { pushable } from 'it-pushable' +import { pEvent } from 'p-event' +import { raceEvent } from 'race-event' +import { raceSignal } from 'race-signal' +import * as varint from 'uint8-varint' +import { Uint8ArrayList } from 'uint8arraylist' +import { UnexpectedEOFError } from './errors.js' +import type { MessageStream } from '@libp2p/interface' +import type { AbortOptions } from '@multiformats/multiaddr' +import type { Duplex, Source, Transform, Sink } from 'it-stream-types' + +const DEFAULT_MAX_BUFFER_SIZE = 4_194_304 + +export class UnwrappedError extends Error { + static name = 'UnwrappedError' + name = 'UnwrappedError' +} + +/** + * The reported length of the next data message was not a positive integer + */ +export class InvalidMessageLengthError extends Error { + name = 'InvalidMessageLengthError' + code = 'ERR_INVALID_MSG_LENGTH' +} + +/** + * The reported length of the next data message was larger than the configured + * max allowable value + */ +export class InvalidDataLengthError extends Error { + name = 'InvalidDataLengthError' + code = 'ERR_MSG_DATA_TOO_LONG' +} + +/** + * The varint used to 
specify the length of the next data message contained more + * bytes than the configured max allowable value + */ +export class InvalidDataLengthLengthError extends Error { + name = 'InvalidDataLengthLengthError' + code = 'ERR_MSG_LENGTH_TOO_LONG' +} + +export interface ByteStreamOpts { + /** + * Incoming bytes are buffered until read, this setting limits how many bytes + * will be buffered. + * + * @default 4_194_304 + */ + maxBufferSize?: number + + /** + * If true, prevent message events propagating after they have been received, + * + * This is useful for when there are be other observers of messages and the + * caller does not wish to them to receive anything + */ + stopPropagation?: boolean +} + +export interface ReadBytesOptions extends AbortOptions { + /** + * If specified, read this number of bytes from the stream + */ + bytes: number +} + +export interface ByteStream { + /** + * Read bytes from the stream. + * + * If a required number of bytes is passed as an option, this will wait for + * the underlying stream to supply that number of bytes, throwing an + * `UnexpectedEOFError` if the stream closes before this happens. + * + * If no required number of bytes is passed, this will return `null` if the + * underlying stream closes before supplying any bytes. + */ + read(options: ReadBytesOptions): Promise + read(options?: AbortOptions): Promise + + /** + * Write the passed bytes to the stream + */ + write(data: Uint8Array | Uint8ArrayList, options?: AbortOptions): Promise + + /** + * After calling this method the stream can no longer be used. Any unread data + * will be emitted as a message event during the microtask queue of the + * current event loop tick. + */ + unwrap(): Stream +} + +export function byteStream (stream: Stream, opts?: ByteStreamOpts): ByteStream { + const maxBufferSize = opts?.maxBufferSize ?? DEFAULT_MAX_BUFFER_SIZE + const readBuffer = new Uint8ArrayList() + let hasBytes = Promise.withResolvers() + let unwrapped = false + + const onMessage = (evt: StreamMessageEvent): void => { + if (opts?.stopPropagation === true) { + evt.stopImmediatePropagation() + } + + readBuffer.append(evt.data) + + if (readBuffer.byteLength > maxBufferSize) { + const readBufferSize = readBuffer.byteLength + readBuffer.consume(readBuffer.byteLength) + hasBytes.reject(new Error(`Read buffer overflow - ${readBufferSize} > ${maxBufferSize}`)) + } + + hasBytes.resolve() + } + stream.addEventListener('message', onMessage) + + const onClose = (evt: StreamCloseEvent): void => { + if (evt.error != null) { + hasBytes.reject(evt.error) + } else { + hasBytes.resolve() + } + } + stream.addEventListener('close', onClose) + + const onRemoteCloseWrite = (): void => { + hasBytes.resolve() + } + stream.addEventListener('remoteCloseWrite', onRemoteCloseWrite) + + const byteStream: ByteStream = { + // @ts-expect-error options type prevents type inference + async read (options?: ReadBytesOptions) { + if (unwrapped === true) { + throw new UnwrappedError('Stream was unwrapped') + } + + if (stream.readStatus !== 'readable') { + if (options?.bytes == null) { + return null + } + + throw new UnexpectedEOFError('Unexpected EOF - stream was not readable') + } + + const bytesToRead = options?.bytes ?? 
1 + + while (true) { + if (readBuffer.byteLength >= bytesToRead) { + // if we are about to exit the loop this promise will not be awaited + // so resolve it to prevent and unhandled promise rejections that may + // occur + hasBytes.resolve() + + break + } + + await raceSignal(hasBytes.promise, options?.signal) + + if (stream.readStatus !== 'readable') { + if (readBuffer.byteLength === 0 && options?.bytes == null) { + return null + } + + break + } + + hasBytes = Promise.withResolvers() + } + + const toRead = options?.bytes ?? readBuffer.byteLength + + if (readBuffer.byteLength < toRead) { + if (stream.readStatus !== 'readable') { + throw new UnexpectedEOFError(`Unexpected EOF - stream status was "${stream.readStatus}" and not "readable"`) + } + + return byteStream.read(options) + } + + const output = readBuffer.sublist(0, toRead) + readBuffer.consume(toRead) + + return output + }, + async write (data: Uint8Array | Uint8ArrayList, options?: AbortOptions) { + if (unwrapped === true) { + throw new UnwrappedError('Stream was unwrapped') + } + + const sendMore = stream.send(data) + + if (sendMore === false) { + await raceEvent(stream, 'drain', options?.signal) + } + }, + unwrap () { + // already unwrapped, just return the original stream + if (unwrapped) { + return stream + } + + // only unwrap once + unwrapped = true + stream.removeEventListener('message', onMessage) + stream.removeEventListener('close', onClose) + stream.removeEventListener('remoteCloseWrite', onRemoteCloseWrite) + + // emit any unread data + if (readBuffer.byteLength > 0) { + stream.push(readBuffer) + } + + return stream + } + } + + return byteStream +} + +export interface LengthPrefixedStream { + /** + * Read the next length-prefixed number of bytes from the stream + */ + read(options?: AbortOptions): Promise + + /** + * Write the passed bytes to the stream prefixed by their length + */ + write(data: Uint8Array | Uint8ArrayList, options?: AbortOptions): Promise + + /** + * Write passed list of bytes, prefix by their individual lengths to the stream as a single write + */ + writeV(input: Array, options?: AbortOptions): Promise + + /** + * Returns the underlying stream + */ + unwrap(): Stream +} + +export interface LengthPrefixedStreamOpts extends ByteStreamOpts { + lengthEncoder (value: number): Uint8ArrayList | Uint8Array + lengthDecoder (data: Uint8ArrayList): number + maxLengthLength: number + maxDataLength: number +} + +export function lpStream (stream: Stream, opts: Partial = {}): LengthPrefixedStream { + const bytes = byteStream(stream, opts) + + if (opts.maxDataLength != null && opts.maxLengthLength == null) { + // if max data length is set but max length length is not, calculate the + // max length length needed to encode max data length + opts.maxLengthLength = varint.encodingLength(opts.maxDataLength) + } + + const decodeLength = opts?.lengthDecoder ?? varint.decode + const encodeLength = opts?.lengthEncoder ?? 
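A minimal sketch of the `byteStream()` helper above: reading a fixed number of bytes, then whatever remains, over a `streamPair()` transport. The package-root import paths are assumed from the new `src/index.ts` exports.

```ts
import { byteStream, streamPair } from '@libp2p/utils'
import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string'

const [outbound, inbound] = await streamPair()

const client = byteStream(outbound)
const server = byteStream(inbound)

await client.write(uint8ArrayFromString('hello world'))

// wait for exactly 5 bytes - throws UnexpectedEOFError if the stream closes first
const fixed = await server.read({ bytes: 5 })
console.info(fixed?.subarray()) // 'hello'

// with no byte count, returns whatever is buffered, or null on graceful close
const rest = await server.read()
console.info(rest?.subarray()) // ' world'

// hand back the raw stream when done - any unread data is re-emitted as a
// message event
const raw = client.unwrap()
await raw.closeWrite()
```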
varint.encode + + const lpStream: LengthPrefixedStream = { + async read (options?: AbortOptions) { + let dataLength: number = -1 + const lengthBuffer = new Uint8ArrayList() + + while (true) { + const buf = await bytes.read({ + ...options, + bytes: 1 + }) + + // the underlying resource closed gracefully + if (buf == null) { + break + } + + // read one byte at a time until we can decode a varint + lengthBuffer.append(buf) + + try { + dataLength = decodeLength(lengthBuffer) + } catch (err) { + if (err instanceof RangeError) { + continue + } + + throw err + } + + if (dataLength < 0) { + throw new InvalidMessageLengthError('Invalid message length') + } + + if (opts?.maxLengthLength != null && lengthBuffer.byteLength > opts.maxLengthLength) { + throw new InvalidDataLengthLengthError(`Message length length too long - ${lengthBuffer.byteLength} > ${opts.maxLengthLength}`) + } + + if (dataLength > -1) { + break + } + } + + if (opts?.maxDataLength != null && dataLength > opts.maxDataLength) { + throw new InvalidDataLengthError(`Message length too long - ${dataLength} > ${opts.maxDataLength}`) + } + + const buf = await bytes.read({ + ...options, + bytes: dataLength + }) + + if (buf == null) { + throw new UnexpectedEOFError(`Unexpected EOF - tried to read ${dataLength} bytes but the stream closed`) + } + + if (buf.byteLength !== dataLength) { + throw new UnexpectedEOFError(`Unexpected EOF - read ${buf.byteLength}/${dataLength} bytes before the stream closed`) + } + + return buf + }, + async write (data, options?: AbortOptions) { + // encode, write + await bytes.write(new Uint8ArrayList(encodeLength(data.byteLength), data), options) + }, + async writeV (data, options?: AbortOptions) { + const list = new Uint8ArrayList( + ...data.flatMap(buf => ([encodeLength(buf.byteLength), buf])) + ) + + // encode, write + await bytes.write(list, options) + }, + unwrap () { + return bytes.unwrap() + } + } + + return lpStream +} + +/** + * A protobuf decoder - takes a byte array and returns an object + */ +export interface ProtobufDecoder { + (data: Uint8Array | Uint8ArrayList): T +} + +/** + * A protobuf encoder - takes an object and returns a byte array + */ +export interface ProtobufEncoder { + (data: T): Uint8Array +} + +/** + * Convenience methods for working with protobuf streams + */ +export interface ProtobufStream { + /** + * Read the next length-prefixed byte array from the stream and decode it as the passed protobuf format + */ + read(proto: { decode: ProtobufDecoder }, options?: AbortOptions): Promise + + /** + * Encode the passed object as a protobuf message and write it's length-prefixed bytes to the stream + */ + write(data: T, proto: { encode: ProtobufEncoder }, options?: AbortOptions): Promise + + /** + * Encode the passed objects as protobuf messages and write their length-prefixed bytes to the stream as a single write + */ + writeV(input: T[], proto: { encode: ProtobufEncoder }, options?: AbortOptions): Promise + + /** + * Returns an object with read/write methods for operating on one specific type of protobuf message + */ + pb(proto: { encode: ProtobufEncoder, decode: ProtobufDecoder }): ProtobufMessageStream + + /** + * Returns the underlying stream + */ + unwrap(): Stream +} + +/** + * A message reader/writer that only uses one type of message + */ +export interface ProtobufMessageStream { + /** + * Read a message from the stream + */ + read(options?: AbortOptions): Promise + + /** + * Write a message to the stream + */ + write(d: T, options?: AbortOptions): Promise + + /** + * Write several 
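A minimal sketch of exchanging varint length-prefixed messages with `lpStream()`, again over an in-memory stream pair; the import paths are assumed.

```ts
import { lpStream, streamPair } from '@libp2p/utils'
import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string'
import { toString as uint8ArrayToString } from 'uint8arrays/to-string'

const [outbound, inbound] = await streamPair()

const client = lpStream(outbound)
const server = lpStream(inbound, { maxDataLength: 1024 })

// each write is prefixed with a varint length...
await client.write(uint8ArrayFromString('hello'))
await client.writeV([
  uint8ArrayFromString('several'),
  uint8ArrayFromString('messages')
])

// ...so each read returns exactly one message
console.info(uint8ArrayToString((await server.read()).subarray())) // 'hello'
console.info(uint8ArrayToString((await server.read()).subarray())) // 'several'
console.info(uint8ArrayToString((await server.read()).subarray())) // 'messages'
```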
messages to the stream + */ + writeV(d: T[], options?: AbortOptions): Promise + + /** + * Unwrap the underlying protobuf stream + */ + unwrap(): ProtobufStream +} + +export interface ProtobufStreamOpts extends LengthPrefixedStreamOpts { + +} + +export function pbStream (stream: Stream, opts?: Partial): ProtobufStream { + const lp = lpStream(stream, opts) + + const pbStream: ProtobufStream = { + read: async (proto, options?: AbortOptions) => { + // readLP, decode + const value = await lp.read(options) + + return proto.decode(value) + }, + write: async (message, proto, options?: AbortOptions) => { + // encode, writeLP + await lp.write(proto.encode(message), options) + }, + writeV: async (messages, proto, options?: AbortOptions) => { + // encode, writeLP + await lp.writeV(messages.map(message => proto.encode(message)), options) + }, + pb: (proto) => { + return { + read: async (options) => pbStream.read(proto, options), + write: async (d, options) => pbStream.write(d, proto, options), + writeV: async (d, options) => pbStream.writeV(d, proto, options), + unwrap: () => pbStream + } + }, + unwrap: () => { + return lp.unwrap() + } + } + + return pbStream +} + +export function echo (channel: MessageStream): ReturnType { + channel.addEventListener('remoteCloseWrite', () => { + channel.closeWrite() + }) + + return pipe(channel, channel) +} + +export type PipeInput = Iterable | AsyncIterable | MessageStream + +function isMessageStream (obj?: any): obj is MessageStream { + return obj?.addEventListener != null +} + +export function messageStreamToDuplex (stream: MessageStream): Duplex, Iterable | AsyncIterable> { + const source = pushable() + const onError = Promise.withResolvers>() + + const onMessage = (evt: StreamMessageEvent): void => { + source.push(evt.data) + } + + const onRemoteCloseWrite = (): void => { + source.end() + + stream.removeEventListener('message', onMessage) + stream.removeEventListener('close', onClose) + stream.removeEventListener('remoteCloseWrite', onRemoteCloseWrite) + } + + const onClose = (evt: StreamCloseEvent): void => { + source.end(evt.error) + + if (evt.error != null) { + onError.reject(evt.error) + } + + stream.removeEventListener('message', onMessage) + stream.removeEventListener('close', onClose) + stream.removeEventListener('remoteCloseWrite', onRemoteCloseWrite) + } + + stream.addEventListener('message', onMessage) + stream.addEventListener('close', onClose, { + once: true + }) + stream.addEventListener('remoteCloseWrite', onRemoteCloseWrite, { + once: true + }) + + return { + source, + async sink (source: Source) { + async function * toGenerator (): AsyncGenerator { + yield * source + } + + const gen = toGenerator() + + while (true) { + const { done, value } = await Promise.race([ + gen.next(), + onError.promise + ]) + + if (value != null) { + if (!stream.send(value)) { + await Promise.race([ + pEvent(stream, 'drain', { + rejectionEvents: [ + 'close' + ] + }) + ]) + } + } + + if (done === true) { + break + } + } + + await stream.closeWrite() + } + } +} + +interface SourceFn { (): A } + +type PipeSource = + Iterable | + AsyncIterable | + SourceFn | + Duplex | + MessageStream + +type PipeTransform = + Transform | + Duplex | + MessageStream + +type PipeSink = + Sink | + Duplex | + MessageStream + +type PipeOutput = + A extends Sink ? ReturnType : + A extends Duplex ? ReturnType : + A extends MessageStream ? AsyncGenerator : + never + +// single item pipe output includes pipe source types +type SingleItemPipeOutput = + A extends Iterable ? 
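A minimal sketch of `pbStream()`. The codec below is a hand-rolled JSON stand-in, purely so the example is self-contained - real callers would normally pass a protons-generated codec, which exposes the same `encode`/`decode` shape. Import paths are assumed.

```ts
import { pbStream, streamPair } from '@libp2p/utils'
import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string'
import { toString as uint8ArrayToString } from 'uint8arrays/to-string'
import type { Uint8ArrayList } from 'uint8arraylist'

interface HandshakeMessage {
  version: number
  nick: string
}

// stand-in codec matching the ProtobufEncoder/ProtobufDecoder shapes
const HandshakeCodec = {
  encode: (msg: HandshakeMessage): Uint8Array => uint8ArrayFromString(JSON.stringify(msg)),
  decode: (buf: Uint8Array | Uint8ArrayList): HandshakeMessage => JSON.parse(
    uint8ArrayToString(buf instanceof Uint8Array ? buf : buf.subarray())
  )
}

const [outbound, inbound] = await streamPair()

// bind both ends to a single message type
const client = pbStream(outbound).pb(HandshakeCodec)
const server = pbStream(inbound).pb(HandshakeCodec)

await client.write({ version: 1, nick: 'alice' })

const message = await server.read()
console.info(message.nick) // 'alice'
```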
A : + A extends AsyncIterable ? A : + A extends SourceFn ? ReturnType : + A extends Duplex ? A['source'] : + PipeOutput + +type PipeFnInput = + A extends Iterable ? A : + A extends AsyncIterable ? A : + A extends SourceFn ? ReturnType : + A extends Transform ? ReturnType : + A extends Duplex ? A['source'] : + never + +export function pipe< + A extends PipeSource +> ( + source: A +): SingleItemPipeOutput +// two items, source to sink +export function pipe< + A extends PipeSource, + B extends PipeSink> +> ( + source: A, + sink: B +): PipeOutput + +// three items, source to sink with transform(s) in between +export function pipe< + A extends PipeSource, + B extends PipeTransform>, + C extends PipeSink> +> ( + source: A, + transform1: B, + sink: C +): PipeOutput + +// many items, source to sink with transform(s) in between +export function pipe< + A extends PipeSource, + B extends PipeTransform>, + C extends PipeTransform>, + D extends PipeSink> +> ( + source: A, + transform1: B, + transform2: C, + sink: D +): PipeOutput + +// lots of items, source to sink with transform(s) in between +export function pipe< + A extends PipeSource, + B extends PipeTransform>, + C extends PipeTransform>, + D extends PipeTransform>, + E extends PipeSink> +> ( + source: A, + transform1: B, + transform2: C, + transform3: D, + sink: E +): PipeOutput + +// lots of items, source to sink with transform(s) in between +export function pipe< + A extends PipeSource, + B extends PipeTransform>, + C extends PipeTransform>, + D extends PipeTransform>, + E extends PipeTransform>, + F extends PipeSink> +> ( + source: A, + transform1: B, + transform2: C, + transform3: D, + transform4: E, + sink: F +): PipeOutput + +// lots of items, source to sink with transform(s) in between +export function pipe< + A extends PipeSource, + B extends PipeTransform>, + C extends PipeTransform>, + D extends PipeTransform>, + E extends PipeTransform>, + F extends PipeTransform>, + G extends PipeSink> +> ( + source: A, + transform1: B, + transform2: C, + transform3: D, + transform4: E, + transform5: F, + sink: G +): PipeOutput + +// lots of items, source to sink with transform(s) in between +export function pipe< + A extends PipeSource, + B extends PipeTransform>, + C extends PipeTransform>, + D extends PipeTransform>, + E extends PipeTransform>, + F extends PipeTransform>, + G extends PipeTransform>, + H extends PipeSink> +> ( + source: A, + transform1: B, + transform2: C, + transform3: D, + transform4: E, + transform5: F, + transform6: G, + sink: H +): PipeOutput + +// lots of items, source to sink with transform(s) in between +export function pipe< + A extends PipeSource, + B extends PipeTransform>, + C extends PipeTransform>, + D extends PipeTransform>, + E extends PipeTransform>, + F extends PipeTransform>, + G extends PipeTransform>, + H extends PipeTransform>, + I extends PipeSink> +> ( + source: A, + transform1: B, + transform2: C, + transform3: D, + transform4: E, + transform5: F, + transform6: G, + transform7: H, + sink: I +): PipeOutput + +// lots of items, source to sink with transform(s) in between +export function pipe< + A extends PipeSource, + B extends PipeTransform>, + C extends PipeTransform>, + D extends PipeTransform>, + E extends PipeTransform>, + F extends PipeTransform>, + G extends PipeTransform>, + H extends PipeTransform>, + I extends PipeTransform>, + J extends PipeSink> +> ( + source: A, + transform1: B, + transform2: C, + transform3: D, + transform4: E, + transform5: F, + transform6: G, + transform7: H, + transform8: I, 
+ sink: J +): PipeOutput + +// lots of items, source to sink with transform(s) in between +export function pipe< + A extends PipeSource, + B extends PipeTransform>, + C extends PipeTransform>, + D extends PipeTransform>, + E extends PipeTransform>, + F extends PipeTransform>, + G extends PipeTransform>, + H extends PipeTransform>, + I extends PipeTransform>, + J extends PipeTransform>, + K extends PipeSink> +> ( + source: A, + transform1: B, + transform2: C, + transform3: D, + transform4: E, + transform5: F, + transform6: G, + transform7: H, + transform8: I, + transform9: J, + sink: K +): PipeOutput + +// lots of items, source to sink with transform(s) in between +export function pipe< + A extends PipeSource, + B extends PipeTransform>, + C extends PipeTransform>, + D extends PipeTransform>, + E extends PipeTransform>, + F extends PipeTransform>, + G extends PipeTransform>, + H extends PipeTransform>, + I extends PipeTransform>, + J extends PipeTransform>, + K extends PipeTransform>, + L extends PipeSink> +> ( + source: A, + transform1: B, + transform2: C, + transform3: D, + transform4: E, + transform5: F, + transform6: G, + transform7: H, + transform8: I, + transform9: J, + transform10: K, + sink: L +): PipeOutput +export function pipe (...input: any[]): any { + const sources = input.map(source => { + if (isMessageStream(source)) { + return messageStreamToDuplex(source) + } + + return source + }) + + // @ts-expect-error it-pipe types say args cannot be spread like this + return itPipe(...sources) +} diff --git a/packages/utils/test/abstract-stream.spec.ts b/packages/utils/test/abstract-stream.spec.ts deleted file mode 100644 index 4d131aa6ff..0000000000 --- a/packages/utils/test/abstract-stream.spec.ts +++ /dev/null @@ -1,262 +0,0 @@ -import { logger } from '@libp2p/logger' -import { expect } from 'aegir/chai' -import delay from 'delay' -import all from 'it-all' -import drain from 'it-drain' -import pDefer from 'p-defer' -import Sinon from 'sinon' -import { Uint8ArrayList } from 'uint8arraylist' -import { AbstractStream } from '../src/abstract-stream.js' -import type { AbortOptions } from '@libp2p/interface' - -class TestStream extends AbstractStream { - async sendNewStream (options?: AbortOptions): Promise { - - } - - async sendData (buf: Uint8ArrayList, options?: AbortOptions): Promise { - - } - - async sendReset (options?: AbortOptions): Promise { - - } - - async sendCloseWrite (options?: AbortOptions): Promise { - - } - - async sendCloseRead (options?: AbortOptions): Promise { - - } -} - -describe('abstract stream', () => { - let stream: TestStream - - beforeEach(() => { - stream = new TestStream({ - id: 'test', - direction: 'outbound', - log: logger('test'), - onEnd: (): void => {} - }) - }) - - it('sends data', async () => { - const sendSpy = Sinon.spy(stream, 'sendData') - const data = [ - Uint8Array.from([0, 1, 2, 3, 4]) - ] - - await stream.sink(data) - - const call = sendSpy.getCall(0) - expect(call.args[0].subarray()).to.equalBytes(data[0]) - }) - - it('receives data', async () => { - const data = new Uint8ArrayList( - Uint8Array.from([0, 1, 2, 3, 4]) - ) - - stream.sourcePush(data) - stream.remoteCloseWrite() - - const output = await all(stream.source) - expect(output[0]?.subarray()).to.equalBytes(data.subarray()) - }) - - it('closes', async () => { - const sendCloseReadSpy = Sinon.spy(stream, 'sendCloseRead') - const sendCloseWriteSpy = Sinon.spy(stream, 'sendCloseWrite') - const onEndSpy = Sinon.spy(stream as any, 'onEnd') - - await stream.close() - - 
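A minimal sketch of the `MessageStream`-aware `pipe()` defined above, piping an iterable into one end of a stream pair and draining the other. That `it-pipe` consumes the converted duplex in these positions exactly as shown is assumed from its documented behaviour; import paths are assumed as before.

```ts
import { pipe, streamPair } from '@libp2p/utils'
import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string'

const [outbound, inbound] = await streamPair()

await Promise.all([
  // write two chunks into the outbound end, then close it for writing
  pipe(
    [
      uint8ArrayFromString('hello '),
      uint8ArrayFromString('world')
    ],
    outbound
  ),
  // drain the inbound end until the remote close propagates
  pipe(inbound, async (source) => {
    let received = 0

    for await (const buf of source) {
      received += buf.byteLength
    }

    console.info('received %d bytes', received)
  })
])
```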
expect(sendCloseReadSpy.calledOnce).to.be.true() - expect(sendCloseWriteSpy.calledOnce).to.be.true() - expect(onEndSpy.calledOnce).to.be.true() - - expect(stream).to.have.property('status', 'closed') - expect(stream).to.have.property('writeStatus', 'closed') - expect(stream).to.have.property('readStatus', 'closed') - expect(stream).to.have.nested.property('timeline.close').that.is.a('number') - expect(stream).to.have.nested.property('timeline.closeRead').that.is.a('number') - expect(stream).to.have.nested.property('timeline.closeWrite').that.is.a('number') - expect(stream).to.not.have.nested.property('timeline.reset') - expect(stream).to.not.have.nested.property('timeline.abort') - }) - - it('closes for reading', async () => { - const sendCloseReadSpy = Sinon.spy(stream, 'sendCloseRead') - const sendCloseWriteSpy = Sinon.spy(stream, 'sendCloseWrite') - - await stream.closeRead() - - expect(sendCloseReadSpy.calledOnce).to.be.true() - expect(sendCloseWriteSpy.called).to.be.false() - - expect(stream).to.have.property('status', 'open') - expect(stream).to.have.property('writeStatus', 'ready') - expect(stream).to.have.property('readStatus', 'closed') - expect(stream).to.not.have.nested.property('timeline.close') - expect(stream).to.have.nested.property('timeline.closeRead').that.is.a('number') - expect(stream).to.not.have.nested.property('timeline.closeWrite') - expect(stream).to.not.have.nested.property('timeline.reset') - expect(stream).to.not.have.nested.property('timeline.abort') - }) - - it('closes for writing', async () => { - const sendCloseReadSpy = Sinon.spy(stream, 'sendCloseRead') - const sendCloseWriteSpy = Sinon.spy(stream, 'sendCloseWrite') - - await stream.closeWrite() - - expect(sendCloseReadSpy.called).to.be.false() - expect(sendCloseWriteSpy.calledOnce).to.be.true() - - expect(stream).to.have.property('status', 'open') - expect(stream).to.have.property('writeStatus', 'closed') - expect(stream).to.have.property('readStatus', 'ready') - expect(stream).to.not.have.nested.property('timeline.close') - expect(stream).to.not.have.nested.property('timeline.closeRead') - expect(stream).to.have.nested.property('timeline.closeWrite').that.is.a('number') - expect(stream).to.not.have.nested.property('timeline.reset') - expect(stream).to.not.have.nested.property('timeline.abort') - }) - - it('aborts', async () => { - const sendResetSpy = Sinon.spy(stream, 'sendReset') - - stream.abort(new Error('Urk!')) - - expect(sendResetSpy.calledOnce).to.be.true() - - expect(stream).to.have.property('status', 'aborted') - expect(stream).to.have.property('writeStatus', 'closed') - expect(stream).to.have.property('readStatus', 'closed') - expect(stream).to.have.nested.property('timeline.close').that.is.a('number') - expect(stream).to.have.nested.property('timeline.closeRead').that.is.a('number') - expect(stream).to.have.nested.property('timeline.closeWrite').that.is.a('number') - expect(stream).to.not.have.nested.property('timeline.reset') - expect(stream).to.have.nested.property('timeline.abort').that.is.a('number') - - await expect(stream.sink([])).to.eventually.be.rejected - .with.property('name', 'StreamStateError') - await expect(drain(stream.source)).to.eventually.be.rejected - .with('Urk!') - }) - - it('gets reset remotely', async () => { - stream.reset() - - expect(stream).to.have.property('status', 'reset') - expect(stream).to.have.property('writeStatus', 'closed') - expect(stream).to.have.property('readStatus', 'closed') - 
expect(stream).to.have.nested.property('timeline.close').that.is.a('number') - expect(stream).to.have.nested.property('timeline.closeRead').that.is.a('number') - expect(stream).to.have.nested.property('timeline.closeWrite').that.is.a('number') - expect(stream).to.have.nested.property('timeline.reset').that.is.a('number') - expect(stream).to.not.have.nested.property('timeline.abort') - - await expect(stream.sink([])).to.eventually.be.rejected - .with.property('name', 'StreamStateError') - await expect(drain(stream.source)).to.eventually.be.rejected - .with.property('name', 'StreamResetError') - }) - - it('does not send close read when remote closes write', async () => { - const sendCloseReadSpy = Sinon.spy(stream, 'sendCloseRead') - - stream.remoteCloseWrite() - - await delay(100) - - expect(sendCloseReadSpy.called).to.be.false() - }) - - it('does not send close write when remote closes read', async () => { - const sendCloseWriteSpy = Sinon.spy(stream, 'sendCloseWrite') - - stream.remoteCloseRead() - - await delay(100) - - expect(sendCloseWriteSpy.called).to.be.false() - }) - - it('does not send close read or write when remote resets', async () => { - const sendCloseReadSpy = Sinon.spy(stream, 'sendCloseRead') - const sendCloseWriteSpy = Sinon.spy(stream, 'sendCloseWrite') - - stream.reset() - - await delay(100) - - expect(sendCloseReadSpy.called).to.be.false() - expect(sendCloseWriteSpy.called).to.be.false() - }) - - it('should wait for sending data to finish when closing gracefully', async () => { - const sendStarted = pDefer() - let timeFinished: number = 0 - const wasAbortedBeforeSendingFinished = pDefer() - const wasAbortedAfterSendingFinished = pDefer() - - // stub send method to simulate slow sending - stream.sendData = async (data, options): Promise => { - sendStarted.resolve() - await delay(1000) - timeFinished = Date.now() - - // slow send has finished, make sure we weren't aborted before we were - // done sending data - wasAbortedBeforeSendingFinished.resolve(options?.signal?.aborted) - - // save a reference to the signal, should be aborted after - // `stream.close()` returns - wasAbortedAfterSendingFinished.resolve(options?.signal) - } - const data = [ - Uint8Array.from([0, 1, 2, 3, 4]) - ] - - void stream.sink(data) - - // wait for send to start - await sendStarted.promise - - // close stream - await stream.close() - - // should have waited for send to complete - expect(Date.now()).to.be.greaterThanOrEqual(timeFinished) - await expect(wasAbortedBeforeSendingFinished.promise).to.eventually.be.false() - await expect(wasAbortedAfterSendingFinished.promise).to.eventually.have.property('aborted', true) - }) - - it('should abort close due to timeout with slow sender', async () => { - const sendStarted = pDefer() - - // stub send method to simulate slow sending - stream.sendData = async (): Promise => { - sendStarted.resolve() - await delay(1000) - } - const data = [ - Uint8Array.from([0, 1, 2, 3, 4]) - ] - - void stream.sink(data) - - // wait for send to start - await sendStarted.promise - - // close stream, should be aborted - await expect(stream.close({ - signal: AbortSignal.timeout(1) - })).to.eventually.be.rejected - .with.property('name', 'AbortError') - }) -}) diff --git a/packages/utils/test/close-source.spec.ts b/packages/utils/test/close-source.spec.ts deleted file mode 100644 index 67e8fcc62d..0000000000 --- a/packages/utils/test/close-source.spec.ts +++ /dev/null @@ -1,26 +0,0 @@ -import { expect } from 'aegir/chai' -import { stubInterface } from 'sinon-ts' -import { 
closeSource } from '../src/close-source.js' -import type { Logger } from '@libp2p/interface' - -describe('close source', () => { - it('should close an async iterable', async () => { - let count = 0 - const iterable = (async function * (): AsyncGenerator { - while (true) { - yield count++ - } - })() - - const val = await iterable.next() - expect(val).to.have.property('done', false) - expect(val).to.have.property('value', 0) - - closeSource(iterable, stubInterface()) - - const last = await iterable.next() - expect(last).to.have.property('done', true) - expect(last).to.have.property('value', undefined) - expect(count).to.equal(1) - }) -}) diff --git a/packages/utils/test/filter/bloom-filter.spec.ts b/packages/utils/test/filters/bloom-filter.spec.ts similarity index 99% rename from packages/utils/test/filter/bloom-filter.spec.ts rename to packages/utils/test/filters/bloom-filter.spec.ts index 63e19c0957..5ecb58d152 100644 --- a/packages/utils/test/filter/bloom-filter.spec.ts +++ b/packages/utils/test/filters/bloom-filter.spec.ts @@ -1,7 +1,7 @@ // ported from xxbloom - https://github.com/ceejbot/xxbloom/blob/master/LICENSE import { expect } from 'aegir/chai' import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' -import { BloomFilter, createBloomFilter } from '../../src/filters/bloom-filter.js' +import { BloomFilter, createBloomFilter } from '../../src/filters/bloom-filter.ts' function hasBitsSet (buffer: Uint8Array): number { let isset = 0 diff --git a/packages/utils/test/filter/cuckoo-filter.spec.ts b/packages/utils/test/filters/cuckoo-filter.spec.ts similarity index 94% rename from packages/utils/test/filter/cuckoo-filter.spec.ts rename to packages/utils/test/filters/cuckoo-filter.spec.ts index 823d2c6d8d..83c5786d49 100644 --- a/packages/utils/test/filter/cuckoo-filter.spec.ts +++ b/packages/utils/test/filters/cuckoo-filter.spec.ts @@ -1,6 +1,6 @@ import { randomBytes } from '@libp2p/crypto' import { expect } from 'aegir/chai' -import { CuckooFilter } from '../../src/filters/cuckoo-filter.js' +import { CuckooFilter } from '../../src/filters/cuckoo-filter.ts' describe('cuckoo-filter', () => { let keys: Uint8Array[] diff --git a/packages/utils/test/filter/scalable-cuckoo-filter.spec.ts b/packages/utils/test/filters/scalable-cuckoo-filter.spec.ts similarity index 96% rename from packages/utils/test/filter/scalable-cuckoo-filter.spec.ts rename to packages/utils/test/filters/scalable-cuckoo-filter.spec.ts index 7d7f335ce0..c9933e12ed 100644 --- a/packages/utils/test/filter/scalable-cuckoo-filter.spec.ts +++ b/packages/utils/test/filters/scalable-cuckoo-filter.spec.ts @@ -1,6 +1,6 @@ import { randomBytes } from '@libp2p/crypto' import { expect } from 'aegir/chai' -import { ScalableCuckooFilter, createScalableCuckooFilter } from '../../src/filters/scalable-cuckoo-filter.js' +import { ScalableCuckooFilter, createScalableCuckooFilter } from '../../src/filters/scalable-cuckoo-filter.ts' describe('scalable-cuckoo-filter', () => { let keys: Uint8Array[] diff --git a/packages/utils/test/length-prefixed-decoder.spec.ts b/packages/utils/test/length-prefixed-decoder.spec.ts new file mode 100644 index 0000000000..d319aa933a --- /dev/null +++ b/packages/utils/test/length-prefixed-decoder.spec.ts @@ -0,0 +1,27 @@ +import { expect } from 'aegir/chai' +import all from 'it-all' +import { encode } from 'uint8-varint' +import { Uint8ArrayList } from 'uint8arraylist' +import { LengthPrefixedDecoder } from '../src/length-prefixed-decoder.ts' + +describe('length-prefixed-decoder', () => { + 
it('should decode', () => { + const buf1 = Uint8Array.from([0, 1, 2, 3, 4]) + const buf2 = Uint8Array.from([5, 6, 7, 8, 9]) + + const input = [ + encode(buf1.byteLength), + buf1, + encode(buf2.byteLength), + buf2 + ] + + const decoder = new LengthPrefixedDecoder() + const result = all(decoder.decode(new Uint8ArrayList(...input))) + + expect(result).to.deep.equal([ + new Uint8ArrayList(buf1), + new Uint8ArrayList(buf2) + ]) + }) +}) diff --git a/packages/utils/test/message-queue.spec.ts b/packages/utils/test/message-queue.spec.ts new file mode 100644 index 0000000000..857b1b61ce --- /dev/null +++ b/packages/utils/test/message-queue.spec.ts @@ -0,0 +1,65 @@ +import { defaultLogger } from '@libp2p/logger' +import { expect } from 'aegir/chai' +import { raceEvent } from 'race-event' +import { MessageQueue } from '../src/message-queue.ts' + +export interface Events { + event: Event +} + +describe('message-queue', () => { + it('should send data', async () => { + const event = new Event('event') + const queue = new MessageQueue({ + log: defaultLogger().forComponent('message-queue') + }) + const eventPromise = raceEvent(queue, 'event') + queue.send(event) + + await expect(eventPromise).to.eventually.equal(event) + }) + + it('should send data after a delay', async () => { + const event = new Event('event') + const queue = new MessageQueue({ + delay: 10, + log: defaultLogger().forComponent('message-queue') + }) + queue.send(event) + + const sent = await raceEvent(queue, 'event') + + expect(sent).to.equal(event) + }) + + it('should limit capacity', async () => { + const sent: Event[] = [ + new Event('event'), + new Event('event'), + new Event('event'), + new Event('event'), + new Event('event') + ] + const received: Event[] = [] + + const queue = new MessageQueue({ + delay: 10, + capacity: 5, + log: defaultLogger().forComponent('message-queue') + }) + + queue.addEventListener('event', evt => { + received.push(evt) + }) + + expect(queue.send(sent[0])).to.be.true() + expect(queue.send(sent[1])).to.be.true() + expect(queue.send(sent[2])).to.be.true() + expect(queue.send(sent[3])).to.be.true() + expect(queue.send(sent[4])).to.be.false() + + await raceEvent(queue, 'drain') + + expect(received).to.deep.equal(sent) + }) +}) diff --git a/packages/utils/test/multiaddr-connection-pair.spec.ts b/packages/utils/test/multiaddr-connection-pair.spec.ts new file mode 100644 index 0000000000..51297ae621 --- /dev/null +++ b/packages/utils/test/multiaddr-connection-pair.spec.ts @@ -0,0 +1,93 @@ +import { expect } from 'aegir/chai' +import { pEvent } from 'p-event' +import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' +import { multiaddrConnectionPair } from '../src/multiaddr-connection-pair.ts' + +describe('multiaddr-conection-pair', () => { + it('should send data', async () => { + const [outbound, inbound] = multiaddrConnectionPair() + + const sent: Uint8Array[] = [] + const received: Uint8Array[] = [] + + await inbound.closeWrite() + + inbound.addEventListener('message', (evt) => { + received.push(evt.data.subarray()) + }) + + for (let i = 0; i < 1_000; i++) { + const buf = uint8ArrayFromString(`send data ${i}`) + sent.push(buf) + + const sendMore = outbound.send(buf) + + if (!sendMore) { + await pEvent(outbound, 'drain', { + rejectionEvents: ['close'] + }) + } + } + + await Promise.all([ + pEvent(inbound, 'close'), + outbound.closeWrite() + ]) + + expect(received).to.deep.equal(sent) + }) + + it('should read and write data simultaneously', async () => { + const [outbound, inbound] = 
multiaddrConnectionPair() + + const messages = 10 + const outboundReceived: Uint8Array[] = [] + const inboundReceived: Uint8Array[] = [] + + outbound.addEventListener('message', (evt) => { + outboundReceived.push(evt.data.subarray()) + }) + inbound.addEventListener('message', (evt) => { + inboundReceived.push(evt.data.subarray()) + }) + + await Promise.all([ + (async () => { + for (let i = 0; i < messages; i++) { + const buf = uint8ArrayFromString(`send data ${i}`) + const sendMore = outbound.send(buf) + + if (!sendMore) { + await pEvent(outbound, 'drain', { + rejectionEvents: ['close'] + }) + } + } + + await outbound.closeWrite() + })(), + (async () => { + for (let i = 0; i < messages; i++) { + const buf = uint8ArrayFromString(`send data ${i}`) + const sendMore = inbound.send(buf) + + if (!sendMore) { + await pEvent(inbound, 'drain', { + rejectionEvents: ['close'] + }) + } + } + + await inbound.closeWrite() + })() + ]) + + await Promise.all([ + pEvent(outbound, 'close'), + pEvent(inbound, 'close') + ]) + + expect(outboundReceived).to.have.lengthOf(messages) + expect(inboundReceived).to.have.lengthOf(messages) + }) +}) diff --git a/packages/utils/test/queue.spec.ts b/packages/utils/test/queue.spec.ts index 5f2356e63d..2e6534cf61 100644 --- a/packages/utils/test/queue.spec.ts +++ b/packages/utils/test/queue.spec.ts @@ -120,6 +120,42 @@ describe('queue', () => { await Promise.all(input) }) + it('should pause', async () => { + const queue = new Queue({ + concurrency: 1 + }) + + queue.add(async () => { + await delay(10) + }) + queue.add(async () => { + await delay(10) + }) + queue.add(async () => { + await delay(10) + }) + queue.add(async () => { + await delay(10) + }) + + expect(queue.running).to.equal(1) + expect(queue.size).to.equal(4) + + queue.pause() + + await delay(100) + + expect(queue.running).to.equal(0, 'started new jobs while paused') + expect(queue.size).to.equal(3, 'started new jobs while paused') + + queue.resume() + + await delay(100) + + expect(queue.running).to.equal(0, 'did not start new jobs after resume') + expect(queue.size).to.equal(0, 'did not start new jobs after resume') + }) + it('.onEmpty()', async () => { const queue = new Queue({ concurrency: 1 }) diff --git a/packages/utils/test/rate-limiter.spec.ts b/packages/utils/test/rate-limiter.spec.ts index a637f2b534..2a1dba952e 100644 --- a/packages/utils/test/rate-limiter.spec.ts +++ b/packages/utils/test/rate-limiter.spec.ts @@ -5,10 +5,10 @@ import { RateLimiter } from '../src/rate-limiter.js' describe('RateLimiter with fixed window', function () { this.timeout(5000) - it('consume 1 point', async () => { + it('consume 1 point', () => { const testKey = 'consume1' const rateLimiterMemory = new RateLimiter({ points: 2, duration: 5 }) - await rateLimiterMemory.consume(testKey) + rateLimiterMemory.consume(testKey) const res = rateLimiterMemory.memoryStorage.get(rateLimiterMemory.getKey(testKey)) expect(res).to.have.property('consumedPoints', 1) @@ -18,51 +18,14 @@ describe('RateLimiter with fixed window', function () { const testKey = 'consume2' const rateLimiterMemory = new RateLimiter({ points: 1, duration: 5 }) - await expect(rateLimiterMemory.consume(testKey, 2)).to.eventually.be.rejected + expect(() => rateLimiterMemory.consume(testKey, 2)).to.throw() .with.property('msBeforeNext').that.is.gte(0) }) - it('execute evenly over duration with minimum delay 20 ms', async () => { - const testKey = 'consumeEvenlyMinDelay' - const rateLimiterMemory = new RateLimiter({ - points: 100, duration: 1, execEvenly: true, 
execEvenlyMinDelayMs: 20 - }) - - await rateLimiterMemory.consume(testKey) - - const timeFirstConsume = Date.now() - - await rateLimiterMemory.consume(testKey) - - expect(Date.now() - timeFirstConsume >= 20).to.equal(true) - }) - - it('execute evenly over duration', async () => { - const testKey = 'consumeEvenly' - const rateLimiterMemory = new RateLimiter({ - points: 2, duration: 5, execEvenly: true, execEvenlyMinDelayMs: 1 - }) - await rateLimiterMemory.consume(testKey) - - const timeFirstConsume = Date.now() - - await rateLimiterMemory.consume(testKey) - - // Second consume should be delayed more than 2 seconds - // Explanation: - // 1) consume at 0ms, remaining duration = 5000ms - // 2) delayed consume for (4999 / (0 + 2)) ~= 2500ms, where 2 is a fixed value - // , because it mustn't delay in the beginning and in the end of duration - // 3) consume after 2500ms by timeout - - const diff = Date.now() - timeFirstConsume - expect(diff > 2400 && diff < 2600).to.equal(true) - }) - it('makes penalty', async () => { const testKey = 'penalty1' const rateLimiterMemory = new RateLimiter({ points: 3, duration: 5 }) - await rateLimiterMemory.consume(testKey) + rateLimiterMemory.consume(testKey) rateLimiterMemory.penalty(testKey) @@ -75,7 +38,7 @@ describe('RateLimiter with fixed window', function () { const testKey = 'reward1' const rateLimiterMemory = new RateLimiter({ points: 1, duration: 5 }) - await rateLimiterMemory.consume(testKey) + rateLimiterMemory.consume(testKey) rateLimiterMemory.reward(testKey) @@ -96,7 +59,7 @@ describe('RateLimiter with fixed window', function () { const testKey = 'block' const rateLimiterMemory = new RateLimiter({ points: 1, duration: 1, blockDuration: 2 }) - await expect(rateLimiterMemory.consume(testKey, 2)).to.eventually.be.rejected + expect(() => rateLimiterMemory.consume(testKey, 2)).throw() .with.property('msBeforeNext').that.is.greaterThan(1000) }) @@ -104,11 +67,11 @@ describe('RateLimiter with fixed window', function () { const testKey = 'doNotBlockTwice' const rateLimiterMemory = new RateLimiter({ points: 1, duration: 1, blockDuration: 2 }) - await expect(rateLimiterMemory.consume(testKey, 2)).to.eventually.be.rejected() + expect(() => rateLimiterMemory.consume(testKey, 2)).to.throw() await delay(1201) - await expect(rateLimiterMemory.consume(testKey)).to.eventually.be.rejected() + expect(() => rateLimiterMemory.consume(testKey)).to.throw() .with.property('msBeforeNext').that.is.lessThan(1000) }) @@ -116,11 +79,11 @@ describe('RateLimiter with fixed window', function () { const testKey = 'blockExpires' const rateLimiterMemory = new RateLimiter({ points: 1, duration: 1, blockDuration: 2 }) - await expect(rateLimiterMemory.consume(testKey, 2)).to.eventually.be.rejected() + expect(() => rateLimiterMemory.consume(testKey, 2)).to.throw() await delay(2000) - const res = await rateLimiterMemory.consume(testKey) + const res = rateLimiterMemory.consume(testKey) expect(res).to.have.property('consumedPoints', 1) }) @@ -130,7 +93,7 @@ describe('RateLimiter with fixed window', function () { const rateLimiterMemory = new RateLimiter({ points: 1, duration: 1 }) rateLimiterMemory.block(testKey, 2) - await expect(rateLimiterMemory.consume(testKey)).to.eventually.be.rejected() + expect(() => rateLimiterMemory.consume(testKey)).to.throw() .with.property('msBeforeNext').that.is.within(1000, 2000) }) @@ -138,7 +101,7 @@ describe('RateLimiter with fixed window', function () { const testKey = 'get' const rateLimiterMemory = new RateLimiter({ points: 2, duration: 5 }) - await 
rateLimiterMemory.consume(testKey) + rateLimiterMemory.consume(testKey) const res = rateLimiterMemory.get(testKey) @@ -155,7 +118,7 @@ describe('RateLimiter with fixed window', function () { it('delete resolves true if key is set', async () => { const testKey = 'deleteKey' const rateLimiterMemory = new RateLimiter({ points: 2, duration: 5 }) - await rateLimiterMemory.consume(testKey) + rateLimiterMemory.consume(testKey) rateLimiterMemory.delete(testKey) @@ -174,7 +137,7 @@ describe('RateLimiter with fixed window', function () { const testKey = 'options.customDuration' const rateLimiterMemory = new RateLimiter({ points: 1, duration: 5 }) - const res = await rateLimiterMemory.consume(testKey, 1, { customDuration: 1 }) + const res = rateLimiterMemory.consume(testKey, 1, { customDuration: 1 }) expect(res.msBeforeNext).to.be.lte(1000) }) @@ -182,7 +145,7 @@ describe('RateLimiter with fixed window', function () { const testKey = 'options.customDuration.forever' const rateLimiterMemory = new RateLimiter({ points: 1, duration: 5 }) - const res = await rateLimiterMemory.consume(testKey, 1, { customDuration: 0 }) + const res = rateLimiterMemory.consume(testKey, 1, { customDuration: 0 }) expect(res).to.have.property('msBeforeNext', -1) }) @@ -205,8 +168,8 @@ describe('RateLimiter with fixed window', function () { it('does not expire key if duration set to 0', async () => { const testKey = 'neverExpire' const rateLimiterMemory = new RateLimiter({ points: 2, duration: 0 }) - await rateLimiterMemory.consume(testKey, 1) - await rateLimiterMemory.consume(testKey, 1) + rateLimiterMemory.consume(testKey, 1) + rateLimiterMemory.consume(testKey, 1) const res = rateLimiterMemory.get(testKey) expect(res).to.have.property('consumedPoints', 2) @@ -251,7 +214,7 @@ describe('RateLimiter with fixed window', function () { const keyPrefix = 'test' const testKey = 'consume-negative-before-next' const rateLimiterMemory = new RateLimiter({ points: 2, duration: 5, keyPrefix }) - await rateLimiterMemory.consume(testKey) + rateLimiterMemory.consume(testKey) const rec = rateLimiterMemory.memoryStorage.storage.get(`${keyPrefix}:${testKey}`) expect(rec).to.be.ok() @@ -262,7 +225,7 @@ describe('RateLimiter with fixed window', function () { rec.expiresAt = new Date(Date.now() - 1000) - const res = await rateLimiterMemory.consume(testKey) + const res = rateLimiterMemory.consume(testKey) expect(res).to.have.property('consumedPoints', 1) expect(res).to.have.property('remainingPoints', 1) expect(res).to.have.property('msBeforeNext', 5000) diff --git a/packages/utils/test/stream-utils-test.spec.ts b/packages/utils/test/stream-utils-test.spec.ts new file mode 100644 index 0000000000..6a5235f796 --- /dev/null +++ b/packages/utils/test/stream-utils-test.spec.ts @@ -0,0 +1,181 @@ +/* eslint-env mocha */ + +import { expect } from 'aegir/chai' +import all from 'it-all' +import drain from 'it-drain' +import { pEvent } from 'p-event' +import { Uint8ArrayList } from 'uint8arraylist' +import { streamPair } from '../src/stream-pair.ts' +import { echo, pipe, messageStreamToDuplex, byteStream } from '../src/stream-utils.js' + +describe('messageStreamToDuplex', () => { + it('should source all reads', async () => { + const [outgoing, incoming] = await streamPair() + + const input = new Array(10).fill(0).map((val, index) => { + return Uint8Array.from([0, 1, 2, 3, index]) + }) + + const it = messageStreamToDuplex(incoming) + + Promise.resolve().then(async () => { + for (const buf of input) { + if (!outgoing.send(buf)) { + await pEvent(outgoing, 
'drain') + } + } + + await outgoing.closeWrite() + }) + + await expect(all(it.source)).to.eventually.deep.equal(input) + }) + + it('should sink all writes', async () => { + const [outgoing, incoming] = await streamPair() + + const input = new Array(10).fill(0).map((val, index) => { + return Uint8Array.from([0, 1, 2, 3, index]) + }) + + const it = messageStreamToDuplex(outgoing) + + const output: Array = [] + + incoming.addEventListener('message', (evt) => { + output.push(evt.data) + }) + + it.sink(async function * () { + yield * input + }()) + + await pEvent(incoming, 'remoteCloseWrite') + + expect(output).to.deep.equal(input) + }) + + it('should throw from source if stream is reset', async () => { + const [outgoing] = await streamPair() + + const err = new Error('Urk!') + const it = messageStreamToDuplex(outgoing) + + outgoing.abort(err) + + await expect(drain(it.source)).to.eventually.be.rejected() + .with.property('message', err.message) + }) + + it('should throw from sink if stream is reset', async () => { + const [outgoing] = await streamPair() + + const err = new Error('Urk!') + const it = messageStreamToDuplex(outgoing) + + outgoing.abort(err) + + await expect(it.sink([ + Uint8Array.from([0, 1, 2, 3]) + ])).to.eventually.be.rejected() + .with.property('message', err.message) + }) +}) + +describe('echo', () => { + it('should echo message streams', async () => { + const [outgoing, incoming] = await streamPair() + + void echo(incoming) + + const input = new Array(10).fill(0).map((val, index) => { + return Uint8Array.from([0, 1, 2, 3, index]) + }) + + const [, output] = await Promise.all([ + Promise.resolve().then(async () => { + for (const buf of input) { + if (!outgoing.send(buf)) { + await pEvent(outgoing, 'drain') + } + } + + await outgoing.closeWrite() + }), + all(outgoing) + ]) + + expect(output).to.deep.equal(input) + }) +}) + +describe('pipe', () => { + it('should pipe from one channel to another', async () => { + const [outgoing, incoming] = await streamPair() + + void echo(incoming) + + const input = [ + Uint8Array.from([0, 1, 2, 3]), + Uint8Array.from([4, 5, 6, 7]), + Uint8Array.from([8, 9, 0, 1]) + ] + + const vals = await pipe( + input, + function * (source) { + for (const buf of source) { + yield Uint8Array.from( + [...buf].map((val) => val + 1) + ) + } + }, + outgoing, + (source) => all(source) + ) + + expect(vals).to.deep.equal([ + Uint8Array.from([1, 2, 3, 4]), + Uint8Array.from([5, 6, 7, 8]), + Uint8Array.from([9, 10, 1, 2]) + ]) + }) +}) + +describe('byte-stream', () => { + it('should read bytes', async () => { + const [outgoing, incoming] = await streamPair() + + const outgoingBytes = byteStream(outgoing) + const incomingBytes = byteStream(incoming) + + const written = new Uint8ArrayList(Uint8Array.from([0, 1, 2, 3])) + + const [read] = await Promise.all([ + incomingBytes.read(), + outgoingBytes.write(written) + ]) + + expect(read).to.deep.equal(written) + }) + + it('should read and write bytes', async () => { + const [outgoing, incoming] = await streamPair() + + const outgoingBytes = byteStream(outgoing) + const incomingBytes = byteStream(incoming) + + const writtenOutgoing = new Uint8ArrayList(Uint8Array.from([0, 1, 2, 3])) + const writtenIncoming = new Uint8ArrayList(Uint8Array.from([4, 5, 6, 7])) + + const [readIncoming, , readOutgoing] = await Promise.all([ + incomingBytes.read(), + outgoingBytes.write(writtenOutgoing), + outgoingBytes.read(), + incomingBytes.write(writtenIncoming) + ]) + + expect(readIncoming).to.deep.equal(writtenOutgoing) + 
expect(readOutgoing).to.deep.equal(writtenIncoming) + }) +})
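
For orientation, a minimal usage sketch of the stream helpers exercised by the spec above, assembled only from the calls these tests actually make (`streamPair`, `send`/`drain`, `closeWrite`, the `message` event, and `byteStream`). The `@libp2p/utils` import path is an assumption — the tests import these helpers from relative source paths — and the sketch is illustrative, not part of this diff.

```ts
// Minimal sketch based on the patterns used in the tests above.
// Assumes streamPair() and byteStream() are exported from @libp2p/utils.
import { pEvent } from 'p-event'
import { Uint8ArrayList } from 'uint8arraylist'
import { streamPair, byteStream } from '@libp2p/utils' // assumed export path

async function example (): Promise<void> {
  // two connected in-memory stream ends, as used throughout the spec
  const [outgoing, incoming] = await streamPair()

  // raw message API: the remote end surfaces data via 'message' events
  incoming.addEventListener('message', (evt) => {
    console.info('received', evt.data)
  })

  // send() returns false when the internal buffer is full; wait for 'drain'
  // before sending more (the same back-pressure pattern the tests follow)
  if (!outgoing.send(Uint8Array.from([0, 1, 2, 3]))) {
    await pEvent(outgoing, 'drain')
  }

  await outgoing.closeWrite()

  // byte-oriented wrapper over a second pair: write() takes a Uint8ArrayList,
  // read() resolves with the next chunk received from the remote end
  const [a, b] = await streamPair()
  const writer = byteStream(a)
  const reader = byteStream(b)

  const [chunk] = await Promise.all([
    reader.read(),
    writer.write(new Uint8ArrayList(Uint8Array.from([4, 5, 6, 7])))
  ])
  console.info('read', chunk)
}
```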