diff --git a/benchmarks/import/package.json b/benchmarks/import/package.json
index 2c380ca6..af01dd55 100644
--- a/benchmarks/import/package.json
+++ b/benchmarks/import/package.json
@@ -13,7 +13,7 @@
   },
   "devDependencies": {
     "aegir": "^47.0.16",
-    "blockstore-core": "^5.0.4",
+    "blockstore-core": "^6.0.2",
     "ipfs-unixfs-importer": "^15.0.0",
     "it-buffer-stream": "^3.0.11",
     "it-drain": "^3.0.10"
diff --git a/benchmarks/memory/package.json b/benchmarks/memory/package.json
index 70f46164..3b4c0055 100644
--- a/benchmarks/memory/package.json
+++ b/benchmarks/memory/package.json
@@ -13,7 +13,7 @@
   },
   "devDependencies": {
     "aegir": "^47.0.16",
-    "blockstore-fs": "^2.0.4",
+    "blockstore-fs": "^3.0.1",
     "ipfs-unixfs-importer": "^15.0.0",
     "it-drain": "^3.0.10"
   },
diff --git a/packages/ipfs-unixfs-exporter/package.json b/packages/ipfs-unixfs-exporter/package.json
index 1f3a4177..2d6219dc 100644
--- a/packages/ipfs-unixfs-exporter/package.json
+++ b/packages/ipfs-unixfs-exporter/package.json
@@ -143,7 +143,7 @@
     "@ipld/dag-pb": "^4.1.5",
     "@multiformats/murmur3": "^2.1.8",
     "hamt-sharding": "^3.0.6",
-    "interface-blockstore": "^5.3.2",
+    "interface-blockstore": "^6.0.1",
     "ipfs-unixfs": "^11.0.0",
     "it-filter": "^3.1.4",
     "it-last": "^3.0.9",
@@ -151,15 +151,16 @@
     "it-parallel": "^3.0.13",
     "it-pipe": "^3.0.1",
     "it-pushable": "^3.2.3",
+    "it-to-buffer": "^4.0.10",
     "multiformats": "^13.3.7",
-    "p-queue": "^8.1.0",
+    "p-queue": "^9.0.0",
     "progress-events": "^1.0.1"
   },
   "devDependencies": {
     "@types/readable-stream": "^4.0.21",
     "@types/sinon": "^17.0.4",
     "aegir": "^47.0.16",
-    "blockstore-core": "^5.0.4",
+    "blockstore-core": "^6.0.2",
     "delay": "^6.0.0",
     "ipfs-unixfs-importer": "^15.0.0",
     "iso-random-stream": "^2.0.2",
@@ -167,7 +168,6 @@
     "it-buffer-stream": "^3.0.11",
     "it-drain": "^3.0.10",
     "it-first": "^3.0.9",
-    "it-to-buffer": "^4.0.10",
     "merge-options": "^3.0.4",
     "readable-stream": "^4.7.0",
     "sinon": "^21.0.0",
diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/dag-cbor.ts b/packages/ipfs-unixfs-exporter/src/resolvers/dag-cbor.ts
index 0631af98..323f4255 100644
--- a/packages/ipfs-unixfs-exporter/src/resolvers/dag-cbor.ts
+++ b/packages/ipfs-unixfs-exporter/src/resolvers/dag-cbor.ts
@@ -1,9 +1,10 @@
 import * as dagCbor from '@ipld/dag-cbor'
+import toBuffer from 'it-to-buffer'
 import { resolveObjectPath } from '../utils/resolve-object-path.js'
 import type { Resolver } from '../index.js'
 
 const resolve: Resolver = async (cid, name, path, toResolve, resolve, depth, blockstore, options) => {
-  const block = await blockstore.get(cid, options)
+  const block = await toBuffer(blockstore.get(cid, options))
   const object = dagCbor.decode(block)
 
   return resolveObjectPath(object, block, cid, name, path, toResolve, depth)
diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/dag-json.ts b/packages/ipfs-unixfs-exporter/src/resolvers/dag-json.ts
index c206d7af..a8ed7dae 100644
--- a/packages/ipfs-unixfs-exporter/src/resolvers/dag-json.ts
+++ b/packages/ipfs-unixfs-exporter/src/resolvers/dag-json.ts
@@ -1,9 +1,10 @@
 import * as dagJson from '@ipld/dag-json'
+import toBuffer from 'it-to-buffer'
 import { resolveObjectPath } from '../utils/resolve-object-path.js'
 import type { Resolver } from '../index.js'
 
 const resolve: Resolver = async (cid, name, path, toResolve, resolve, depth, blockstore, options) => {
-  const block = await blockstore.get(cid, options)
+  const block = await toBuffer(blockstore.get(cid, options))
   const object = dagJson.decode(block)
 
   return resolveObjectPath(object, block, cid, name, path, toResolve, depth)
diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/json.ts b/packages/ipfs-unixfs-exporter/src/resolvers/json.ts
index 90c5a174..92c2c852 100644
--- a/packages/ipfs-unixfs-exporter/src/resolvers/json.ts
+++ b/packages/ipfs-unixfs-exporter/src/resolvers/json.ts
@@ -1,9 +1,10 @@
+import toBuffer from 'it-to-buffer'
 import * as json from 'multiformats/codecs/json'
 import { resolveObjectPath } from '../utils/resolve-object-path.js'
 import type { Resolver } from '../index.js'
 
 const resolve: Resolver = async (cid, name, path, toResolve, resolve, depth, blockstore, options) => {
-  const block = await blockstore.get(cid, options)
+  const block = await toBuffer(blockstore.get(cid, options))
   const object = json.decode(block)
 
   return resolveObjectPath(object, block, cid, name, path, toResolve, depth)
diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/raw.ts b/packages/ipfs-unixfs-exporter/src/resolvers/raw.ts
index 4082e388..509d46ac 100644
--- a/packages/ipfs-unixfs-exporter/src/resolvers/raw.ts
+++ b/packages/ipfs-unixfs-exporter/src/resolvers/raw.ts
@@ -1,3 +1,4 @@
+import toBuffer from 'it-to-buffer'
 import { CustomProgressEvent } from 'progress-events'
 import { NotFoundError } from '../errors.js'
 import extractDataFromBlock from '../utils/extract-data-from-block.js'
@@ -30,7 +31,7 @@ const resolve: Resolver = async (cid, name, path, toResolve, resolve, depth, blo
     throw new NotFoundError(`No link named ${path} found in raw node ${cid}`)
   }
 
-  const block = await blockstore.get(cid, options)
+  const block = await toBuffer(blockstore.get(cid, options))
 
   return {
     entry: {
diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/file.ts b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/file.ts
index 3d549048..7cac1738 100644
--- a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/file.ts
+++ b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/file.ts
@@ -4,6 +4,7 @@ import map from 'it-map'
 import parallel from 'it-parallel'
 import { pipe } from 'it-pipe'
 import { pushable } from 'it-pushable'
+import toBuffer from 'it-to-buffer'
 import * as raw from 'multiformats/codecs/raw'
 import PQueue from 'p-queue'
 import { CustomProgressEvent } from 'progress-events'
@@ -76,7 +77,7 @@ async function walkDAG (blockstore: ReadableStorage, node: dagPb.PBNode | Uint8A
       childOps,
       (source) => map(source, (op) => {
         return async () => {
-          const block = await blockstore.get(op.link.Hash, options)
+          const block = await toBuffer(blockstore.get(op.link.Hash, options))
 
           return {
             ...op,
diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/hamt-sharded-directory.ts b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/hamt-sharded-directory.ts
index d191a688..1fafb07c 100644
--- a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/hamt-sharded-directory.ts
+++ b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/hamt-sharded-directory.ts
@@ -3,6 +3,7 @@ import { UnixFS } from 'ipfs-unixfs'
 import map from 'it-map'
 import parallel from 'it-parallel'
 import { pipe } from 'it-pipe'
+import toBuffer from 'it-to-buffer'
 import { CustomProgressEvent } from 'progress-events'
 import { NotUnixFSError } from '../../../errors.js'
 import { isBasicExporterOptions } from '../../../utils/is-basic-exporter-options.ts'
@@ -73,7 +74,7 @@ async function * listDirectory (node: PBNode, path: string, resolve: Resolve, de
         }
       } else {
         // descend into subshard
-        const block = await blockstore.get(link.Hash, options)
+        const block = await toBuffer(blockstore.get(link.Hash, options))
         node = decode(block)
 
         options.onProgress?.(new CustomProgressEvent('unixfs:exporter:walk:hamt-sharded-directory', {
diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/index.ts b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/index.ts
index 4fd337f3..0e2250f3 100644
--- a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/index.ts
+++ b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/index.ts
@@ -1,5 +1,6 @@
 import { decode } from '@ipld/dag-pb'
 import { UnixFS } from 'ipfs-unixfs'
+import toBuffer from 'it-to-buffer'
 import { NotFoundError, NotUnixFSError } from '../../errors.js'
 import findShardCid from '../../utils/find-cid-in-shard.js'
 import { isBasicExporterOptions } from '../../utils/is-basic-exporter-options.ts'
@@ -43,7 +44,7 @@ const unixFsResolver: Resolver = async (cid, name, path, toResolve, resolve, dep
     }
   }
 
-  const block = await blockstore.get(cid, options)
+  const block = await toBuffer(blockstore.get(cid, options))
   const node = decode(block)
   let unixfs
   let next
diff --git a/packages/ipfs-unixfs-exporter/src/utils/find-cid-in-shard.ts b/packages/ipfs-unixfs-exporter/src/utils/find-cid-in-shard.ts
index d604ff28..c88269c1 100644
--- a/packages/ipfs-unixfs-exporter/src/utils/find-cid-in-shard.ts
+++ b/packages/ipfs-unixfs-exporter/src/utils/find-cid-in-shard.ts
@@ -2,6 +2,7 @@ import { decode } from '@ipld/dag-pb'
 import { murmur3128 } from '@multiformats/murmur3'
 import { Bucket, createHAMT } from 'hamt-sharding'
 import { UnixFS } from 'ipfs-unixfs'
+import toBuffer from 'it-to-buffer'
 import { NotUnixFSError } from '../errors.js'
 import type { ExporterOptions, ShardTraversalContext, ReadableStorage } from '../index.js'
 import type { PBLink, PBNode } from '@ipld/dag-pb'
@@ -142,7 +143,7 @@ const findShardCid = async (node: PBNode, name: string, blockstore: ReadableStor
 
   context.hamtDepth++
 
-  const block = await blockstore.get(link.Hash, options)
+  const block = await toBuffer(blockstore.get(link.Hash, options))
   node = decode(block)
 
   return findShardCid(node, name, blockstore, context, options)
diff --git a/packages/ipfs-unixfs-exporter/test/exporter-sharded.spec.ts b/packages/ipfs-unixfs-exporter/test/exporter-sharded.spec.ts
index a6abbc85..35af64b4 100644
--- a/packages/ipfs-unixfs-exporter/test/exporter-sharded.spec.ts
+++ b/packages/ipfs-unixfs-exporter/test/exporter-sharded.spec.ts
@@ -8,6 +8,7 @@ import { importer } from 'ipfs-unixfs-importer'
 import all from 'it-all'
 import randomBytes from 'it-buffer-stream'
 import last from 'it-last'
+import toBuffer from 'it-to-buffer'
 import { CID } from 'multiformats/cid'
 import { sha256 } from 'multiformats/hashes/sha2'
 import { concat as uint8ArrayConcat } from 'uint8arrays/concat'
@@ -83,7 +84,7 @@ describe('exporter sharded', function () {
       files[imported.path].cid = imported.cid
     })
 
-    const encodedBlock = await block.get(dirCid)
+    const encodedBlock = await toBuffer(block.get(dirCid))
     const dir = dagPb.decode(encodedBlock)
     if (dir.Data == null) {
       throw Error('PBNode Data undefined')
diff --git a/packages/ipfs-unixfs-exporter/test/exporter.spec.ts b/packages/ipfs-unixfs-exporter/test/exporter.spec.ts
index 7d4de993..f915b6c1 100644
--- a/packages/ipfs-unixfs-exporter/test/exporter.spec.ts
+++ b/packages/ipfs-unixfs-exporter/test/exporter.spec.ts
@@ -150,7 +150,7 @@ describe('exporter', () => {
   it('ensure hash inputs are sanitized', async () => {
     const result = await dagPut()
 
-    const encodedBlock = await block.get(result.cid)
+    const encodedBlock = await toBuffer(block.get(result.cid))
     const node = dagPb.decode(encodedBlock)
     if (node.Data == null) {
       throw new Error('PBNode Data undefined')
@@ -212,7 +212,7 @@ describe('exporter', () => {
       content: uint8ArrayConcat(await all(randomBytes(100)))
     })
 
-    const encodedBlock = await block.get(result.cid)
+    const encodedBlock = await toBuffer(block.get(result.cid))
     const node = dagPb.decode(encodedBlock)
     if (node.Data == null) {
       throw new Error('PBNode Data undefined')
@@ -339,10 +339,10 @@ describe('exporter', () => {
     // @ts-expect-error incomplete implementation
     const blockStore: Blockstore = {
       ...block,
-      async get (cid: CID) {
+      async * get (cid: CID) {
         await delay(Math.random() * 10)
 
-        return block.get(cid)
+        yield * block.get(cid)
       }
     }
 
@@ -1289,9 +1289,10 @@ describe('exporter', () => {
     // regular test IPLD is offline-only, we need to mimic what happens when
     // we try to get a block from the network
     const customBlock = {
-      get: async (cid: CID, options: { signal: AbortSignal }) => {
+      // eslint-disable-next-line require-yield
+      get: async function * (cid: CID, options: { signal: AbortSignal }) {
        // promise will never resolve, so reject it when the abort signal is sent
-        return new Promise((resolve, reject) => {
+        await new Promise((resolve, reject) => {
          options.signal.addEventListener('abort', () => {
            reject(new Error(message))
          })
@@ -1299,7 +1300,6 @@ describe('exporter', () => {
       }
     }
 
-    // @ts-expect-error ipld implementation incomplete
     await expect(exporter(cid, customBlock, {
       signal: abortController.signal
     })).to.eventually.be.rejectedWith(message)
@@ -1397,13 +1397,13 @@ describe('exporter', () => {
       throw new Error('Nothing imported')
     }
 
-    const node = dagPb.decode(await block.get(imported.cid))
+    const node = dagPb.decode(await toBuffer(block.get(imported.cid)))
     expect(node.Links).to.have.lengthOf(2, 'imported node had too many children')
 
-    const child1 = dagPb.decode(await block.get(node.Links[0].Hash))
+    const child1 = dagPb.decode(await toBuffer(block.get(node.Links[0].Hash)))
     expect(child1.Links).to.have.lengthOf(2, 'layer 1 node had too many children')
 
-    const child2 = dagPb.decode(await block.get(node.Links[1].Hash))
+    const child2 = dagPb.decode(await toBuffer(block.get(node.Links[1].Hash)))
     expect(child2.Links).to.have.lengthOf(2, 'layer 1 node had too many children')
 
     // should be raw nodes
@@ -1468,7 +1468,7 @@ describe('exporter', () => {
       throw new Error('Nothing imported')
     }
 
-    const node = dagPb.decode(await block.get(imported.cid))
+    const node = dagPb.decode(await toBuffer(block.get(imported.cid)))
     expect(node.Links).to.have.lengthOf(entries, 'imported node had too many children')
 
     for (const link of node.Links) {
@@ -1491,7 +1491,7 @@ describe('exporter', () => {
 
     const actualInvocations: string[] = []
 
-    block.get = async (cid) => {
+    block.get = async function * (cid) {
      actualInvocations.push(`${cid.toString()}-start`)
 
      // introduce a small delay - if running in parallel actualInvocations will
@@ -1503,7 +1503,7 @@ describe('exporter', () => {
      actualInvocations.push(`${cid.toString()}-end`)
 
-      return originalGet(cid)
+      yield * originalGet(cid)
     }
 
     const blockReadSpy = Sinon.spy(block, 'get')
 
@@ -1539,7 +1539,7 @@ describe('exporter', () => {
       throw new Error('Nothing imported')
     }
 
-    const node = dagPb.decode(await block.get(imported.cid))
+    const node = dagPb.decode(await toBuffer(block.get(imported.cid)))
     const data = UnixFS.unmarshal(node.Data ?? new Uint8Array(0))
 
     expect(data.type).to.equal('hamt-sharded-directory')
@@ -1551,7 +1551,7 @@ describe('exporter', () => {
       children.push(link.Hash)
 
       if (link.Hash.code === dagPb.code) {
-        const buf = await block.get(link.Hash)
+        const buf = await toBuffer(block.get(link.Hash))
         const childNode = dagPb.decode(buf)
 
         children.push(...(await collectCIDs(childNode)))
@@ -1578,7 +1578,7 @@ describe('exporter', () => {
 
     const actualInvocations: string[] = []
 
-    block.get = async (cid) => {
+    block.get = async function * (cid) {
      actualInvocations.push(`${cid.toString()}-start`)
 
      // introduce a small delay - if running in parallel actualInvocations will
@@ -1590,7 +1590,7 @@ describe('exporter', () => {
      actualInvocations.push(`${cid.toString()}-end`)
 
-      return originalGet(cid)
+      yield * originalGet(cid)
     }
 
     const blockReadSpy = Sinon.spy(block, 'get')
 
diff --git a/packages/ipfs-unixfs-exporter/test/helpers/collect-leaf-cids.ts b/packages/ipfs-unixfs-exporter/test/helpers/collect-leaf-cids.ts
index 564ead38..251e5b84 100644
--- a/packages/ipfs-unixfs-exporter/test/helpers/collect-leaf-cids.ts
+++ b/packages/ipfs-unixfs-exporter/test/helpers/collect-leaf-cids.ts
@@ -1,10 +1,11 @@
 import * as dagPb from '@ipld/dag-pb'
+import toBuffer from 'it-to-buffer'
 import type { Blockstore } from 'interface-blockstore'
 import type { CID } from 'multiformats/cid'
 
 export default function (cid: CID, blockstore: Blockstore): AsyncGenerator<{ node: Uint8Array | dagPb.PBNode, cid: CID }, void, undefined> {
   async function * traverse (cid: CID): AsyncGenerator<{ node: dagPb.PBNode, cid: CID }, void, unknown> {
-    const block = await blockstore.get(cid)
+    const block = await toBuffer(blockstore.get(cid))
     const node = dagPb.decode(block)
 
     if (node instanceof Uint8Array || (node.Links.length === 0)) {
diff --git a/packages/ipfs-unixfs-exporter/test/importer.spec.ts b/packages/ipfs-unixfs-exporter/test/importer.spec.ts
index 2684468b..9db2fd64 100644
--- a/packages/ipfs-unixfs-exporter/test/importer.spec.ts
+++ b/packages/ipfs-unixfs-exporter/test/importer.spec.ts
@@ -11,6 +11,7 @@ import { balanced, flat, trickle } from 'ipfs-unixfs-importer/layout'
 import all from 'it-all'
 import first from 'it-first'
 import last from 'it-last'
+import toBuffer from 'it-to-buffer'
 // @ts-expect-error https://github.com/schnittstabil/merge-options/pull/28
 import extend from 'merge-options'
 import { base58btc } from 'multiformats/bases/base58'
@@ -203,7 +204,7 @@ const checkLeafNodeTypes = async (blockstore: Blockstore, options: Partial
-    node.Links.map(async link => blockstore.get(link.Hash))
+    node.Links.map(async link => toBuffer(blockstore.get(link.Hash)))
   )
 
   linkedBlocks.forEach(bytes => {
@@ -232,7 +233,7 @@ const checkNodeLinks = async (blockstore: Blockstore, options: Partial
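Migration note (a sketch, not part of the patch above): the bumps to `interface-blockstore@^6` / `blockstore-core@^6` change `blockstore.get()` from resolving to a single `Uint8Array` into returning an async iterable of chunks — visible in the diff where test doubles turn `async get (cid)` into `async * get (cid)`. Every call site therefore collects the stream with `it-to-buffer` before decoding. A minimal consumer-side sketch, assuming the v6 signature; `getAndDecode` is a hypothetical helper, not code from this PR:

```ts
import * as dagPb from '@ipld/dag-pb'
import toBuffer from 'it-to-buffer'
import type { Blockstore } from 'interface-blockstore'
import type { CID } from 'multiformats/cid'

// interface-blockstore@5: get() resolved to the whole block
//   const block: Uint8Array = await blockstore.get(cid)
// interface-blockstore@6 (assumed here): get() streams the block as an
// AsyncIterable<Uint8Array>, so concatenate the chunks before decoding
async function getAndDecode (blockstore: Blockstore, cid: CID): Promise<dagPb.PBNode> {
  const block = await toBuffer(blockstore.get(cid))

  return dagPb.decode(block)
}
```

On the producer side, stubs and mocks follow the same shape: implement `get` as an async generator and delegate with `yield *` (as the spec changes above do) instead of returning a promise for the full block.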