
fix: add option to export non-extended unixfs #438

Merged 1 commit on Jul 31, 2025
52 changes: 25 additions & 27 deletions packages/ipfs-unixfs-exporter/src/index.ts
@@ -57,7 +57,6 @@ import type { PBNode } from '@ipld/dag-pb'
import type { Bucket } from 'hamt-sharding'
import type { Blockstore } from 'interface-blockstore'
import type { UnixFS } from 'ipfs-unixfs'
import type { AbortOptions } from 'it-pushable'
import type { ProgressOptions, ProgressEvent } from 'progress-events'

export * from './errors.js'
@@ -314,6 +313,23 @@ export interface IdentityNode extends Exportable<Uint8Array> {
*/
export type UnixFSEntry = UnixFSFile | UnixFSDirectory | ObjectNode | RawNode | IdentityNode

export interface UnixFSBasicEntry {
/**
* The name of the entry
*/
name: string

/**
* The path of the entry within the DAG in which it was encountered
*/
path: string

/**
* The CID of the entry
*/
cid: CID
}

export interface NextResult {
cid: CID
name: string
@@ -327,39 +343,15 @@ export interface ResolveResult {
}

export interface Resolve { (cid: CID, name: string, path: string, toResolve: string[], depth: number, blockstore: ReadableStorage, options: ExporterOptions): Promise<ResolveResult> }
export interface Resolver { (cid: CID, name: string, path: string, toResolve: string[], resolve: Resolve, depth: number, blockstore: ReadableStorage, options: ExporterOptions): Promise<ResolveResult> }
export interface Resolver { (cid: CID, name: string, path: string, toResolve: string[], resolve: Resolve, depth: number, blockstore: ReadableStorage, options: ExporterOptions | BasicExporterOptions): Promise<ResolveResult> }

export type UnixfsV1FileContent = AsyncIterable<Uint8Array> | Iterable<Uint8Array>
export type UnixfsV1DirectoryContent = AsyncIterable<UnixFSEntry> | Iterable<UnixFSEntry>
export type UnixfsV1Content = UnixfsV1FileContent | UnixfsV1DirectoryContent

export interface UnixfsV1BasicContent {
/**
* The name of the entry
*/
name: string

/**
* The path of the entry within the DAG in which it was encountered
*/
path: string

/**
* The CID of the entry
*/
cid: CID

/**
* Resolve the root node of the entry to parse the UnixFS metadata contained
* there. The metadata will contain what kind of node it is (e.g. file,
* directory, etc), the file size, and more.
*/
resolve(options?: AbortOptions): Promise<UnixFSEntry>
}

export interface UnixFsV1ContentResolver {
(options: ExporterOptions): UnixfsV1Content
(options: BasicExporterOptions): UnixfsV1BasicContent
(options: BasicExporterOptions): UnixFSBasicEntry
}

export interface UnixfsV1Resolver {
@@ -435,6 +427,8 @@ const cidAndRest = (path: string | Uint8Array | CID): { cid: CID, toResolve: str
* // entries contains 4x `entry` objects
* ```
*/
export function walkPath (path: string | CID, blockstore: ReadableStorage, options?: ExporterOptions): AsyncGenerator<UnixFSEntry, void, any>
export function walkPath (path: string | CID, blockstore: ReadableStorage, options: BasicExporterOptions): AsyncGenerator<UnixFSBasicEntry, void, any>
export async function * walkPath (path: string | CID, blockstore: ReadableStorage, options: ExporterOptions = {}): AsyncGenerator<UnixFSEntry, void, any> {
let {
cid,
@@ -491,6 +485,8 @@ export async function * walkPath (path: string | CID, blockstore: ReadableStorag
* }
* ```
*/
export async function exporter (path: string | CID, blockstore: ReadableStorage, options?: ExporterOptions): Promise<UnixFSEntry>
export async function exporter (path: string | CID, blockstore: ReadableStorage, options: BasicExporterOptions): Promise<UnixFSBasicEntry>
export async function exporter (path: string | CID, blockstore: ReadableStorage, options: ExporterOptions = {}): Promise<UnixFSEntry> {
const result = await last(walkPath(path, blockstore, options))

@@ -519,6 +515,8 @@ export async function exporter (path: string | CID, blockstore: ReadableStorage,
 * // entries contains all children of the `Qmfoo/foo/bar` directory and its children
* ```
*/
export function recursive (path: string | CID, blockstore: ReadableStorage, options?: ExporterOptions): AsyncGenerator<UnixFSEntry, void, any>
export function recursive (path: string | CID, blockstore: ReadableStorage, options: BasicExporterOptions): AsyncGenerator<UnixFSBasicEntry, void, any>
export async function * recursive (path: string | CID, blockstore: ReadableStorage, options: ExporterOptions = {}): AsyncGenerator<UnixFSEntry, void, any> {
const node = await exporter(path, blockstore, options)

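For context on how the new overloads added to `src/index.ts` above are meant to be consumed, here is a minimal sketch. The `{ extended: false }` option and the returned shapes follow the overload signatures and the tests in this PR; the helper function, its name, and the console output are illustrative assumptions only.

```ts
import { exporter } from 'ipfs-unixfs-exporter'
import type { ReadableStorage } from 'ipfs-unixfs-exporter'
import type { CID } from 'multiformats/cid'

// Hypothetical helper: contrast the default (extended) export with the new
// basic export. Assumes `blockstore` already contains the blocks for `cid`.
async function inspect (cid: CID, blockstore: ReadableStorage): Promise<void> {
  // Default behaviour is unchanged: the block is fetched and decoded,
  // and the full UnixFSEntry is returned
  const extended = await exporter(cid, blockstore)
  console.info(extended.name, extended.path, extended.cid.toString())

  // Basic export: only `name`, `path` and `cid` are returned as a
  // UnixFSBasicEntry - no `unixfs` or `content` fields are present
  const basic = await exporter(cid, blockstore, { extended: false })
  console.info(basic.name, basic.path, basic.cid.toString())
}
```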
packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/directory.ts
@@ -4,7 +4,7 @@ import parallel from 'it-parallel'
import { pipe } from 'it-pipe'
import { CustomProgressEvent } from 'progress-events'
import { isBasicExporterOptions } from '../../../utils/is-basic-exporter-options.ts'
import type { BasicExporterOptions, ExporterOptions, ExportWalk, UnixFSEntry, UnixfsV1BasicContent, UnixfsV1Resolver } from '../../../index.js'
import type { BasicExporterOptions, ExporterOptions, ExportWalk, UnixFSBasicEntry, UnixfsV1Resolver } from '../../../index.js'

const directoryContent: UnixfsV1Resolver = (cid, node, unixfs, path, resolve, depth, blockstore) => {
async function * yieldDirectoryContent (options: ExporterOptions | BasicExporterOptions = {}): any {
@@ -23,23 +23,18 @@ const directoryContent: UnixfsV1Resolver = (cid, node, unixfs, path, resolve, de
const linkName = link.Name ?? ''
const linkPath = `${path}/${linkName}`

const load = async (options = {}): Promise<UnixFSEntry> => {
const result = await resolve(link.Hash, linkName, linkPath, [], depth + 1, blockstore, options)
return result.entry
}

if (isBasicExporterOptions(options)) {
const basic: UnixfsV1BasicContent = {
const basic: UnixFSBasicEntry = {
cid: link.Hash,
name: linkName,
path: linkPath,
resolve: load
path: linkPath
}

return basic
}

return load(options)
const result = await resolve(link.Hash, linkName, linkPath, [], depth + 1, blockstore, options)
return result.entry
}
}),
source => parallel(source, {
packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/hamt-sharded-directory.ts
@@ -6,7 +6,7 @@ import { pipe } from 'it-pipe'
import { CustomProgressEvent } from 'progress-events'
import { NotUnixFSError } from '../../../errors.js'
import { isBasicExporterOptions } from '../../../utils/is-basic-exporter-options.ts'
import type { ExporterOptions, Resolve, UnixfsV1DirectoryContent, UnixfsV1Resolver, ReadableStorage, ExportWalk, BasicExporterOptions, UnixFSEntry } from '../../../index.js'
import type { ExporterOptions, Resolve, UnixfsV1DirectoryContent, UnixfsV1Resolver, ReadableStorage, ExportWalk, BasicExporterOptions, UnixFSBasicEntry } from '../../../index.js'
import type { PBNode } from '@ipld/dag-pb'

const hamtShardedDirectoryContent: UnixfsV1Resolver = (cid, node, unixfs, path, resolve, depth, blockstore) => {
@@ -49,25 +49,26 @@ async function * listDirectory (node: PBNode, path: string, resolve: Resolve, de

if (name != null && name !== '') {
const linkPath = `${path}/${name}`
const load = async (options = {}): Promise<UnixFSEntry> => {
const result = await resolve(link.Hash, name, linkPath, [], depth + 1, blockstore, options)
return result.entry
}

if (isBasicExporterOptions(options)) {
const basic: UnixFSBasicEntry = {
cid: link.Hash,
name,
path: linkPath
}

return {
entries: [{
cid: link.Hash,
name,
path: linkPath,
resolve: load
}]
entries: [
basic
]
}
}

const result = await resolve(link.Hash, name, linkPath, [], depth + 1, blockstore, options)

return {
entries: [
await load()
result.entry
].filter(Boolean)
}
} else {
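Both directory walkers above now build the child entry directly from the dag-pb link when basic options are passed, instead of resolving the child block first. A minimal sketch of that per-link construction, mirroring the changed code (the helper name is made up for illustration):

```ts
import type { UnixFSBasicEntry } from 'ipfs-unixfs-exporter'
import type { PBLink } from '@ipld/dag-pb'

// Mirrors what directory.ts and hamt-sharded-directory.ts now do per link:
// the basic entry is derived from the link alone, so the child block is
// never fetched or decoded while listing.
function basicEntryFromLink (link: PBLink, parentPath: string): UnixFSBasicEntry {
  const name = link.Name ?? ''

  return {
    cid: link.Hash,
    name,
    path: `${parentPath}/${name}`
  }
}
```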
15 changes: 14 additions & 1 deletion packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/index.ts
@@ -2,10 +2,11 @@ import { decode } from '@ipld/dag-pb'
import { UnixFS } from 'ipfs-unixfs'
import { NotFoundError, NotUnixFSError } from '../../errors.js'
import findShardCid from '../../utils/find-cid-in-shard.js'
import { isBasicExporterOptions } from '../../utils/is-basic-exporter-options.ts'
import contentDirectory from './content/directory.js'
import contentFile from './content/file.js'
import contentHamtShardedDirectory from './content/hamt-sharded-directory.js'
import type { Resolver, UnixfsV1Resolver } from '../../index.js'
import type { Resolver, UnixFSBasicEntry, UnixfsV1Resolver } from '../../index.js'
import type { PBNode } from '@ipld/dag-pb'
import type { CID } from 'multiformats/cid'

@@ -30,6 +31,18 @@ const contentExporters: Record<string, UnixfsV1Resolver> = {

// @ts-expect-error types are wrong
const unixFsResolver: Resolver = async (cid, name, path, toResolve, resolve, depth, blockstore, options) => {
if (isBasicExporterOptions(options)) {
const basic: UnixFSBasicEntry = {
cid,
name,
path
}

return {
entry: basic
}
}

const block = await blockstore.get(cid, options)
const node = decode(block)
let unixfs
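The resolver above now short-circuits before fetching the block when basic options are detected. The guard itself lives in `src/utils/is-basic-exporter-options.ts` and is not part of this diff; a plausible shape, inferred from the `{ extended: false }` option used in the tests, might be (hypothetical sketch, assuming the file sits under `src/utils/`):

```ts
import type { BasicExporterOptions, ExporterOptions } from '../index.js'

// Hypothetical sketch only - the real implementation is not shown in this PR.
export function isBasicExporterOptions (options?: ExporterOptions | BasicExporterOptions): options is BasicExporterOptions {
  // treat a caller that explicitly passed `extended: false` as wanting the
  // basic (non-extended) export
  return options != null && (options as BasicExporterOptions).extended === false
}
```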
3 changes: 1 addition & 2 deletions packages/ipfs-unixfs-exporter/test/exporter-sharded.spec.ts
@@ -409,10 +409,9 @@ describe('exporter sharded', function () {
expect(dirFile).to.have.property('name')
expect(dirFile).to.have.property('path')
expect(dirFile).to.have.property('cid')
expect(dirFile).to.have.property('resolve')

// should fail because we have deleted this block
await expect(dirFile.resolve()).to.eventually.be.rejected()
await expect(exporter(dirFile.cid, block)).to.eventually.be.rejected()
}
})
})
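The test change above illustrates the migration for consumers of the old API: a basic entry no longer carries a `resolve()` method, so the full entry is obtained by exporting the child's CID when, and if, it is actually needed. A sketch of that pattern (the helper name is illustrative):

```ts
import { exporter } from 'ipfs-unixfs-exporter'
import type { ReadableStorage, UnixFSBasicEntry, UnixFSEntry } from 'ipfs-unixfs-exporter'

// Replacement for the removed `entry.resolve()`: export by CID on demand.
// This fetches and decodes the block, so it rejects if the block has been
// deleted - exactly what the updated tests assert.
async function resolveBasicEntry (entry: UnixFSBasicEntry, blockstore: ReadableStorage): Promise<UnixFSEntry> {
  return exporter(entry.cid, blockstore)
}
```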
59 changes: 56 additions & 3 deletions packages/ipfs-unixfs-exporter/test/exporter.spec.ts
@@ -1606,7 +1606,7 @@ describe('exporter', () => {
expect(actualInvocations).to.deep.equal(expectedInvocations)
})

it('exports basic directory', async () => {
it('exports basic directory contents', async () => {
const files: Record<string, { content: Uint8Array, cid?: CID }> = {}

for (let i = 0; i < 10; i++) {
@@ -1649,10 +1649,63 @@
expect(dirFile).to.have.property('name')
expect(dirFile).to.have.property('path')
expect(dirFile).to.have.property('cid')
expect(dirFile).to.have.property('resolve')

// should fail because we have deleted this block
await expect(dirFile.resolve()).to.eventually.be.rejected()
await expect(exporter(dirFile.cid, block)).to.eventually.be.rejected()
}
})

it('exports basic file', async () => {
const imported = await all(importer([{
content: uint8ArrayFromString('hello')
}], block, {
rawLeaves: false
}))

const regularFile = await exporter(imported[0].cid, block)
expect(regularFile).to.have.property('unixfs')

const basicFile = await exporter(imported[0].cid, block, {
extended: false
})

expect(basicFile).to.have.property('name')
expect(basicFile).to.have.property('path')
expect(basicFile).to.have.property('cid')
expect(basicFile).to.not.have.property('unixfs')
})

it('exports basic directory', async () => {
const files: Record<string, { content: Uint8Array, cid?: CID }> = {}

for (let i = 0; i < 10; i++) {
files[`file-${Math.random()}.txt`] = {
content: uint8ArrayConcat(await all(randomBytes(100)))
}
}

const imported = await all(importer(Object.keys(files).map(path => ({
path,
content: asAsyncIterable(files[path].content)
})), block, {
wrapWithDirectory: true,
rawLeaves: false
}))

const dirCid = imported.pop()?.cid

if (dirCid == null) {
throw new Error('No directory CID found')
}

const basicDir = await exporter(dirCid, block, {
extended: false
})

expect(basicDir).to.have.property('name')
expect(basicDir).to.have.property('path')
expect(basicDir).to.have.property('cid')
expect(basicDir).to.not.have.property('unixfs')
expect(basicDir).to.not.have.property('content')
})
})