Skip to content
This repository was archived by the owner on Feb 12, 2024. It is now read-only.
2 changes: 2 additions & 0 deletions docs/core-api/REFS.md
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,8 @@ for await (const ref of ipfs.refs(ipfsPath, { recursive: true })) {

> Output all local references (CIDs of all blocks in the blockstore)

Blocks in the blockstore are stored by multihash and not CID, so yielded CIDs are v1 CIDs with the 'raw' codec. These may not match the CID originally used to store a given block, though the multihash contained within the CID will match.

### Parameters

None
Expand Down
2 changes: 1 addition & 1 deletion examples/custom-ipfs-repo/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
"dependencies": {
"datastore-fs": "^1.1.0",
"ipfs": "^0.47.0",
"ipfs-repo": "^3.0.0",
"ipfs-repo": "^4.0.0",
"it-all": "^1.0.1"
},
"devDependencies": {
Expand Down
4 changes: 2 additions & 2 deletions packages/interface-ipfs-core/src/block/rm.js
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ module.exports = (common, options) => {
// block should be present in the local store
const localRefs = await all(ipfs.refs.local())
expect(localRefs).to.have.property('length').that.is.greaterThan(0)
expect(localRefs.find(ref => ref.ref === cid.toString())).to.be.ok()
expect(localRefs.find(ref => ref.ref === new CID(1, 'raw', cid.multihash).toString())).to.be.ok()

const result = await all(ipfs.block.rm(cid))
expect(result).to.be.an('array').and.to.have.lengthOf(1)
Expand All @@ -49,7 +49,7 @@ module.exports = (common, options) => {

// did we actually remove the block?
const localRefsAfterRemove = await all(ipfs.refs.local())
expect(localRefsAfterRemove.find(ref => ref.ref === cid.toString())).to.not.be.ok()
expect(localRefsAfterRemove.find(ref => ref.ref === new CID(1, 'raw', cid.multihash).toString())).to.not.be.ok()
})

it('should remove by CID in string', async () => {
Expand Down
19 changes: 17 additions & 2 deletions packages/interface-ipfs-core/src/refs-local.js
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ const all = require('it-all')
const importer = require('ipfs-unixfs-importer')
const drain = require('it-drain')
const testTimeout = require('./utils/test-timeout')
const CID = require('cids')

/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
/**
Expand Down Expand Up @@ -54,8 +55,22 @@ module.exports = (common, options) => {

const refs = await all(ipfs.refs.local())
const cids = refs.map(r => r.ref)
expect(cids).to.include('QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn')
expect(cids).to.include('QmR4nFjTu18TyANgC65ArNWp5Yaab1gPzQ4D8zp7Kx3vhr')

expect(
cids.find(cid => {
const multihash = new CID(cid).multihash

return imported[0].cid.multihash.equals(multihash)
})
).to.be.ok()

expect(
cids.find(cid => {
const multihash = new CID(cid).multihash

return imported[1].cid.multihash.equals(multihash)
})
).to.be.ok()
})
})
}
42 changes: 18 additions & 24 deletions packages/interface-ipfs-core/src/repo/gc.js
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ const { getDescribe, getIt, expect } = require('../utils/mocha')
const { DAGNode } = require('ipld-dag-pb')
const all = require('it-all')
const testTimeout = require('../utils/test-timeout')
const CID = require('cids')

/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
/**
Expand Down Expand Up @@ -58,15 +59,15 @@ module.exports = (common, options) => {
// the initial list and contain hash
const refsAfterAdd = await all(ipfs.refs.local())
expect(refsAfterAdd.length).to.be.gt(refsBeforeAdd.length)
expect(refsAfterAdd.map(r => r.ref)).includes(cid.toString())
expect(refsAfterAdd.map(r => new CID(r.ref).multihash)).deep.includes(cid.multihash)

// Run garbage collection
await all(ipfs.repo.gc())

// Get the list of local blocks after GC, should still contain the hash,
// because the file is still pinned
const refsAfterGc = await all(ipfs.refs.local())
expect(refsAfterGc.map(r => r.ref)).includes(cid.toString())
expect(refsAfterGc.map(r => new CID(r.ref).multihash)).deep.includes(cid.multihash)

// Unpin the data
await ipfs.pin.rm(cid)
Expand All @@ -76,7 +77,7 @@ module.exports = (common, options) => {

// The list of local blocks should no longer contain the hash
const refsAfterUnpinAndGc = await all(ipfs.refs.local())
expect(refsAfterUnpinAndGc.map(r => r.ref)).not.includes(cid.toString())
expect(refsAfterUnpinAndGc.map(r => new CID(r.ref).multihash)).not.deep.includes(cid.multihash)
})

it('should clean up removed MFS files', async () => {
Expand All @@ -87,21 +88,20 @@ module.exports = (common, options) => {
await ipfs.files.write('/test', Buffer.from('oranges'), { create: true })
const stats = await ipfs.files.stat('/test')
expect(stats.type).to.equal('file')
const hash = stats.cid.toString()

// Get the list of local blocks after the add, should be bigger than
// the initial list and contain hash
const refsAfterAdd = await all(ipfs.refs.local())
expect(refsAfterAdd.length).to.be.gt(refsBeforeAdd.length)
expect(refsAfterAdd.map(r => r.ref)).includes(hash)
expect(refsAfterAdd.map(r => new CID(r.ref).multihash)).deep.includes(stats.cid.multihash)

// Run garbage collection
await all(ipfs.repo.gc())

// Get the list of local blocks after GC, should still contain the hash,
// because the file is in MFS
const refsAfterGc = await all(ipfs.refs.local())
expect(refsAfterGc.map(r => r.ref)).includes(hash)
expect(refsAfterGc.map(r => new CID(r.ref).multihash)).deep.includes(stats.cid.multihash)

// Remove the file
await ipfs.files.rm('/test')
Expand All @@ -111,7 +111,7 @@ module.exports = (common, options) => {

// The list of local blocks should no longer contain the hash
const refsAfterUnpinAndGc = await all(ipfs.refs.local())
expect(refsAfterUnpinAndGc.map(r => r.ref)).not.includes(hash)
expect(refsAfterUnpinAndGc.map(r => new CID(r.ref).multihash)).not.deep.includes(stats.cid.multihash)
})

it('should clean up block only after unpinned and removed from MFS', async () => {
Expand All @@ -135,17 +135,15 @@ module.exports = (common, options) => {
// the initial list and contain the data hash
const refsAfterAdd = await all(ipfs.refs.local())
expect(refsAfterAdd.length).to.be.gt(refsBeforeAdd.length)
const hashesAfterAdd = refsAfterAdd.map(r => r.ref)
expect(hashesAfterAdd).includes(dataCid.toString())
expect(refsAfterAdd.map(r => new CID(r.ref).multihash)).deep.includes(dataCid.multihash)

// Run garbage collection
await all(ipfs.repo.gc())

// Get the list of local blocks after GC, should still contain the hash,
// because the file is pinned and in MFS
const refsAfterGc = await all(ipfs.refs.local())
const hashesAfterGc = refsAfterGc.map(r => r.ref)
expect(hashesAfterGc).includes(dataCid.toString())
expect(refsAfterGc.map(r => new CID(r.ref).multihash)).deep.includes(dataCid.multihash)

// Remove the file
await ipfs.files.rm('/test')
Expand All @@ -156,9 +154,8 @@ module.exports = (common, options) => {
// Get the list of local blocks after GC, should still contain the hash,
// because the file is still pinned
const refsAfterRmAndGc = await all(ipfs.refs.local())
const hashesAfterRmAndGc = refsAfterRmAndGc.map(r => r.ref)
expect(hashesAfterRmAndGc).not.includes(mfsFileCid.toString())
expect(hashesAfterRmAndGc).includes(dataCid.toString())
expect(refsAfterRmAndGc.map(r => new CID(r.ref).multihash)).not.deep.includes(mfsFileCid.multihash)
expect(refsAfterRmAndGc.map(r => new CID(r.ref).multihash)).deep.includes(dataCid.multihash)

// Unpin the data
await ipfs.pin.rm(dataCid)
Expand All @@ -168,9 +165,8 @@ module.exports = (common, options) => {

// The list of local blocks should no longer contain the hashes
const refsAfterUnpinAndGc = await all(ipfs.refs.local())
const hashesAfterUnpinAndGc = refsAfterUnpinAndGc.map(r => r.ref)
expect(hashesAfterUnpinAndGc).not.includes(mfsFileCid.toString())
expect(hashesAfterUnpinAndGc).not.includes(dataCid.toString())
expect(refsAfterUnpinAndGc.map(r => new CID(r.ref).multihash)).not.deep.includes(mfsFileCid.multihash)
expect(refsAfterUnpinAndGc.map(r => new CID(r.ref).multihash)).not.deep.includes(dataCid.multihash)
})

it('should clean up indirectly pinned data after recursive pin removal', async () => {
Expand Down Expand Up @@ -201,9 +197,8 @@ module.exports = (common, options) => {
// the initial list and contain data and object hash
const refsAfterAdd = await all(ipfs.refs.local())
expect(refsAfterAdd.length).to.be.gt(refsBeforeAdd.length)
const hashesAfterAdd = refsAfterAdd.map(r => r.ref)
expect(hashesAfterAdd).includes(objCid.toString())
expect(hashesAfterAdd).includes(dataCid.toString())
expect(refsAfterAdd.map(r => new CID(r.ref).multihash)).deep.includes(objCid.multihash)
expect(refsAfterAdd.map(r => new CID(r.ref).multihash)).deep.includes(dataCid.multihash)

// Recursively pin the object
await ipfs.pin.add(objCid, { recursive: true })
Expand All @@ -218,7 +213,7 @@ module.exports = (common, options) => {
// Get the list of local blocks after GC, should still contain the data
// hash, because the data is still (indirectly) pinned
const refsAfterGc = await all(ipfs.refs.local())
expect(refsAfterGc.map(r => r.ref)).includes(dataCid.toString())
expect(refsAfterGc.map(r => new CID(r.ref).multihash)).deep.includes(dataCid.multihash)

// Recursively unpin the object
await ipfs.pin.rm(objCid.toString())
Expand All @@ -228,9 +223,8 @@ module.exports = (common, options) => {

// The list of local blocks should no longer contain the hashes
const refsAfterUnpinAndGc = await all(ipfs.refs.local())
const hashesAfterUnpinAndGc = refsAfterUnpinAndGc.map(r => r.ref)
expect(hashesAfterUnpinAndGc).not.includes(objCid.toString())
expect(hashesAfterUnpinAndGc).not.includes(dataCid.toString())
expect(refsAfterUnpinAndGc.map(r => new CID(r.ref).multihash)).not.deep.includes(objCid.multihash)
expect(refsAfterUnpinAndGc.map(r => new CID(r.ref).multihash)).not.deep.includes(dataCid.multihash)
})
})
}
2 changes: 1 addition & 1 deletion packages/ipfs/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -96,7 +96,7 @@
"ipfs-core-utils": "^0.2.4",
"ipfs-http-client": "^44.3.0",
"ipfs-http-response": "^0.5.0",
"ipfs-repo": "^3.0.0",
"ipfs-repo": "^4.0.0",
"ipfs-unixfs": "^1.0.3",
"ipfs-unixfs-exporter": "^2.0.2",
"ipfs-unixfs-importer": "^2.0.2",
Expand Down
23 changes: 18 additions & 5 deletions packages/ipfs/src/cli/commands/refs-local.js
Original file line number Diff line number Diff line change
@@ -1,27 +1,40 @@
'use strict'

const parseDuration = require('parse-duration')
const multibase = require('multibase')
const { Buffer } = require('buffer')

module.exports = {
command: 'refs-local',

describe: 'List all local references.',

epilog: 'CIDs are reconstructed therefore they might differ from those under which the blocks were originally stored.',

builder: {
timeout: {
type: 'string',
coerce: parseDuration
},
multihash: {
type: 'boolean',
default: false,
desc: 'Shows base32 encoded multihashes instead of reconstructed CIDs'
}
},

async handler ({ ctx: { ipfs, print }, timeout }) {
for await (const ref of ipfs.refs.local({
async handler ({ ctx: { ipfs, print }, timeout, cidBase, multihash }) {
for await (const { ref, err } of ipfs.refs.local({
timeout
})) {
if (ref.err) {
print(ref.err, true, true)
if (err) {
print(err, true, true)
} else {
print(ref.ref)
if (multihash) {
print(multibase.encode('base32', Buffer.from(ref)).toString().substring(1).toUpperCase())
} else {
print(ref)
}
}
}
}
Expand Down
8 changes: 4 additions & 4 deletions packages/ipfs/src/cli/commands/refs.js
Original file line number Diff line number Diff line change
Expand Up @@ -48,11 +48,11 @@ module.exports = {

const k = [key].concat(keys)

for await (const ref of ipfs.refs(k, { recursive, format, edges, unique, maxDepth, timeout })) {
if (ref.err) {
print(ref.err, true, true)
for await (const { err, ref } of ipfs.refs(k, { recursive, format, edges, unique, maxDepth, timeout })) {
if (err) {
print(err, true, true)
} else {
print(ref.ref)
print(ref)
}
}
}
Expand Down
13 changes: 2 additions & 11 deletions packages/ipfs/src/core/components/refs/local.js
Original file line number Diff line number Diff line change
@@ -1,20 +1,11 @@
'use strict'

const Repo = require('ipfs-repo')
const { withTimeoutOption } = require('../../utils')

module.exports = function ({ repo }) {
return withTimeoutOption(async function * refsLocal (options = {}) {
for await (const result of repo.blocks.query({ keysOnly: true, signal: options.signal })) {
yield dsKeyToRef(result.key)
for await (const cid of repo.blocks.query({ keysOnly: true, signal: options.signal })) {
yield { ref: cid.toString() }
}
})
}

/**
 * Translate a raw datastore key into a local-refs result object.
 *
 * @param {Key} key - datastore key identifying a stored block
 * @returns {{ ref: string } | { err: string }} a `{ ref }` record holding the
 *   block's CID as a string, or a `{ err }` record describing why the key
 *   could not be interpreted as a CID
 */
function dsKeyToRef (key) {
  try {
    // Recover the CID from the blockstore key and stringify it for output
    const cid = Repo.utils.blockstore.keyToCid(key)
    return { ref: cid.toString() }
  } catch (err) {
    return { err: `Could not convert block with key '${key}' to CID: ${err.message}` }
  }
}
14 changes: 6 additions & 8 deletions packages/ipfs/src/core/components/repo/gc.js
Original file line number Diff line number Diff line change
@@ -1,13 +1,12 @@
'use strict'

const CID = require('cids')
const { cidToString } = require('../../../utils/cid')
const log = require('debug')('ipfs:repo:gc')
const { MFS_ROOT_KEY, withTimeoutOption } = require('../../utils')
const Repo = require('ipfs-repo')
const { Errors } = require('interface-datastore')
const ERR_NOT_FOUND = Errors.notFoundError().code
const { parallelMerge, transform, map } = require('streaming-iterables')
const multibase = require('multibase')

// Limit on the number of parallel block remove operations
const BLOCK_RM_CONCURRENCY = 256
Expand Down Expand Up @@ -36,7 +35,7 @@ module.exports = ({ gcLock, pin, pinManager, refs, repo }) => {
})
}

// Get Set of CIDs of blocks to keep
// Get Set of multihashes of blocks to keep
async function createMarkedSet ({ pin, pinManager, refs, repo }) {
const pinsSource = map(({ cid }) => cid, pin.ls())

Expand Down Expand Up @@ -67,7 +66,7 @@ async function createMarkedSet ({ pin, pinManager, refs, repo }) {

const output = new Set()
for await (const cid of parallelMerge(pinsSource, pinInternalsSource, mfsSource)) {
output.add(cidToString(cid, { base: 'base32' }))
output.add(multibase.encode('base32', cid.multihash).toString())
}
return output
}
Expand All @@ -79,12 +78,11 @@ async function * deleteUnmarkedBlocks ({ repo, refs }, markedSet, blockKeys) {
let blocksCount = 0
let removedBlocksCount = 0

const removeBlock = async ({ key: k }) => {
const removeBlock = async (cid) => {
blocksCount++

try {
const cid = Repo.utils.blockstore.keyToCid(k)
const b32 = cid.toV1().toString('base32')
const b32 = multibase.encode('base32', cid.multihash).toString()
if (markedSet.has(b32)) return null
const res = { cid }

Expand All @@ -97,7 +95,7 @@ async function * deleteUnmarkedBlocks ({ repo, refs }, markedSet, blockKeys) {

return res
} catch (err) {
const msg = `Could not convert block with key '${k}' to CID`
const msg = `Could delete block with CID ${cid}`
log(msg, err)
return { err: new Error(msg + `: ${err.message}`) }
}
Expand Down
19 changes: 19 additions & 0 deletions packages/ipfs/test/cli/refs-local.js
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,8 @@
const { expect } = require('interface-ipfs-core/src/utils/mocha')
const cli = require('../utils/cli')
const sinon = require('sinon')
const multibase = require('multibase')
const { Buffer } = require('buffer')

const defaultOptions = {
timeout: undefined
Expand Down Expand Up @@ -37,6 +39,23 @@ describe('refs local', () => {
expect(lines.includes(err)).to.be.true()
})

it('prints multihash of all blocks', async () => {
const ref = 'ref'
const err = 'err'

ipfs.refs.local.withArgs(defaultOptions).returns([{
ref
}, {
err
}])

const out = await cli('refs local --multihash', { ipfs })
const lines = out.split('\n')

expect(lines.includes(multibase.encode('base32', Buffer.from(ref)).toString().substring(1).toUpperCase())).to.be.true()
expect(lines.includes(err)).to.be.true()
})

it('prints CID of all blocks with timeout', async () => {
const ref = 'ref'
const err = 'err'
Expand Down