This repository was archived by the owner on Feb 12, 2024. It is now read-only.

Commit d8d0592

chore: fix interop tests
1 parent 4ce6ee3 commit d8d0592

3 files changed: +30 -10 lines changed

packages/ipfs-core/package.json

Lines changed: 1 addition & 1 deletion
@@ -65,7 +65,7 @@
     "debug": "^4.1.1",
     "dlv": "^1.1.3",
     "err-code": "^2.0.3",
-    "hamt-sharding": "^1.0.0",
+    "hamt-sharding": "ipfs-shipyard/js-hamt-sharding#feat/add-types",
     "hashlru": "^2.3.0",
     "interface-datastore": "^3.0.3",
     "ipfs-bitswap": "^4.0.2",

packages/ipfs-core/src/components/files/utils/add-link.js

Lines changed: 7 additions & 1 deletion
@@ -8,6 +8,7 @@ const CID = require('cids')
 const log = require('debug')('ipfs:mfs:core:utils:add-link')
 const UnixFS = require('ipfs-unixfs')
 const DirSharded = require('ipfs-unixfs-importer/src/dir-sharded')
+const defaultImporterOptions = require('ipfs-unixfs-importer/src/options')
 const {
   updateHamtDirectory,
   recreateHamtLevel,
@@ -153,6 +154,7 @@ const addFileToShardedDirectory = async (context, options) => {
   // start at the root bucket and descend, loading nodes as we go
   const rootBucket = await recreateHamtLevel(options.parent.Links)
   const node = UnixFS.unmarshal(options.parent.Data)
+  const importerOptions = defaultImporterOptions()

   const shard = new DirSharded({
     root: true,
@@ -163,7 +165,11 @@ const addFileToShardedDirectory = async (context, options) => {
     dirty: true,
     flat: false,
     mode: node.mode
-  }, options)
+  }, {
+    hamtHashFn: importerOptions.hamtHashFn,
+    hamtBucketBits: importerOptions.hamtBucketBits,
+    ...options
+  })
   shard._bucket = rootBucket

   if (node.mtime) {
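
The add-link change above comes down to sourcing the HAMT parameters from the importer defaults instead of a static property on DirSharded. A minimal sketch of that pattern, assuming the module paths shown in the diff; buildShard and mfsOptions are illustrative names, not part of the commit:

const DirSharded = require('ipfs-unixfs-importer/src/dir-sharded')
const defaultImporterOptions = require('ipfs-unixfs-importer/src/options')

// Hypothetical helper: mirrors how addFileToShardedDirectory now builds its
// shard, using only the properties visible in the diff above.
function buildShard (mfsOptions = {}) {
  const importerOptions = defaultImporterOptions()

  // DirSharded no longer carries its own hash function, so the HAMT hash and
  // bucket width are forwarded from the importer defaults; anything already
  // set on mfsOptions still wins because it is spread last.
  return new DirSharded({
    root: true,
    dirty: true,
    flat: false
  }, {
    hamtHashFn: importerOptions.hamtHashFn,
    hamtBucketBits: importerOptions.hamtBucketBits,
    ...mfsOptions
  })
}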

packages/ipfs-core/src/components/files/utils/hamt-utils.js

Lines changed: 22 additions & 8 deletions
@@ -3,23 +3,29 @@
 const {
   DAGNode
 } = require('ipld-dag-pb')
-const Bucket = require('hamt-sharding/src/bucket')
+const {
+  Bucket,
+  createHAMT
+} = require('hamt-sharding')
 const DirSharded = require('ipfs-unixfs-importer/src/dir-sharded')
+const defaultImporterOptions = require('ipfs-unixfs-importer/src/options')
 const log = require('debug')('ipfs:mfs:core:utils:hamt-utils')
 const UnixFS = require('ipfs-unixfs')
 const mc = require('multicodec')
 const mh = require('multihashing-async').multihash
 const last = require('it-last')

 const updateHamtDirectory = async (context, links, bucket, options) => {
+  const importerOptions = defaultImporterOptions()
+
   // update parent with new bit field
   const data = Uint8Array.from(bucket._children.bitField().reverse())
   const node = UnixFS.unmarshal(options.parent.Data)
   const dir = new UnixFS({
     type: 'hamt-sharded-directory',
     data,
     fanout: bucket.tableSize(),
-    hashType: DirSharded.hashFn.code,
+    hashType: importerOptions.hamtHashCode,
     mode: node.mode,
     mtime: node.mtime
   })
@@ -40,14 +46,20 @@ const updateHamtDirectory = async (context, links, bucket, options) => {
 }

 const recreateHamtLevel = async (links, rootBucket, parentBucket, positionAtParent) => {
-  // recreate this level of the HAMT
-  const bucket = new Bucket({
-    hashFn: DirSharded.hashFn,
-    hash: parentBucket ? parentBucket._options.hash : undefined
-  }, parentBucket, positionAtParent)
+  const importerOptions = defaultImporterOptions()
+  let bucket

+  // recreate this level of the HAMT
   if (parentBucket) {
+    bucket = new Bucket({
+      hash: parentBucket._options.hash,
+      bits: parentBucket._options.bits
+    }, parentBucket, positionAtParent)
     parentBucket._putObjectAt(positionAtParent, bucket)
+  } else {
+    bucket = createHAMT({
+      hashFn: importerOptions.hamtHashFn
+    })
   }

   await addLinksToHamtBucket(links, bucket, rootBucket)
@@ -62,7 +74,8 @@ const addLinksToHamtBucket = async (links, bucket, rootBucket) => {
        const pos = parseInt(link.Name, 16)

        bucket._putObjectAt(pos, new Bucket({
-          hashFn: DirSharded.hashFn
+          hash: bucket._options.hash,
+          bits: bucket._options.bits
        }, bucket, pos))

        return Promise.resolve()
@@ -102,6 +115,7 @@ const generatePath = async (context, fileName, rootNode) => {
       prefix: toPrefix(currentBucket._posAtParent)
     })

+    // @ts-ignore
     currentBucket = currentBucket._parent
   }
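
The hamt-utils changes track the new hamt-sharding entry points: the deep require of hamt-sharding/src/bucket is gone, root buckets come from a createHAMT factory, child buckets take hash/bits options copied from their parent, and the UnixFS hashType comes from the importer defaults. A rough sketch of that usage, assuming the feat/add-types branch pinned in package.json above; pos and child are illustrative names, not part of the commit:

const { Bucket, createHAMT } = require('hamt-sharding')
const defaultImporterOptions = require('ipfs-unixfs-importer/src/options')

const importerOptions = defaultImporterOptions()

// Root buckets are now created through the createHAMT factory with the
// importer's hash function...
const rootBucket = createHAMT({
  hashFn: importerOptions.hamtHashFn
})

// ...while child buckets are constructed directly, inheriting the parent's
// hash function and bucket width rather than a shared DirSharded.hashFn.
const pos = 0
const child = new Bucket({
  hash: rootBucket._options.hash,
  bits: rootBucket._options.bits
}, rootBucket, pos)

rootBucket._putObjectAt(pos, child)

// The multihash code recorded on the 'hamt-sharded-directory' UnixFS node
// is likewise read from the importer defaults.
const hashType = importerOptions.hamtHashCode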

0 commit comments
