Skip to content
This repository was archived by the owner on Feb 12, 2024. It is now read-only.

Commit 31a2b4a

Browse files
committed
chore: fix more tests
1 parent d24c43e commit 31a2b4a

File tree

6 files changed

+25
-1913
lines changed

6 files changed

+25
-1913
lines changed

packages/interface-ipfs-core/src/files/touch.js

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,7 @@ module.exports = (common, options) => {
3333
})
3434

3535
const stat2 = await ipfs.files.stat(testPath)
36-
expect(stat2).to.have.nested.deep.property('mtime', expectedMtime)
36+
expect(stat2).to.have.deep.nested.property('mtime', expectedMtime)
3737
}
3838

3939
before(async () => { ipfs = (await common.spawn()).api })

packages/ipfs-core-types/tsconfig-check.aegir.tsbuildinfo

Lines changed: 0 additions & 1898 deletions
This file was deleted.

packages/ipfs-core/src/components/files/chmod.js

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -210,13 +210,15 @@ module.exports = (context) => {
210210
dagBuilder: async function * (source, block, opts) {
211211
for await (const entry of source) {
212212
yield async function () {
213-
const cid = await persist(entry.content.serialize(), block, opts)
213+
const buf = entry.content.serialize()
214+
const cid = await persist(buf, block, opts)
215+
const unixfs = UnixFS.unmarshal(entry.content.Data)
214216

215217
return {
216218
cid,
219+
size: buf.length,
217220
path: entry.path,
218-
unixfs: UnixFS.unmarshal(entry.content.Data),
219-
node: entry.content
221+
unixfs
220222
}
221223
}
222224
}

packages/ipfs-core/src/components/files/stat.js

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -146,8 +146,8 @@ const statters = {
146146
identity: (file) => {
147147
return {
148148
cid: file.cid,
149-
size: file.node.digest.length,
150-
cumulativeSize: file.node.digest.length,
149+
size: file.unixfs.data.length,
150+
cumulativeSize: file.unixfs.data.length,
151151
blocks: 0,
152152
type: 'file', // for go compatibility
153153
local: undefined,

packages/ipfs-core/src/components/files/utils/add-link.js

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -167,6 +167,7 @@ const addFileToShardedDirectory = async (context, options) => {
167167
mode: node.mode
168168
}, {
169169
hamtHashFn: importerOptions.hamtHashFn,
170+
hamtHashCode: importerOptions.hamtHashCode,
170171
hamtBucketBits: importerOptions.hamtBucketBits,
171172
...options
172173
})

packages/ipfs-core/src/components/files/utils/hamt-utils.js

Lines changed: 16 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -46,19 +46,21 @@ const updateHamtDirectory = async (context, links, bucket, options) => {
4646
}
4747

4848
const recreateHamtLevel = async (links, rootBucket, parentBucket, positionAtParent) => {
49-
const importerOptions = defaultImporterOptions()
5049
let bucket
5150

5251
// recreate this level of the HAMT
5352
if (parentBucket) {
5453
bucket = new Bucket({
55-
hash: parentBucket._options.hash,
56-
bits: parentBucket._options.bits
54+
hash: rootBucket._options.hash,
55+
bits: rootBucket._options.bits
5756
}, parentBucket, positionAtParent)
5857
parentBucket._putObjectAt(positionAtParent, bucket)
5958
} else {
60-
bucket = createHAMT({
61-
hashFn: importerOptions.hamtHashFn
59+
const importerOptions = defaultImporterOptions()
60+
61+
rootBucket = bucket = createHAMT({
62+
hashFn: importerOptions.hamtHashFn,
63+
bits: importerOptions.hamtBucketBits
6264
})
6365
}
6466

@@ -74,14 +76,14 @@ const addLinksToHamtBucket = async (links, bucket, rootBucket) => {
7476
const pos = parseInt(link.Name, 16)
7577

7678
bucket._putObjectAt(pos, new Bucket({
77-
hash: bucket._options.hash,
78-
bits: bucket._options.bits
79+
hash: rootBucket._options.hash,
80+
bits: rootBucket._options.bits
7981
}, bucket, pos))
8082

8183
return Promise.resolve()
8284
}
8385

84-
return (rootBucket || bucket).put(link.Name.substring(2), {
86+
return rootBucket.put(link.Name.substring(2), {
8587
size: link.Tsize,
8688
cid: link.Hash
8789
})
@@ -187,6 +189,8 @@ const generatePath = async (context, fileName, rootNode) => {
187189
}
188190

189191
const createShard = async (context, contents, options) => {
192+
const importerOptions = defaultImporterOptions()
193+
190194
const shard = new DirSharded({
191195
root: true,
192196
dir: true,
@@ -198,6 +202,9 @@ const createShard = async (context, contents, options) => {
198202
mtime: options.mtime,
199203
mode: options.mode
200204
}, {
205+
hamtHashFn: importerOptions.hamtHashFn,
206+
hamtHashCode: importerOptions.hamtHashCode,
207+
hamtBucketBits: importerOptions.hamtBucketBits,
201208
...options,
202209
codec: 'dag-pb'
203210
})
@@ -209,7 +216,7 @@ const createShard = async (context, contents, options) => {
209216
})
210217
}
211218

212-
return last(shard.flush('', context.block, null))
219+
return last(shard.flush('', context.block))
213220
}
214221

215222
module.exports = {

0 commit comments

Comments (0)