|
2 | 2 |
|
3 | 3 | const sortBy = require('lodash/sortBy')
|
4 | 4 | const pull = require('pull-stream')
|
5 |
| -const getFolderSize = require('get-folder-size') |
| 5 | +const promisify = require('promisify-es6') |
| 6 | +const getFolderSize = promisify(require('get-folder-size')) |
6 | 7 | const byteman = require('byteman')
|
7 |
| -const reduce = require('async/reduce') |
8 | 8 | const mh = require('multihashes')
|
9 | 9 | const multibase = require('multibase')
|
10 | 10 | const toPull = require('stream-to-pull-stream')
|
11 | 11 | const { print, isDaemonOn, createProgressBar } = require('../utils')
|
12 | 12 | const { cidToString } = require('../../utils/cid')
|
13 | 13 | const globSource = require('../../utils/files/glob-source')
|
14 | 14 |
|
15 |
/**
 * Sum the on-disk size of every given path.
 *
 * Sizes are gathered concurrently via the promisified `getFolderSize`
 * (see the require at the top of the file).
 *
 * @param {string[]} paths - Filesystem paths (files or directories).
 * @returns {Promise<number>} Total size in bytes (0 for an empty list).
 */
async function getTotalBytes (paths) {
  // Leftover `cb` parameter from the callback-style version removed: the
  // function is async now and nothing inside ever invoked it.
  const sizes = await Promise.all(paths.map(p => getFolderSize(p)))
  return sizes.reduce((total, size) => total + size, 0)
}
|
23 | 19 |
|
/**
 * Run the add pipeline: pull `source` through `addStream`, collect the
 * results, and print them according to the CLI `options`.
 *
 * @param {*} source - pull-stream source of files to add.
 * @param {*} addStream - through-stream from `ipfs.addPullStream()`.
 * @param {Object} options - CLI flags (`silent`, `quieter`, `quiet`, `cidBase`).
 * @returns {Promise<void>} Resolves once output is printed (or suppressed);
 *   rejects if the stream errors.
 */
function addPipeline (source, addStream, options) {
  return new Promise((resolve, reject) => {
    pull(
      source,
      addStream,
      pull.collect((err, added) => {
        if (err) {
          // Tweak the error message and add more relevant info for the CLI
          if (err.code === 'ERR_DIR_NON_RECURSIVE') {
            err.message = `'${err.path}' is a directory, use the '-r' flag to specify directories`
          }
          return reject(err)
        }

        // --silent: add succeeded, print nothing at all.
        if (options.silent) return resolve()

        // --quieter: print only the hash of the last (root) entry.
        if (options.quieter) {
          print(added.pop().hash)
          return resolve()
        }

        // Default output: one line per added file, deepest paths first
        // (sorted by path then reversed), e.g. `added <cid> <path>`.
        sortBy(added, 'path')
          .reverse()
          .map((file) => {
            const log = options.quiet ? [] : ['added']
            log.push(cidToString(file.hash, { base: options.cidBase }))
            if (!options.quiet && file.path.length > 0) log.push(file.path)
            return log.join(' ')
          })
          .forEach((msg) => print(msg))

        resolve()
      })
    )
  })
}
|
52 | 56 |
|
53 | 57 | module.exports = {
|
@@ -140,46 +144,45 @@ module.exports = {
|
140 | 144 | },
|
141 | 145 |
|
142 | 146 | handler (argv) {
|
143 |
| - const { ipfs } = argv |
144 |
| - const options = { |
145 |
| - strategy: argv.trickle ? 'trickle' : 'balanced', |
146 |
| - shardSplitThreshold: argv.enableShardingExperiment |
147 |
| - ? argv.shardSplitThreshold |
148 |
| - : Infinity, |
149 |
| - cidVersion: argv.cidVersion, |
150 |
| - rawLeaves: argv.rawLeaves, |
151 |
| - onlyHash: argv.onlyHash, |
152 |
| - hashAlg: argv.hash, |
153 |
| - wrapWithDirectory: argv.wrapWithDirectory, |
154 |
| - pin: argv.pin, |
155 |
| - chunker: argv.chunker |
156 |
| - } |
157 |
| - |
158 |
| - if (options.enableShardingExperiment && isDaemonOn()) { |
159 |
| - throw new Error('Error: Enabling the sharding experiment should be done on the daemon') |
160 |
| - } |
| 147 | + argv.resolve((async () => { |
| 148 | + const { ipfs } = argv |
| 149 | + const options = { |
| 150 | + strategy: argv.trickle ? 'trickle' : 'balanced', |
| 151 | + shardSplitThreshold: argv.enableShardingExperiment |
| 152 | + ? argv.shardSplitThreshold |
| 153 | + : Infinity, |
| 154 | + cidVersion: argv.cidVersion, |
| 155 | + rawLeaves: argv.rawLeaves, |
| 156 | + onlyHash: argv.onlyHash, |
| 157 | + hashAlg: argv.hash, |
| 158 | + wrapWithDirectory: argv.wrapWithDirectory, |
| 159 | + pin: argv.pin, |
| 160 | + chunker: argv.chunker |
| 161 | + } |
161 | 162 |
|
162 |
| - const source = argv.file |
163 |
| - ? globSource(...argv.file, { recursive: argv.recursive }) |
164 |
| - : toPull.source(process.stdin) // Pipe directly to ipfs.add |
| 163 | + if (options.enableShardingExperiment && isDaemonOn()) { |
| 164 | + throw new Error('Error: Enabling the sharding experiment should be done on the daemon') |
| 165 | + } |
165 | 166 |
|
166 |
| - const adder = ipfs.addPullStream(options) |
| 167 | + const source = argv.file |
| 168 | + ? globSource(...argv.file, { recursive: argv.recursive }) |
| 169 | + : toPull.source(process.stdin) // Pipe directly to ipfs.add |
167 | 170 |
|
168 |
| - // No progress or piping directly to ipfs.add: no need to getTotalBytes |
169 |
| - if (!argv.progress || !argv.file) { |
170 |
| - return addPipeline(source, adder, argv) |
171 |
| - } |
| 171 | + const adder = ipfs.addPullStream(options) |
172 | 172 |
|
173 |
| - getTotalBytes(argv.file, (err, totalBytes) => { |
174 |
| - if (err) throw err |
| 173 | + // No progress or piping directly to ipfs.add: no need to getTotalBytes |
| 174 | + if (!argv.progress || !argv.file) { |
| 175 | + return addPipeline(source, adder, argv) |
| 176 | + } |
175 | 177 |
|
| 178 | + const totalBytes = await getTotalBytes(argv.file) |
176 | 179 | const bar = createProgressBar(totalBytes)
|
177 | 180 |
|
178 | 181 | options.progress = byteLength => {
|
179 | 182 | bar.update(byteLength / totalBytes, { progress: byteman(byteLength, 2, 'MB') })
|
180 | 183 | }
|
181 | 184 |
|
182 |
| - addPipeline(source, adder, argv) |
183 |
| - }) |
| 185 | + return addPipeline(source, adder, argv) |
| 186 | + })()) |
184 | 187 | }
|
185 | 188 | }
|
0 commit comments