@@ -7,6 +7,7 @@ const byteman = require('byteman')
 const reduce = require('async/reduce')
 const mh = require('multihashes')
 const multibase = require('multibase')
+const toPull = require('stream-to-pull-stream')
 const { print, isDaemonOn, createProgressBar } = require('../utils')
 const { cidToString } = require('../../utils/cid')
 const globSource = require('../../utils/files/glob-source')
@@ -20,15 +21,9 @@ function getTotalBytes (paths, cb) {
   }, cb)
 }
 
-function addPipeline (paths, addStream, options) {
-  const {
-    recursive,
-    quiet,
-    quieter,
-    silent
-  } = options
+function addPipeline (source, addStream, options) {
   pull(
-    globSource(...paths, { recursive }),
+    source,
     addStream,
     pull.collect((err, added) => {
       if (err) {
@@ -39,14 +34,15 @@ function addPipeline (paths, addStream, options) {
         throw err
       }
 
-      if (silent) return
-      if (quieter) return print(added.pop().hash)
+      if (options.silent) return
+      if (options.quieter) return print(added.pop().hash)
 
       sortBy(added, 'path')
         .reverse()
         .map((file) => {
-          const log = ['added', cidToString(file.hash, { base: options.cidBase })]
-          if (!quiet && file.path.length > 0) log.push(file.path)
+          const log = options.quiet ? [] : ['added']
+          log.push(cidToString(file.hash, { base: options.cidBase }))
+          if (!options.quiet && file.path.length > 0) log.push(file.path)
           return log.join(' ')
         })
         .forEach((msg) => print(msg))
@@ -55,7 +51,7 @@ function addPipeline (paths, addStream, options) {
 }
 
 module.exports = {
-  command: 'add <file...>',
+  command: 'add [file...]',
 
   describe: 'Add a file to IPFS using the UnixFS data format',
 
@@ -163,8 +159,15 @@ module.exports = {
       throw new Error('Error: Enabling the sharding experiment should be done on the daemon')
     }
 
-    if (!argv.progress) {
-      return addPipeline(argv.file, ipfs.addPullStream(options), argv)
+    const source = argv.file
+      ? globSource(...argv.file, { recursive: argv.recursive })
+      : toPull.source(process.stdin) // Pipe directly to ipfs.add
+
+    const adder = ipfs.addPullStream(options)
+
+    // No progress or piping directly to ipfs.add: no need to getTotalBytes
+    if (!argv.progress || !argv.file) {
+      return addPipeline(source, adder, argv)
     }
 
     getTotalBytes(argv.file, (err, totalBytes) => {
@@ -176,7 +179,7 @@ module.exports = {
         bar.update(byteLength / totalBytes, { progress: byteman(byteLength, 2, 'MB') })
       }
 
-      addPipeline(argv.file, ipfs.addPullStream(options), argv)
+      addPipeline(source, adder, argv)
     })
   }
 }