@@ -6,9 +6,10 @@ const log = debug('cli:version')
 log.error = debug('cli:version:error')
 const bs58 = require('bs58')
 const fs = require('fs')
-const parallelLimit = require('run-parallel-limit')
 const path = require('path')
 const glob = require('glob')
+const sortBy = require('lodash.sortby')
+const mapLimit = require('map-limit')
 
 function checkPath (inPath, recursive) {
   // This function is to check for the following possible inputs
@@ -48,50 +49,70 @@ module.exports = {
   },
 
   handler (argv) {
-    let rs
-
     let inPath = checkPath(argv.file, argv.recursive)
 
-    glob(path.join(inPath, '/**/*'), (err, res) => {
+    utils.getIPFS((err, ipfs) => {
       if (err) {
         throw err
       }
-      utils.getIPFS((err, ipfs) => {
+
+      glob(path.join(inPath, '/**/*'), (err, res) => {
         if (err) {
           throw err
         }
+
         ipfs.files.createAddStream((err, i) => {
-          if (err) throw err
-          var filePair
+          if (err) {
+            throw err
+          }
+          const added = []
+
           i.on('data', (file) => {
-            console.log('added', bs58.encode(file.node.multihash()).toString(), file.path)
+            const hash = bs58.encode(file.node.multihash()).toString()
+            added.push({hash, path: file.path})
           })
-          i.once('end', () => {
-            return
+
+          i.on('end', () => {
+            sortBy(added, 'path')
+              .reverse()
+              .map((file) => `added ${file.hash} ${file.path}`)
+              .forEach((msg) => console.log(msg))
           })
-          if (res.length !== 0) {
-            const index = inPath.lastIndexOf('/')
-            parallelLimit(res.map((element) => (callback) => {
-              if (!fs.statSync(element).isDirectory()) {
-                i.write({
-                  path: element.substring(index + 1, element.length),
-                  content: fs.createReadStream(element)
-                })
-              }
-              callback()
-            }), 10, (err) => {
+
+          if (res.length === 0) {
+            res = [inPath]
+          }
+
+          const writeToStream = (stream, element) => {
+            const index = inPath.lastIndexOf('/') + 1
+            i.write({
+              path: element.substring(index, element.length),
+              content: fs.createReadStream(element)
+            })
+          }
+
+          mapLimit(res, 50, (file, cb) => {
+            fs.stat(file, (err, stat) => {
               if (err) {
-                throw err
+                return cb(err)
               }
-              i.end()
+              return cb(null, {
+                path: file,
+                isDirectory: stat.isDirectory()
+              })
             })
-          } else {
-            rs = fs.createReadStream(inPath)
-            inPath = inPath.substring(inPath.lastIndexOf('/') + 1, inPath.length)
-            filePair = {path: inPath, content: rs}
-            i.write(filePair)
+          }, (err, res) => {
+            if (err) {
+              throw err
+            }
+
+            res
+              .filter((elem) => !elem.isDirectory)
+              .map((elem) => elem.path)
+              .forEach((elem) => writeToStream(i, elem))
+
             i.end()
-          }
+          })
         })
       })
     })