@@ -8,20 +8,29 @@ const bs58 = require('bs58')
 const Readable = require('readable-stream')
 const path = require('path')
 const fs = require('fs')
-const isNode = require('detect-node')
 const bl = require('bl')
+const concat = require('concat-stream')
+const through = require('through2')
 
 module.exports = (common) => {
   describe('.files', () => {
     let smallFile
     let bigFile
+    let directoryContent
     let ipfs
 
     before((done) => {
-      smallFile = fs.readFileSync(path.join(__dirname, './data/testfile.txt')
-      )
-      bigFile = fs.readFileSync(path.join(__dirname, './data/15mb.random')
-      )
+      smallFile = fs.readFileSync(path.join(__dirname, './data/testfile.txt'))
+      bigFile = fs.readFileSync(path.join(__dirname, './data/15mb.random'))
+
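+      // cache the test-folder fixtures up front: browserify cannot shim
+      // fs.readFileSync at runtime, so tests read from this map instead
+      // of hitting the filesystem per test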
+      directoryContent = {
+        'pp.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/pp.txt')),
+        'holmes.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/holmes.txt')),
+        'jungle.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/jungle.txt')),
+        'alice.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/alice.txt')),
+        'files/hello.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/files/hello.txt')),
+        'files/ipfs.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/files/ipfs.txt'))
+      }
 
       common.setup((err, _ipfs) => {
         expect(err).to.not.exist
@@ -102,15 +111,9 @@ module.exports = (common) => {
     })
 
     it('add a nested dir as array', (done) => {
-      if (!isNode) {
-        return done()
-        // can't run this test cause browserify
-        // can't shim readFileSync in runtime
-      }
-      const base = path.join(__dirname, 'data/test-folder')
       const content = (name) => ({
         path: `test-folder/${name}`,
-        content: fs.readFileSync(path.join(base, name))
+        content: directoryContent[name]
       })
       const emptyDir = (name) => ({
         path: `test-folder/${name}`
@@ -131,30 +134,23 @@ module.exports = (common) => {
 
         const added = res[res.length - 1]
         const mh = bs58.encode(added.node.multihash()).toString()
-        expect(mh).to.equal('QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP')
-        expect(added.path).to.equal('test-folder')
         expect(added.node.links).to.have.length(6)
+        expect(added.path).to.equal('test-folder')
+        expect(mh).to.equal('QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP')
+
         done()
       })
     })
 
     describe('.createAddStream', () => {
       it('stream of valid files and dirs', (done) => {
-        if (!isNode) {
-          return done()
-          // can't run this test cause browserify
-          // can't shim readFileSync in runtime
-        }
-
-        const base = path.join(__dirname, 'data/test-folder')
         const content = (name) => ({
           path: `test-folder/${name}`,
-          content: fs.readFileSync(path.join(base, name))
+          content: directoryContent[name]
         })
         const emptyDir = (name) => ({
           path: `test-folder/${name}`
         })
-
         const files = [
           content('pp.txt'),
           content('holmes.txt'),
@@ -243,7 +239,7 @@ module.exports = (common) => {
     })
 
     describe('.cat', () => {
-      it('with a bas58 multihash encoded string', () => {
+      it('with a base58 multihash encoded string', () => {
        const hash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'
 
        return ipfs.cat(hash)
@@ -275,13 +271,167 @@ module.exports = (common) => {
275
271
const hash = new Buffer ( bs58 . decode ( 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB' ) )
276
272
return ipfs . cat ( hash )
277
273
. then ( ( stream ) => {
278
- stream . pipe ( bl ( ( err , bldata ) => {
274
+ stream . pipe ( bl ( ( err , data ) => {
279
275
expect ( err ) . to . not . exist
280
- expect ( bldata . toString ( ) ) . to . contain ( 'Check out some of the other files in this directory:' )
276
+ expect ( data . toString ( ) ) . to . contain ( 'Check out some of the other files in this directory:' )
281
277
} ) )
282
278
} )
283
279
} )
284
280
} )
285
281
} )
282
+
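+    // .get returns an object-mode stream of { path, content } entries;
+    // directory entries carry no content stream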
+    describe('.get', () => {
+      it('with a base58 encoded multihash', (done) => {
+        const hash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'
+        ipfs.files.get(hash, (err, stream) => {
+          expect(err).to.not.exist
+          stream.pipe(concat((files) => {
+            expect(files).to.be.length(1)
+            expect(files[0].path).to.equal(hash)
+            files[0].content.pipe(concat((content) => {
+              expect(content.toString()).to.contain('Check out some of the other files in this directory:')
+              done()
+            }))
+          }))
+        })
+      })
+
+      it('with a multihash', (done) => {
+        const hash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'
+        const mhBuf = new Buffer(bs58.decode(hash))
+        ipfs.files.get(mhBuf, (err, stream) => {
+          expect(err).to.not.exist
+          stream.pipe(concat((files) => {
+            expect(files).to.be.length(1)
+            expect(files[0].path).to.deep.equal(hash)
+            files[0].content.pipe(concat((content) => {
+              expect(content.toString()).to.contain('Check out some of the other files in this directory:')
+              done()
+            }))
+          }))
+        })
+      })
+
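+      // fetch a single large file and compare it against the 15mb.random
+      // fixture loaded as bigFile in the before hook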
+      it('large file', (done) => {
+        const hash = 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq'
+        ipfs.files.get(hash, (err, stream) => {
+          expect(err).to.not.exist
+
+          // accumulate the files and their content
+          var files = []
+          stream.pipe(through.obj((file, enc, next) => {
+            file.content.pipe(concat((content) => {
+              files.push({
+                path: file.path,
+                content: content
+              })
+              next()
+            }))
+          }, () => {
+            expect(files.length).to.equal(1)
+            expect(files[0].path).to.equal(hash)
+            expect(files[0].content).to.deep.equal(bigFile)
+            done()
+          }))
+        })
+      })
+
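+      // QmVvjDy7... is the root of the nested test-folder added earlier in
+      // this suite, so both the paths and the contents are known in advance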
340
+ it ( 'directory' , ( done ) => {
341
+ const hash = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP'
342
+ ipfs . files . get ( hash , ( err , stream ) => {
343
+ expect ( err ) . to . not . exist
344
+
345
+ // accumulate the files and their content
346
+ var files = [ ]
347
+ stream . pipe ( through . obj ( ( file , enc , next ) => {
348
+ if ( file . content ) {
349
+ file . content . pipe ( concat ( ( content ) => {
350
+ files . push ( {
351
+ path : file . path ,
352
+ content : content
353
+ } )
354
+ next ( )
355
+ } ) )
356
+ } else {
357
+ files . push ( file )
358
+ next ( )
359
+ }
360
+ } , ( ) => {
361
+ // Check paths
362
+ var paths = files . map ( ( file ) => {
363
+ return file . path
364
+ } )
365
+ expect ( paths ) . to . deep . equal ( [
366
+ 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' ,
367
+ 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt' ,
368
+ 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder' ,
369
+ 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files' ,
370
+ 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/empty' ,
371
+ 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/hello.txt' ,
372
+ 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/ipfs.txt' ,
373
+ 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt' ,
374
+ 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt' ,
375
+ 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt'
376
+ ] )
377
+
378
+ // Check contents
379
+ var contents = files . map ( ( file ) => {
380
+ return file . content ? file . content : null
381
+ } )
382
+ expect ( contents ) . to . deep . equal ( [
383
+ null ,
384
+ directoryContent [ 'alice.txt' ] ,
385
+ null ,
386
+ null ,
387
+ null ,
388
+ directoryContent [ 'files/hello.txt' ] ,
389
+ directoryContent [ 'files/ipfs.txt' ] ,
390
+ directoryContent [ 'holmes.txt' ] ,
391
+ directoryContent [ 'jungle.txt' ] ,
392
+ directoryContent [ 'pp.txt' ]
393
+ ] )
394
+ done ( )
395
+ } ) )
396
+ } )
397
+ } )
398
+
+      describe('promise', () => {
+        it('with a base58 encoded string', (done) => {
+          const hash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'
+          ipfs.files.get(hash)
+            .then((stream) => {
+              stream.pipe(concat((files) => {
+                expect(files).to.be.length(1)
+                expect(files[0].path).to.equal(hash)
+                files[0].content.pipe(concat((content) => {
+                  expect(content.toString()).to.contain('Check out some of the other files in this directory:')
+                  done()
+                }))
+              }))
+            })
+            .catch((err) => {
+              expect(err).to.not.exist
+            })
+        })
+
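+        // a string that is not a valid multihash should reject the promise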
+        it('errors on invalid key', (done) => {
+          const hash = 'somethingNotMultihash'
+          ipfs.files.get(hash)
+            .then((stream) => {})
+            .catch((err) => {
+              expect(err).to.exist
+              // implementations word this error differently, so accept
+              // either known message
+              const errString = err.toString()
+              expect(
+                errString === 'Error: invalid ipfs ref path' ||
+                errString === 'Error: Invalid Key'
+              ).to.equal(true)
+              done()
+            })
+        })
+      })
+    })
   })
 }