[![Dependency Status](https://david-dm.org/ipfs/js-ipfs-unixfs-engine.svg?style=flat-square)](https://david-dm.org/ipfs/js-ipfs-unixfs-engine)
[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat-square)](https://github.com/feross/standard)

## Example Importer

Let's create a little directory to import:

```sh
$ cd /tmp
$ mkdir foo
$ echo 'hello' > foo/bar
$ echo 'world' > foo/quux
```

And write the importing logic:

```js
// Dependencies to create a DAG Service (where the dir will be imported into)
const memStore = require('abstract-blob-store')
const ipfsRepo = require('ipfs-repo')
const ipfsBlock = require('ipfs-block')
const ipfsBlockService = require('ipfs-block-service')
const ipfsMerkleDag = require('ipfs-merkle-dag')
const fs = require('fs')

const repo = new ipfsRepo('', { stores: memStore })
const blocks = new ipfsBlockService(repo)
const dag = new ipfsMerkleDag.DAGService(blocks)

const Importer = require('ipfs-unixfs-engine').importer
const add = new Importer(dag)

// An array to hold the nested file/dir info returned by the importer.
// A root DAG Node is received upon completion.
const res = []

// Import the files /tmp/foo/bar and /tmp/foo/quux
const rs = fs.createReadStream('/tmp/foo/bar')
const rs2 = fs.createReadStream('/tmp/foo/quux')
const input = { path: '/tmp/foo/bar', stream: rs }
const input2 = { path: '/tmp/foo/quux', stream: rs2 }

// Listen for the data event from the importer stream
add.on('data', (info) => {
  res.push(info)
})

// The end event of the stream signals that the importer is done
add.on('end', () => {
  console.log('Finished adding files!')
})

// Call write on the importer to add the file/object tuples
add.write(input)
add.write(input2)
add.end()
```

When run, the stat of each DAG Node is printed on every `data` event, finishing with the root:

```
{ multihash: <Buffer 12 20 bd e2 2b 57 3f 6f bd 7c cc 5a 11 7f 28 6c a2 9a 9f c0 90 e1 d4 16 d0 5f 42 81 ec 0c 2a 7f 7f 93>,
  Size: 39243,
  path: '/tmp/foo/bar' }

{ multihash: <Buffer 12 20 bd e2 2b 57 3f 6f bd 7c cc 5a 11 7f 28 6c a2 9a 9f c0 90 e1 d4 16 d0 5f 42 81 ec 0c 2a 7f 7f 93>,
  Size: 59843,
  path: '/tmp/foo/quux' }

{ multihash: <Buffer 12 20 bd e2 2b 57 3f 6f bd 7c cc 5a 11 7f 28 6c a2 9a 9f c0 90 e1 d4 16 d0 5f 42 81 ec 0c 2a 7f 7f 93>,
  Size: 93242,
  path: '/tmp/foo' }

{ multihash: <Buffer 12 20 bd e2 2b 57 3f 6f bd 7c cc 5a 11 7f 28 6c a2 9a 9f c0 90 e1 d4 16 d0 5f 42 81 ec 0c 2a 7f 7f 93>,
  Size: 94234,
  path: '/tmp' }
```

## API

```js
const Importer = require('ipfs-unixfs-engine').importer
```

### const add = new Importer(dag)

The importer is a Duplex stream in object mode that accepts tuples of a path and
a readable stream of the file's data. You can stream any number of files into
the importer; call `end()` to signal that you are done adding them. Listen to
the `data` event for the returned information (`multihash`, `size`, and `path`)
of each file added, and to the `end` event to know when the importer has
finished. Input file paths that contain a directory structure will have that
hierarchy preserved in the resulting DAG nodes.

Uses the [DAG Service](https://github.com/vijayee/js-ipfs-merkle-dag/) instance
`dagService`.
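
Since the importer is a plain object-mode Duplex stream, you can also pipe
tuples into it instead of calling `write()` by hand. A minimal sketch, reusing
the `dag` service and the files created in the example above:

```js
const Readable = require('stream').Readable
const fs = require('fs')
const Importer = require('ipfs-unixfs-engine').importer

// assumes `dag` is a DAGService instance, as created in the example above
const add = new Importer(dag)

// an object-mode source stream of { path, stream } tuples
const files = new Readable({ objectMode: true, read () {} })
files.push({ path: '/tmp/foo/bar', stream: fs.createReadStream('/tmp/foo/bar') })
files.push({ path: '/tmp/foo/quux', stream: fs.createReadStream('/tmp/foo/quux') })
files.push(null) // no more files

files.pipe(add)

add.on('data', (info) => console.log(info.path))
add.on('end', () => console.log('Finished adding files!'))
```
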

## Example Exporter

```js
const memStore = require('abstract-blob-store')
const ipfsRepo = require('ipfs-repo')
const ipfsBlock = require('ipfs-block')
const ipfsBlockService = require('ipfs-block-service')
const ipfsMerkleDag = require('ipfs-merkle-dag')
const Exporter = require('ipfs-unixfs-engine').exporter

const repo = new ipfsRepo('', { stores: memStore })
const blocks = new ipfsBlockService(repo)
const dag = new ipfsMerkleDag.DAGService(blocks)

// Create an export event with the hash you want to export and a dag service
// (`hash` is the multihash of a previously imported root node)
const exportEvent = Exporter(hash, dag)

// Pipe the returned stream to the console
exportEvent.on('file', (result) => {
  result.stream.pipe(process.stdout)
})
```

## API

```js
const Exporter = require('ipfs-unixfs-engine').exporter
```

The exporter is an event emitter that emits a `file` event for each file found
under the given multihash, with its contents available as a readable stream
fetched from the DAG service.
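
For instance, instead of piping to stdout, each exported file can be written to
disk. A minimal sketch, assuming `dag` and `hash` as in the exporter example
above, and assuming each `file` event also carries the `path` recorded at
import time alongside its `stream`:

```js
const fs = require('fs')
const path = require('path')
const Exporter = require('ipfs-unixfs-engine').exporter

// `hash` and `dag` as in the exporter example above
const exportEvent = Exporter(hash, dag)

exportEvent.on('file', (result) => {
  // `result.path` is assumed to mirror the path given at import time;
  // the output directory /tmp/export is assumed to exist
  const dest = path.join('/tmp/export', path.basename(result.path))
  result.stream.pipe(fs.createWriteStream(dest))
})
```
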
## install
With [npm](https://npmjs.org/) installed, run

```
$ npm install ipfs-unixfs-engine
```
## license