Skip to content
This repository was archived by the owner on Aug 12, 2020. It is now read-only.

Commit 4fcdd51

Browse files
authored
Merge pull request #73 from ipfs/fix-blocks
fix(exporter): add some parallel fetching of blocks where possible
2 parents 9540d73 + a9d303c commit 4fcdd51

File tree

3 files changed

+23
-9
lines changed

3 files changed

+23
-9
lines changed

package.json

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -54,6 +54,7 @@
5454
"is-ipfs": "^0.2.0",
5555
"multihashes": "^0.2.2",
5656
"pull-block": "^1.0.2",
57+
"pull-paramap": "^1.1.6",
5758
"pull-pushable": "^2.0.1",
5859
"pull-stream": "^3.4.5",
5960
"pull-traverse": "^1.0.3",
@@ -70,4 +71,4 @@
7071
"jbenet <[email protected]>",
7172
"nginnever <[email protected]>"
7273
]
73-
}
74+
}

src/exporters/dir.js

Lines changed: 19 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -2,27 +2,40 @@
22

33
const path = require('path')
44
const pull = require('pull-stream')
5+
const paramap = require('pull-paramap')
56

67
const fileExporter = require('./file')
78
const switchType = require('../util').switchType
89

910
// Logic to export a unixfs directory.
1011
module.exports = (node, name, dagService) => {
12+
// The algorithm below is as follows
13+
//
14+
// 1. Take all links from a given directory node
15+
// 2. Map each link to their full name (parent + link name) + hash
16+
// 3. Parallel map to
17+
// 3.1. Resolve the hash against the dagService
18+
// 3.2. Switch on the node type
19+
// - `directory`: return node
20+
// - `file`: use the fileExporter to load and return the file
21+
// 4. Flatten
1122
return pull(
1223
pull.values(node.links),
1324
pull.map((link) => ({
1425
path: path.join(name, link.name),
1526
hash: link.hash
1627
})),
17-
pull.map((item) => pull(
18-
dagService.getStream(item.hash),
19-
pull.map((n) => switchType(
28+
paramap((item, cb) => dagService.get(item.hash, (err, n) => {
29+
if (err) {
30+
return cb(err)
31+
}
32+
33+
cb(null, switchType(
2034
n,
2135
() => pull.values([item]),
2236
() => fileExporter(n, item.path, dagService)
23-
)),
24-
pull.flatten()
25-
)),
37+
))
38+
})),
2639
pull.flatten()
2740
)
2841
}

src/exporters/file.js

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@
33
const traverse = require('pull-traverse')
44
const UnixFS = require('ipfs-unixfs')
55
const pull = require('pull-stream')
6+
const paramap = require('pull-paramap')
67

78
// Logic to export a single (possibly chunked) unixfs file.
89
module.exports = (node, name, ds) => {
@@ -18,8 +19,7 @@ module.exports = (node, name, ds) => {
1819
function visitor (node) {
1920
return pull(
2021
pull.values(node.links),
21-
pull.map((link) => ds.getStream(link.hash)),
22-
pull.flatten()
22+
paramap((link, cb) => ds.get(link.hash, cb))
2323
)
2424
}
2525

0 commit comments

Comments (0)