
Commit c5e5c07

Alan Shaw authored
refactor: modularise files (#1772)
This is basically a mechanical change to separate `files-regular.js` into multiple modules - easier to read, debug and maintain.

License: MIT
Signed-off-by: Alan Shaw <[email protected]>
1 parent 07e6c00 commit c5e5c07

19 files changed, +703 -659 lines changed

src/core/components/files-regular.js

Lines changed: 0 additions & 511 deletions
This file was deleted.
Lines changed: 156 additions & 0 deletions
@@ -0,0 +1,156 @@
'use strict'

const { importer } = require('ipfs-unixfs-engine')
const pull = require('pull-stream')
const toPull = require('stream-to-pull-stream')
const waterfall = require('async/waterfall')
const isStream = require('is-stream')
const isSource = require('is-pull-stream').isSource
const CID = require('cids')
const { parseChunkerString } = require('./utils')

const WRAPPER = 'wrapper/'

function noop () {}

function prepareFile (self, opts, file, callback) {
  opts = opts || {}

  let cid = new CID(file.multihash)

  if (opts.cidVersion === 1) {
    cid = cid.toV1()
  }

  waterfall([
    (cb) => opts.onlyHash
      ? cb(null, file)
      : self.object.get(file.multihash, Object.assign({}, opts, { preload: false }), cb),
    (node, cb) => {
      const b58Hash = cid.toBaseEncodedString()

      let size = node.size

      if (Buffer.isBuffer(node)) {
        size = node.length
      }

      cb(null, {
        path: opts.wrapWithDirectory
          ? file.path.substring(WRAPPER.length)
          : (file.path || b58Hash),
        hash: b58Hash,
        size
      })
    }
  ], callback)
}

function normalizeContent (opts, content) {
  if (!Array.isArray(content)) {
    content = [content]
  }

  return content.map((data) => {
    // Buffer input
    if (Buffer.isBuffer(data)) {
      data = { path: '', content: pull.values([data]) }
    }

    // Readable stream input
    if (isStream.readable(data)) {
      data = { path: '', content: toPull.source(data) }
    }

    if (isSource(data)) {
      data = { path: '', content: data }
    }

    if (data && data.content && typeof data.content !== 'function') {
      if (Buffer.isBuffer(data.content)) {
        data.content = pull.values([data.content])
      }

      if (isStream.readable(data.content)) {
        data.content = toPull.source(data.content)
      }
    }

    if (opts.wrapWithDirectory && !data.path) {
      throw new Error('Must provide a path when wrapping with a directory')
    }

    if (opts.wrapWithDirectory) {
      data.path = WRAPPER + data.path
    }

    return data
  })
}

function preloadFile (self, opts, file) {
  const isRootFile = opts.wrapWithDirectory
    ? file.path === ''
    : !file.path.includes('/')

  const shouldPreload = isRootFile && !opts.onlyHash && opts.preload !== false

  if (shouldPreload) {
    self._preload(file.hash)
  }

  return file
}

function pinFile (self, opts, file, cb) {
  // Pin a file if it is the root dir of a recursive add or the single file
  // of a direct add.
  const pin = 'pin' in opts ? opts.pin : true
  const isRootDir = !file.path.includes('/')
  const shouldPin = pin && isRootDir && !opts.onlyHash && !opts.hashAlg
  if (shouldPin) {
    return self.pin.add(file.hash, { preload: false }, err => cb(err, file))
  } else {
    cb(null, file)
  }
}

module.exports = function (self) {
  // Internal add func that gets used by all add funcs
  return function addPullStream (options = {}) {
    let chunkerOptions
    try {
      chunkerOptions = parseChunkerString(options.chunker)
    } catch (err) {
      return pull.map(() => { throw err })
    }
    const opts = Object.assign({}, {
      shardSplitThreshold: self._options.EXPERIMENTAL.sharding
        ? 1000
        : Infinity
    }, options, chunkerOptions)

    // CID v0 is for multihashes encoded with sha2-256
    if (opts.hashAlg && opts.cidVersion !== 1) {
      opts.cidVersion = 1
    }

    let total = 0

    const prog = opts.progress || noop
    const progress = (bytes) => {
      total += bytes
      prog(total)
    }

    opts.progress = progress
    return pull(
      pull.map(normalizeContent.bind(null, opts)),
      pull.flatten(),
      importer(self._ipld, opts),
      pull.asyncMap(prepareFile.bind(null, self, opts)),
      pull.map(preloadFile.bind(null, self, opts)),
      pull.asyncMap(pinFile.bind(null, self, opts))
    )
  }
}
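For context, the factory above is consumed by the other new modules via `self.addPullStream`. A minimal usage sketch, assuming an already-initialised node instance `ipfs` that exposes the returned function as `ipfs.addPullStream` (the instance name and registration are assumptions, not shown in this diff):

```js
const pull = require('pull-stream')

pull(
  // entries are normalised by normalizeContent: buffers, streams or { path, content }
  pull.values([{ path: 'hello.txt', content: Buffer.from('hello world') }]),
  ipfs.addPullStream(),
  pull.collect((err, files) => {
    if (err) throw err
    console.log(files) // [{ path, hash, size }] as produced by prepareFile
  })
)
```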
Lines changed: 53 additions & 0 deletions
@@ -0,0 +1,53 @@
'use strict'

const pull = require('pull-stream')
const pushable = require('pull-pushable')
const Duplex = require('readable-stream').Duplex

class AddHelper extends Duplex {
  constructor (pullStream, push, options) {
    super(Object.assign({ objectMode: true }, options))
    this._pullStream = pullStream
    this._pushable = push
    this._waitingPullFlush = []
  }

  _read () {
    this._pullStream(null, (end, data) => {
      while (this._waitingPullFlush.length) {
        const cb = this._waitingPullFlush.shift()
        cb()
      }
      if (end) {
        if (end instanceof Error) {
          this.emit('error', end)
        }
      } else {
        this.push(data)
      }
    })
  }

  _write (chunk, encoding, callback) {
    this._waitingPullFlush.push(callback)
    this._pushable.push(chunk)
  }
}

module.exports = function (self) {
  return (options) => {
    options = options || {}

    const p = pushable()
    const s = pull(
      p,
      self.addPullStream(options)
    )

    const retStream = new AddHelper(s, p)

    retStream.once('finish', () => p.end())

    return retStream
  }
}
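A short sketch of the Duplex wrapper in use. It assumes the exported factory is registered on the node as `ipfs.addReadableStream`; that name is not part of this diff:

```js
const stream = ipfs.addReadableStream()

stream.on('data', (file) => {
  // AddHelper pushes { path, hash, size } objects as addPullStream emits them
  console.log(file.hash)
})

stream.write({ path: 'hello.txt', content: Buffer.from('hello world') })
stream.end() // 'finish' ends the internal pushable, flushing the pipeline
```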
Lines changed: 62 additions & 0 deletions
@@ -0,0 +1,62 @@
'use strict'

const promisify = require('promisify-es6')
const pull = require('pull-stream')
const sort = require('pull-sort')
const isStream = require('is-stream')
const isSource = require('is-pull-stream').isSource
const isString = require('lodash/isString')

module.exports = function (self) {
  const add = promisify((data, options, callback) => {
    if (typeof options === 'function') {
      callback = options
      options = {}
    }

    options = options || {}

    // Buffer, pull stream or Node.js stream
    const isBufferOrStream = obj => Buffer.isBuffer(obj) || isStream.readable(obj) || isSource(obj)
    // An object like { content?, path? }, where content isBufferOrStream and path isString
    const isContentObject = obj => {
      if (typeof obj !== 'object') return false
      // path is optional if content is present
      if (obj.content) return isBufferOrStream(obj.content)
      // path must be a non-empty string if no content
      return Boolean(obj.path) && isString(obj.path)
    }
    // An input atom: a buffer, stream or content object
    const isInput = obj => isBufferOrStream(obj) || isContentObject(obj)
    // All is ok if data isInput or data is an array of isInput
    const ok = isInput(data) || (Array.isArray(data) && data.every(isInput))

    if (!ok) {
      return callback(new Error('invalid input: expected buffer, readable stream, pull stream, object or array of objects'))
    }

    pull(
      pull.values([data]),
      self.addPullStream(options),
      sort((a, b) => {
        if (a.path < b.path) return 1
        if (a.path > b.path) return -1
        return 0
      }),
      pull.collect(callback)
    )
  })

  return function () {
    const args = Array.from(arguments)

    // If we .add(<pull stream>), then promisify thinks the pull stream
    // is a callback! Add an empty options object in this case so that a
    // promise is returned.
    if (args.length === 1 && isSource(args[0])) {
      args.push({})
    }

    return add.apply(null, args)
  }
}
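A sketch of the two calling conventions the promisified `add` supports, assuming the export is wired up as `ipfs.add` on an initialised node (an assumption for illustration):

```js
const content = Buffer.from('hello world')

// callback style
ipfs.add(content, (err, files) => {
  if (err) throw err
  console.log(files) // [{ path, hash, size }]
})

// promise style: omit the callback
ipfs.add([{ path: 'dir/hello.txt', content }], { wrapWithDirectory: true })
  .then((files) => console.log(files))
```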
Lines changed: 56 additions & 0 deletions
@@ -0,0 +1,56 @@
'use strict'

const { exporter } = require('ipfs-unixfs-engine')
const pull = require('pull-stream')
const deferred = require('pull-defer')
const { normalizePath } = require('./utils')

module.exports = function (self) {
  return function catPullStream (ipfsPath, options) {
    if (typeof ipfsPath === 'function') {
      throw new Error('You must supply an ipfsPath')
    }

    options = options || {}

    ipfsPath = normalizePath(ipfsPath)
    const pathComponents = ipfsPath.split('/')
    const restPath = normalizePath(pathComponents.slice(1).join('/'))
    const filterFile = (file) => (restPath && file.path === restPath) || (file.path === ipfsPath)

    if (options.preload !== false) {
      self._preload(pathComponents[0])
    }

    const d = deferred.source()

    pull(
      exporter(ipfsPath, self._ipld, options),
      pull.filter(filterFile),
      pull.take(1),
      pull.collect((err, files) => {
        if (err) {
          return d.abort(err)
        }

        if (!files.length) {
          return d.abort(new Error('No such file'))
        }

        const file = files[0]

        if (!file.content && file.type === 'dir') {
          return d.abort(new Error('this dag node is a directory'))
        }

        if (!file.content) {
          return d.abort(new Error('this dag node has no content'))
        }

        d.resolve(file.content)
      })
    )

    return d
  }
}
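A minimal consumer of the deferred source returned above, mirroring what the promisified `cat` module below does. It assumes the factory is exposed as `ipfs.catPullStream`, and `cid` is a placeholder CID string:

```js
const pull = require('pull-stream')

pull(
  ipfs.catPullStream(cid),
  pull.collect((err, chunks) => {
    if (err) throw err
    // the deferred source resolves to the exported file's content stream
    console.log(Buffer.concat(chunks).toString())
  })
)
```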
Lines changed: 7 additions & 0 deletions
@@ -0,0 +1,7 @@
'use strict'

const toStream = require('pull-stream-to-stream')

module.exports = function (self) {
  return (ipfsPath, options) => toStream.source(self.catPullStream(ipfsPath, options))
}
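Usage sketch for the Node.js stream variant, assuming it is registered as `ipfs.catReadableStream` (the registration name is not shown in this diff; `cid` is a placeholder):

```js
// pull-stream-to-stream turns the pull source into a plain readable stream
ipfs.catReadableStream(cid).pipe(process.stdout)
```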
Lines changed: 21 additions & 0 deletions
@@ -0,0 +1,21 @@
'use strict'

const promisify = require('promisify-es6')
const pull = require('pull-stream')

module.exports = function (self) {
  return promisify((ipfsPath, options, callback) => {
    if (typeof options === 'function') {
      callback = options
      options = {}
    }

    pull(
      self.catPullStream(ipfsPath, options),
      pull.collect((err, buffers) => {
        if (err) { return callback(err) }
        callback(null, Buffer.concat(buffers))
      })
    )
  })
}
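And the buffering, promisified variant, assuming the export is wired up as `ipfs.cat` (`cid` is again a placeholder):

```js
// callback style: a single concatenated Buffer is returned
ipfs.cat(cid, (err, data) => {
  if (err) throw err
  console.log(data.toString())
})

// promise style
ipfs.cat(cid).then((data) => console.log(data.toString()))
```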
Lines changed: 26 additions & 0 deletions
@@ -0,0 +1,26 @@
'use strict'

const { exporter } = require('ipfs-unixfs-engine')
const pull = require('pull-stream')
const errCode = require('err-code')
const { normalizePath } = require('./utils')

module.exports = function (self) {
  return (ipfsPath, options) => {
    options = options || {}

    if (options.preload !== false) {
      let pathComponents

      try {
        pathComponents = normalizePath(ipfsPath).split('/')
      } catch (err) {
        return pull.error(errCode(err, 'ERR_INVALID_PATH'))
      }

      self._preload(pathComponents[0])
    }

    return exporter(ipfsPath, self._ipld, options)
  }
}
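A sketch of consuming the exporter stream returned above, assuming the factory is exposed as `ipfs.getPullStream` and `cid` is a placeholder CID string:

```js
const pull = require('pull-stream')

pull(
  ipfs.getPullStream(cid),
  pull.collect((err, entries) => {
    if (err) throw err
    // entries come straight from the unixfs exporter; files also carry
    // a `content` pull source alongside their `path`
    entries.forEach((entry) => console.log(entry.path))
  })
)
```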
