Commit d1a9ddd

Merge pull request #6 from nginnever/master

added buffer importer

2 parents e80e895 + b39351d

File tree

11 files changed (+577, -80 lines)

.travis.yml

Lines changed: 19 additions & 0 deletions

@@ -0,0 +1,19 @@
+
+language: node_js
+node_js:
+  - '4'
+  - '5'
+
+before_install:
+  - npm i -g npm
+  # Workaround for a permissions issue with Travis virtual machine images
+
+addons:
+  firefox: 'latest'
+
+before_script:
+  - export DISPLAY=:99.0
+  - sh -e /etc/init.d/xvfb start
+
+script:
+  - npm test

karma.conf.js

Lines changed: 54 additions & 0 deletions

@@ -0,0 +1,54 @@
+const path = require('path')
+
+module.exports = function (config) {
+  config.set({
+    basePath: '',
+    frameworks: ['mocha'],
+
+    files: [
+      'tests/browser.js'
+    ],
+
+    preprocessors: {
+      'tests/*': ['webpack', 'sourcemap']
+    },
+
+    webpack: {
+      devtool: 'eval',
+      resolve: {
+        extensions: ['', '.js', '.json']
+      },
+      externals: {
+        fs: '{}'
+      },
+      node: {
+        Buffer: true
+      },
+      module: {
+        loaders: [
+          { test: /\.json$/, loader: 'json' }
+        ],
+        postLoaders: [
+          {
+            include: path.resolve(__dirname, 'node_modules/ipfs-unixfs'),
+            loader: 'transform?brfs'
+          }
+        ]
+      }
+    },
+
+    webpackMiddleware: {
+      noInfo: true,
+      stats: {
+        colors: true
+      }
+    },
+    reporters: ['spec'],
+    port: 9876,
+    colors: true,
+    logLevel: config.LOG_INFO,
+    autoWatch: false,
+    browsers: process.env.TRAVIS ? ['Firefox'] : ['Chrome'],
+    singleRun: true
+  })
+}
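
Two settings above work in tandem: externals: { fs: '{}' } stubs out Node's fs module in the browser bundle, while the brfs postLoader inlines the file reads that ipfs-unixfs performs at require time, so the stub is never actually hit. A minimal sketch of the kind of call brfs rewrites; the schema file name is an assumption for illustration, not taken from this diff:

// Sketch of the pattern brfs targets (file name here is hypothetical).
// In Node this executes at require time and reads from disk:
const fs = require('fs')
const path = require('path')

const schema = fs.readFileSync(path.join(__dirname, 'unixfs.proto'), 'utf8')

// At bundle time brfs statically evaluates the readFileSync call and replaces
// it with a string literal, which is why karma.conf.js can safely stub fs out
// with externals: { fs: '{}' }.
console.log(schema.length)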

package.json

Lines changed: 27 additions & 4 deletions

@@ -5,9 +5,14 @@
   "main": "src/index.js",
   "scripts": {
     "lint": "standard",
-    "coverage": "istanbul cover --print both -- _mocha tests/index.js",
-    "test": "mocha tests/index.js"
+    "test": "npm run test:node && npm run test:browser",
+    "test:node": "mocha tests/index.js",
+    "test:browser": "karma start karma.conf.js"
   },
+  "pre-commit": [
+    "lint",
+    "test"
+  ],
   "repository": {
     "type": "git",
     "url": "git+https://github.com/diasdavid/js-ipfs-data-importing.git"
@@ -22,16 +27,34 @@
   },
   "homepage": "https://github.com/diasdavid/js-ipfs-data-importing#readme",
   "devDependencies": {
+    "2": "0.0.1",
+    "brfs": "^1.4.3",
     "bs58": "^3.0.0",
+    "buffer-loader": "0.0.1",
     "chai": "^3.4.1",
     "fs-blob-store": "^5.2.1",
-    "ipfs-repo": "^0.5.0",
+    "highland": "^2.7.1",
+    "idb-plus-blob-store": "^1.0.0",
+    "ipfs-repo": "^0.5.1",
     "istanbul": "^0.4.1",
+    "json-loader": "^0.5.4",
+    "karma": "^0.13.19",
+    "karma-chrome-launcher": "^0.2.2",
+    "karma-cli": "^0.1.2",
+    "karma-firefox-launcher": "^0.1.7",
+    "karma-mocha": "^0.2.1",
+    "karma-sourcemap-loader": "^0.3.7",
+    "karma-spec-reporter": "0.0.24",
+    "karma-webpack": "^1.7.0",
     "mocha": "^2.3.4",
     "ncp": "^2.0.0",
     "pre-commit": "^1.1.2",
+    "raw-loader": "^0.5.1",
     "rimraf": "^2.5.1",
-    "standard": "^5.4.1"
+    "standard": "^6.0.8",
+    "string-to-stream": "^1.0.1",
+    "transform-loader": "^0.2.3",
+    "webpack": "^2.0.7-beta"
   },
   "dependencies": {
     "async": "^1.5.2",

src/chunker-fixed-size.js

Lines changed: 1 addition & 1 deletion

@@ -23,7 +23,7 @@ function FixedSizeChunker (size) {
     var chunk = new Buffer(size, 'binary')
     var newBuf = new Buffer(buf.length - size, 'binary')
     buf.copy(chunk, 0, 0, size)
-    buf.copy(newBuf, 0, size - 1, buf.length - size)
+    buf.copy(newBuf, 0, size, buf.length)
     buf = newBuf
    that.push(chunk)
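
This one-line change fixes a real off-by-one: Buffer.prototype.copy(target, targetStart, sourceStart, sourceEnd) takes an exclusive end offset, but the old call started one byte early (size - 1) and passed the remainder's length (buf.length - size) where the end offset belonged, so every chunk after the first was built from the wrong bytes. A standalone check of the corrected offsets (demo values are arbitrary):

// Standalone check of the corrected copy offsets (values chosen for the demo)
var buf = new Buffer('abcdefghij', 'binary') // 10 bytes
var size = 4

var chunk = new Buffer(size, 'binary')
var rest = new Buffer(buf.length - size, 'binary')

buf.copy(chunk, 0, 0, size) // copies bytes [0, 4)  -> 'abcd'
buf.copy(rest, 0, size, buf.length) // copies bytes [4, 10) -> 'efghij'

// The old call, buf.copy(rest, 0, size - 1, buf.length - size), would copy
// bytes [3, 6) instead, i.e. 'def', leaving the tail of `rest` uninitialized.
console.log(chunk.toString(), rest.toString()) // abcd efghij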

src/index.js

Lines changed: 93 additions & 14 deletions

@@ -7,33 +7,48 @@ const FixedSizeChunker = require('./chunker-fixed-size')
 const through2 = require('through2')
 const UnixFS = require('ipfs-unixfs')
 const async = require('async')
-
 exports = module.exports
 
 const CHUNK_SIZE = 262144
 
 // Use a layout + chunkers to convert a directory (or file) to the layout format
-exports.import = (options, callback) => {
+exports.import = function (options, callback) {
   // options.path : what to import
+  // options.buffer : import a buffer
+  // options.filename : optional file name for buffer
+  // options.stream : import a stream
   // options.recursive : follow dirs
   // options.chunkers : obj with chunkers to each type of data, { default: dumb-chunker }
   // options.dag-service : instance of block service
   const dagService = options.dagService
 
-  const stats = fs.statSync(options.path)
-  if (stats.isFile()) {
-    fileImporter(options.path, callback)
-  } else if (stats.isDirectory() && options.recursive) {
-    dirImporter(options.path, callback)
-  } else {
-    return callback(new Error('recursive must be true to add a directory'))
+  if (options.buffer) {
+    if (!Buffer.isBuffer(options.buffer)) {
+      return callback(new Error('buffer importer must take a buffer'))
+    }
+    bufferImporter(options.buffer, callback)
+  } else if (options.stream) {
+    if (!(typeof options.stream.on === 'function')) {
+      return callback(new Error('stream importer must take a readable stream'))
+    }
+    // TODO Create Stream Importer
+    // streamImporter(options.stream, callback)
+    return callback(new Error('stream importer has not been built yet'))
+  } else if (options.path) {
+    const stats = fs.statSync(options.path)
+    if (stats.isFile()) {
+      fileImporter(options.path, callback)
+    } else if (stats.isDirectory() && options.recursive) {
+      dirImporter(options.path, callback)
+    } else {
+      return callback(new Error('recursive must be true to add a directory'))
+    }
   }
 
   function fileImporter (path, callback) {
     const stats = fs.statSync(path)
     if (stats.size > CHUNK_SIZE) {
       const links = [] // { Hash: , Size: , Name: }
-
       fs.createReadStream(path)
         .pipe(new FixedSizeChunker(CHUNK_SIZE))
         .pipe(through2((chunk, enc, cb) => {
@@ -53,7 +68,6 @@ exports.import = (options, callback) => {
             leafSize: raw.fileSize(),
             Name: ''
           })
-
           cb()
         })
       }, (cb) => {
@@ -83,7 +97,8 @@ exports.import = (options, callback) => {
       }))
     } else {
       // create just one file node with the data directly
-      const fileUnixFS = new UnixFS('file', fs.readFileSync(path))
+      var buf = fs.readFileSync(path)
+      const fileUnixFS = new UnixFS('file', buf)
       const fileNode = new mDAG.DAGNode(fileUnixFS.marshal())
 
       dagService.add(fileNode, (err) => {
@@ -166,9 +181,73 @@ exports.import = (options, callback) => {
       })
     })
   }
+  function bufferImporter (buffer, callback) {
+    const links = [] // { Hash: , Size: , Name: }
+    if (buffer.length > CHUNK_SIZE) {
+      var fsc = new FixedSizeChunker(CHUNK_SIZE)
+      fsc.write(buffer)
+      fsc.end()
+      fsc.pipe(through2((chunk, enc, cb) => {
+        // TODO: check if this is right (I believe it should be type 'raw'
+        // https://github.com/ipfs/go-ipfs/issues/2331
+        const raw = new UnixFS('file', chunk)
+        const node = new mDAG.DAGNode(raw.marshal())
+
+        dagService.add(node, function (err) {
+          if (err) {
+            return log.err(err)
+          }
+          links.push({
+            Hash: node.multihash(),
+            Size: node.size(),
+            leafSize: raw.fileSize(),
+            Name: ''
+          })
+
+          cb()
+        })
+      }, (cb) => {
+        const file = new UnixFS('file')
+        const parentNode = new mDAG.DAGNode()
+        links.forEach((l) => {
+          file.addBlockSize(l.leafSize)
+          const link = new mDAG.DAGLink(l.Name, l.Size, l.Hash)
+          parentNode.addRawLink(link)
+        })
+        parentNode.data = file.marshal()
+        dagService.add(parentNode, (err) => {
+          if (err) {
+            return log.err(err)
+          }
+          // an optional file name provided
+          const fileName = options.filename
 
-  // function bufferImporter (path) {}
-  // function streamImporter (path) {}
+          callback(null, {
+            Hash: parentNode.multihash(),
+            Size: parentNode.size(),
+            Name: fileName
+          }) && cb()
+        })
+      }))
+    } else {
+      // create just one file node with the data directly
+      const fileUnixFS = new UnixFS('file', buffer)
+      const fileNode = new mDAG.DAGNode(fileUnixFS.marshal())
+
+      dagService.add(fileNode, (err) => {
+        if (err) {
+          return log.err(err)
+        }
+
+        callback(null, {
+          Hash: fileNode.multihash(),
+          Size: fileNode.size(),
+          Name: options.filename
+        })
+      })
+    }
+  }
+  // function streamImporter (stream, callback) {}
 }
 
 exports.export = function () {
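
With this change exports.import dispatches on three mutually exclusive inputs: options.buffer, options.stream (rejected until the stream importer lands), and options.path. A usage sketch for the new buffer branch; the module names and the DAGService wiring below are assumptions for illustration, only the options shape and the result shape ({ Hash, Size, Name }) come from this diff:

// Usage sketch (assumed wiring): a DAGService backed by a block service over
// an ipfs-repo, then a buffer import through the new branch.
const importing = require('ipfs-data-importing') // this repo
const DAGService = require('ipfs-merkle-dag').DAGService // assumed export

const dagService = new DAGService(/* blockService; construction elided */)

importing.import({
  buffer: new Buffer('hello world'),
  filename: 'hello.txt', // optional; echoed back as Name in the result
  dagService: dagService
}, (err, res) => {
  if (err) throw err
  // res: { Hash: <multihash>, Size: <node size>, Name: 'hello.txt' }
  console.log(res)
})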

tests/browser.js

Lines changed: 60 additions & 0 deletions

@@ -0,0 +1,60 @@
+/* eslint-env mocha */
+const tests = require('./buffer-test')
+const async = require('async')
+const store = require('idb-plus-blob-store')
+const _ = require('lodash')
+const IPFSRepo = require('ipfs-repo')
+const repoContext = require.context('buffer!./repo-example', true)
+
+const idb = window.indexedDB ||
+  window.mozIndexedDB ||
+  window.webkitIndexedDB ||
+  window.msIndexedDB
+
+idb.deleteDatabase('ipfs')
+idb.deleteDatabase('ipfs/blocks')
+
+describe('IPFS data importing tests on the Browser', function () {
+  before(function (done) {
+    this.timeout(23000)
+    var repoData = []
+    repoContext.keys().forEach(function (key) {
+      repoData.push({
+        key: key.replace('./', ''),
+        value: repoContext(key)
+      })
+    })
+
+    const mainBlob = store('ipfs')
+    const blocksBlob = store('ipfs/blocks')
+
+    async.eachSeries(repoData, (file, cb) => {
+      if (_.startsWith(file.key, 'datastore/')) {
+        return cb()
+      }
+
+      const blocks = _.startsWith(file.key, 'blocks/')
+      const blob = blocks ? blocksBlob : mainBlob
+      const key = blocks ? file.key.replace(/^blocks\//, '') : file.key
+
+      blob.createWriteStream({
+        key: key
+      }).end(file.value, cb)
+    }, done)
+  })
+
+  // create the repo constant to be used in the import a small buffer test
+  const options = {
+    stores: {
+      keys: store,
+      config: store,
+      datastore: store,
+      // datastoreLegacy: needs https://github.com/ipfs/js-ipfs-repo/issues/6#issuecomment-164650642
+      logs: store,
+      locks: store,
+      version: store
+    }
+  }
+  const repo = new IPFSRepo('ipfs', options)
+  tests(repo)
+})
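
The fixture loading here leans on webpack-only machinery: require.context('buffer!./repo-example', true) walks the repo-example directory at build time and, through buffer-loader, bundles each file as a Buffer, which the before hook then writes into IndexedDB via idb-plus-blob-store. A sketch of the context module's interface; the keys shown are hypothetical fixture paths:

// Sketch of what the require.context module exposes (webpack-only API;
// keys below are hypothetical examples):
const repoContext = require.context('buffer!./repo-example', true)

repoContext.keys().forEach((key) => {
  // key: e.g. './version' or './blocks/1220.../data' (hypothetical)
  const value = repoContext(key) // a Buffer holding that file's raw bytes
  console.log(key, value.length)
})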
